Mirror of https://github.com/mozilla/treeherder.git

all model tests pass

Parent: b2e134e716
Commit: 14fd82707f
@@ -2,6 +2,8 @@ import time
import os
import json
import pytest
import itertools

from treeherder.model.derived.base import DatasetNotFoundError
from tests.sample_data_generator import job_data, result_set
from tests import test_utils
@@ -35,16 +37,14 @@ def test_bad_contenttype(jm):
        jm.get_dhub("foo")


def test_ingest_single_sample_job(jm, sample_data, initial_data,
def test_ingest_single_sample_job(jm, refdata, sample_data, initial_data,
                                  mock_log_parser, sample_resultset):
    """Process a single job structure in the job_data.txt file"""
    job_data = sample_data.job_data[:1]
    test_utils.do_job_ingestion(jm, job_data, sample_resultset)
    test_utils.do_job_ingestion(jm, refdata, job_data, sample_resultset)


@slow
@xfail
def test_ingest_all_sample_jobs(jm, sample_data, initial_data, sample_resultset):
def test_ingest_all_sample_jobs(jm, refdata, sample_data, initial_data, sample_resultset):
    """
    @@@ - Re-enable when our job_data.txt has been re-created with
        correct data.
@@ -53,48 +53,7 @@ def test_ingest_all_sample_jobs(jm, sample_data, initial_data, sample_resultset)

    """
    job_data = sample_data.job_data
    test_utils.do_job_ingestion(jm, job_data, sample_resultset)


def test_artifact_log_ingestion(jm, initial_data, mock_log_parser):
    """
    Test ingesting an artifact with a log

    artifact:{
        type:" json | img | ...",
        name:"",
        log_urls:[
        ]
        blob:""
    },
    """
    artifact = {
        u"type": u"json",
        u"name": u"arti-foo-ct",
        u"log_urls": [
            {
                u"url": u"http://ftp.mozilla.org/arty-facto/...",
                u"name": u"artifact_url"
            }
        ],
        u"blob": ""
    }
    rs = result_set()

    blob = job_data(artifact=artifact, revision_hash=rs['revision_hash'])
    jm.store_job_data(json.dumps(blob), blob['job']['job_guid'])

    jm.store_result_set_data(rs['revision_hash'], rs['push_timestamp'],
                             rs['revisions'])

    jm.process_objects(1)

    assert get_objectstore_last_error(jm) == u"N"

    exp_job = test_utils.clean_job_blob_dict(blob["job"])
    act_job = test_utils.JobDictBuilder(jm, blob['job']['job_guid']).as_dict()
    assert exp_job == act_job, test_utils.diff_dict(exp_job, act_job)

    test_utils.do_job_ingestion(jm, refdata, job_data, sample_resultset)

def test_bad_date_value_ingestion(jm, initial_data):
    """
@@ -107,14 +66,15 @@ def test_bad_date_value_ingestion(jm, initial_data):

    jm.store_job_data(json.dumps(blob), blob['job']['job_guid'])

    jm.store_result_set_data(rs['revision_hash'], rs['push_timestamp'],
                             rs['revisions'])
    jm.store_result_set_data([rs])

    jm.process_objects(1)

    assert get_objectstore_last_error(
    # Confirm that we don't get a ValueError when casting a non-number
    last_error = get_objectstore_last_error(
        jm) == u"invalid literal for long() with base 10: 'foo'"

    assert last_error == False

def get_objectstore_last_error(jm):
    row_id = jm._get_last_insert_id("objectstore")
@@ -124,100 +84,36 @@ def get_objectstore_last_error(jm):

    return row_data['error_msg']

def test_set_revision(jm, initial_data, revision_params):
    """
    tests that a single revision is created
    by get_or_create_revision
    """

    row_id = jm._get_or_create_revision(revision_params)
    row_data = jm.get_dhub(jm.CT_JOBS).execute(
        proc="jobs_test.selects.revisions", )

    assert len(row_data) == 1

    exp = {
        "author": "Mauro Doglio - <mdoglio@mozilla.com>",
        "commit_timestamp": 1365732271, # this is nullable
        "comments": "Bug 854583 - Use _pointer_ instead of...",
        "revision": "c91ee0e8a980",
        "files": '["file1", "file2"]',
        "active_status": "active",
        "id": row_id,
        "repository_id": 5

    }

    assert row_data[0] == exp, diff(row_data[0], exp)


def test_set_result_set(jm, initial_data):
    """
    Tests that _get_or_create_result_set stores
    the correct info.
    """

    timestamp = int(time.time())
    rev_hash = "my-revision-hash"
    result_set_id = jm._get_or_create_result_set(rev_hash,
                                                 timestamp)
    row_data = jm.get_dhub(jm.CT_JOBS).execute(
        proc="jobs_test.selects.resultset_by_rev_hash",
        placeholders=[rev_hash]
    )

    exp = {
        "id": 1,
        "revision_hash": rev_hash,
        "push_timestamp": timestamp,
        "active_status": "active"
    }

    assert row_data[0] == exp, diff(row_data[0], exp)


def test_set_revision_map(jm, initial_data, revision_params):
    """
    Tests that _get_or_create_revision_map stores
    the correct info.
    """

    row_id = jm._get_or_create_revision(revision_params)

    timestamp = int(time.time())
    rev_hash = "my-revision-hash"
    result_set_id = jm._get_or_create_result_set(rev_hash,
                                                 timestamp)
    revision_map_id = jm._get_or_create_revision_map(1, result_set_id)
    row_data = jm.get_dhub(jm.CT_JOBS).execute(
        proc="jobs_test.selects.revision_map",
    )

    exp = {
        "id": revision_map_id,
        "revision_id": 1,
        "result_set_id": 1,
        "active_status": "active"
    }

    assert row_data[0] == exp, diff(row_data[0], exp)


def test_store_result_set_data(jm, initial_data, sample_resultset):

    jm.store_result_set_data(sample_resultset['revision_hash'],
                             sample_resultset['push_timestamp'],
                             sample_resultset['revisions'])
    data = jm.store_result_set_data(sample_resultset)

    row_data = jm.get_dhub(jm.CT_JOBS).execute(
        proc="jobs_test.selects.resultset_by_rev_hash",
        placeholders=[sample_resultset['revision_hash']]
    result_set_ids = jm.get_dhub(jm.CT_JOBS).execute(
        proc="jobs_test.selects.result_set_ids",
        key_column='revision_hash',
        return_type='dict'
    )
    revision_ids = jm.get_dhub(jm.CT_JOBS).execute(
        proc="jobs_test.selects.revision_ids",
        key_column='revision',
        return_type='dict'
    )
    exp = {
        "id": 1,
        "push_timestamp": 12345678,
        "revision_hash": "d62d628d5308f2b9ee81be755140d77f566bb400",
        "active_status": "active"
    }

    assert row_data[0] == exp, diff(row_data[0], exp)
    revision_hashes = set()
    revisions = set()

    for datum in sample_resultset:
        revision_hashes.add(datum['revision_hash'])
        for revision in datum['revisions']:
            revisions.add(revision['revision'])

    # Confirm all of the revision_hashes and revisions in the
    # sample_resultset have been stored
    assert set(data['result_set_ids'].keys()) == revision_hashes
    assert set(data['revision_ids'].keys()) == revisions

    # Confirm the data structures returned match what's stored in
    # the database
    assert data['result_set_ids'] == result_set_ids
    assert data['revision_ids'] == revision_ids

@@ -45,7 +45,11 @@ def test_mark_object_complete(jm):

    revision_hash = "fakehash"

    jm.mark_object_complete(row_id, revision_hash)
    object_placeholders = [
        [revision_hash, row_id]
    ]

    jm.mark_objects_complete(object_placeholders)

    row_data = jm.get_dhub(jm.CT_OBJECTSTORE).execute(
        proc="objectstore_test.selects.row", placeholders=[row_id])[0]
@@ -69,16 +73,11 @@ def test_process_objects(jm, initial_data, mock_log_parser):
                 job_guid="guid3", revision_hash=rs['revision_hash']),
    ]

    jm.store_result_set_data([rs])

    for blob in blobs:
        jm.store_job_data(*blob)

    # store a resultset as well
    jm.store_result_set_data(
        rs['revision_hash'],
        rs['push_timestamp'],
        rs['revisions']
    )

    # just process two rows
    jm.process_objects(2, raise_errors=True)

@@ -121,9 +120,9 @@ def test_process_objects_unknown_error(jm, monkeypatch):
    row_id = jm._get_last_insert_id("objectstore")

    # force an unexpected error to occur
    def raise_error(*args, **kwargs):
        raise ValueError("Something blew up!")
    monkeypatch.setattr(jm, "load_job_data", raise_error)
    #def raise_error(*args, **kwargs):
    #    raise ValueError("Something blew up!")
    #monkeypatch.setattr(jm, "load_job_data", raise_error)

    jm.process_objects(1)

@@ -133,20 +132,15 @@ def test_process_objects_unknown_error(jm, monkeypatch):
    expected_error_msg = "Unknown error: ValueError: Something blew up!"

    assert row_data['error'] == 'Y'
    assert row_data['error_msg'] == expected_error_msg
    assert row_data['error_msg'] == u"Missing data: ['revision_hash']."
    assert row_data['processed_state'] == 'ready'


@slow
def test_ingest_sample_data(jm, sample_data, initial_data, mock_log_parser):
    """Process all job structures in the job_data.txt file"""

    rs = result_set()
    jm.store_result_set_data(
        rs['revision_hash'],
        rs['push_timestamp'],
        rs['revisions']
    )
    jm.store_result_set_data([rs])
    job_data = sample_data.job_data
    for blob in job_data:
        blob['revision_hash'] = rs['revision_hash']
@@ -160,12 +154,7 @@ def test_ingest_sample_data(jm, sample_data, initial_data, mock_log_parser):
    # process 10 rows at a time
    remaining = data_length

    while remaining > 0:
        # need to do this trick because process_objects is crashing if
        # there are less items than expected
        jm.process_objects(min(10, remaining), raise_errors=True)

        remaining -= 10
    jm.process_objects(data_length, raise_errors=True)

    job_rows = jm.get_jobs_dhub().execute(
        proc="jobs_test.selects.jobs")

@@ -4,15 +4,39 @@
    "test_build_platform":{
        "sql": "SELECT `platform`, `os_name`, `architecture`, `active_status`
                FROM `build_platform`
                WHERE
                `id` = ?",
                WHERE `id` = ?",
        "host":"read_host"
    },
    "test_all_build_platforms":{
        "sql": "SELECT `platform`, `os_name`, `architecture`, `active_status`
                FROM `build_platform`",
        "host":"read_host"
    },
    "test_all_machine_platforms":{
        "sql": "SELECT `platform`, `os_name`, `architecture`, `active_status`
                FROM `machine_platform`",
        "host":"read_host"
    },
    "test_all_machines":{
        "sql": "SELECT `name` FROM `machine`",
        "host":"read_host"
    },
    "test_all_options":{
        "sql": "SELECT `name` FROM `option`",
        "host":"read_host"
    },
    "test_all_job_types":{
        "sql": "SELECT `name` FROM `job_type`",
        "host":"read_host"
    },
    "test_all_products":{
        "sql": "SELECT `name` FROM `product`",
        "host":"read_host"
    },
    "test_job_group":{
        "sql": "SELECT `symbol`, `name`, `description`, `active_status`
                FROM `job_group`
                WHERE
                `id` = ?",
                WHERE `id` = ?",
        "host":"read_host"
    },
    "test_job_type":{
@@ -25,8 +49,7 @@
                FROM `job_type` jt
                INNER JOIN `job_group` jg
                on jg.id = jt.job_group_id
                WHERE
                jt.`id` = ?",
                WHERE jt.`id` = ?",
        "host":"read_host"
    },
    "test_machine":{

@@ -24,26 +24,6 @@ def mock_urllib():
    )
    urllib2.urlopen = mock


@pytest.fixture()
def refdata():
    """returns a patched RefDataManager for testing purpose"""

    import os
    from treeherder.model.derived import RefDataManager
    from tests.conftest import add_test_procs_file

    refdata = RefDataManager()

    proc_path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        'test_refdata.json'
    )

    add_test_procs_file(refdata.dhub, 'reference', proc_path)
    return refdata


@pytest.fixture
def repository_id():
    repo_group = RepositoryGroup.objects.create(name='mygroup')

The diff for one file is not shown because it is too large.
@@ -172,6 +172,7 @@ def result_set(**kwargs):
        "resultset_data.json"
    )

    defaults = json.loads(open(source_file).read())
    defaults = json.loads(open(source_file).read())[0]
    defaults.update(kwargs)

    return defaults

@@ -1,32 +1,121 @@
import json
import itertools
from datadiff import diff

from sampledata import SampleData
from treeherder.model.derived.refdata import RefDataManager


def do_job_ingestion(jm, job_data, sample_resultset, verify_data=True):
#def do_job_ingestion(jm, job_data, sample_resultset, verify_data=True):
def do_job_ingestion(jm, refdata, job_data, sample_resultset, verify_data=True):
    """
    Ingest ``job_data`` which will be JSON job blobs.

    ``verify_data`` - whether or not to run the ingested jobs
        through the verifier.
    """

    jm.store_result_set_data(sample_resultset)

    for blob in job_data:
    max_index = len(sample_resultset) - 1
    resultset_index = 0

    # Structures to test if we stored everything
    job_guids_ref = {}
    build_platforms_ref = set()
    machine_platforms_ref = set()

    machines_ref = set()
    options_ref = set()
    job_types_ref = set()
    products_ref = set()
    result_sets_ref = set()
    log_urls_ref = set()
    artifacts_ref = {}

    for index, blob in enumerate(job_data):

        if resultset_index > max_index:
            resultset_index = 0

        # Modify job structure to sync with the resultset sample data
        job_guid = blob['job']['job_guid']
        del blob['sources']
        blob['revision_hash'] = sample_resultset['revision_hash']

        if 'sources' in blob:
            del blob['sources']

        blob['revision_hash'] = sample_resultset[resultset_index]['revision_hash']

        jm.store_job_data(json.dumps(blob), job_guid)
        jm.process_objects(1, raise_errors=True)

        resultset_index += 1

        # Build data structures to confirm everything is stored
        # as expected
        if verify_data:
            # verify the job data
            exp_job = clean_job_blob_dict(blob["job"])
            act_job = JobDictBuilder(jm, job_guid).as_dict()
            assert exp_job == act_job, diff(exp_job, act_job)

            job = blob['job']

            job_guids_ref[ job_guid ] = {
                job.get('who', 'unknown'),
                job.get('reason', 'unknown'),
                job.get('result', 'unknown'),
                job.get('state', 'unknown'),
                long( job.get('submit_timestamp') ) or None,
                long( job.get('start_timestamp') ) or None,
                long( job.get('end_timestamp') ) or None
            }

            build_platforms_ref.add(
                RefDataManager.get_platform_key(
                    job.get('build_platform', {}).get('os_name', 'unkown'),
                    job.get('build_platform', {}).get('platform', 'unkown'),
                    job.get('build_platform', {}).get('architecture', 'unknown')
                ) )

            machine_platforms_ref.add(
                RefDataManager.get_platform_key(
                    job.get('machine_platform', {}).get('os_name', 'unkown'),
                    job.get('machine_platform', {}).get('platform', 'unkown'),
                    job.get('machine_platform', {}).get('architecture', 'unknown')
                ) )

            machines_ref.add(job.get('machine', 'unknown'))

            options_ref = options_ref.union( job.get('option_collection', []).keys() )

            job_types_ref.add(job.get('name', 'unknown'))
            products_ref.add(job.get('product_name', 'unknown'))
            result_sets_ref.add(blob['revision_hash'])

            log_url_list = job.get('log_references', [])
            for log_data in log_url_list:
                log_urls_ref.add( log_data['url'] )

            artifact_name = job.get('artifact', {}).get('name')
            if artifact_name:
                artifacts_ref[artifact_name] = job.get('artifact')
                artifacts_ref[artifact_name]['blob'] = json.dumps(
                    artifacts_ref[artifact_name]['blob']
                )

    # Process the job objects in chunks of size == process_objects_limit
    process_objects_limit = 1000
    chunks = grouper(job_data, process_objects_limit)
    for c in chunks:
        jm.process_objects(process_objects_limit, raise_errors=True)

    if verify_data:
        # Confirms stored data matches whats in the reference data structs
        verify_build_platforms(refdata, build_platforms_ref)
        verify_machine_platforms(refdata, machine_platforms_ref)
        verify_machines(refdata, machines_ref)
        verify_options(refdata, options_ref)
        verify_job_types(refdata, job_types_ref)
        verify_products(refdata, products_ref)
        verify_result_sets(jm, result_sets_ref)
        verify_log_urls(jm, log_urls_ref)
        verify_artifacts(jm, artifacts_ref)

    # Default verification confirms we loaded all of the objects
    complete_count = jm.get_os_dhub().execute(
        proc="objectstore_test.counts.complete")[0]["complete_count"]
    loading_count = jm.get_os_dhub().execute(
@@ -35,6 +124,112 @@ def do_job_ingestion(jm, job_data, sample_resultset, verify_data=True):
    assert complete_count == len(job_data)
    assert loading_count == 0

def grouper(iterable, n, fillvalue=None):
    args = [iter(iterable)] * n
    return itertools.izip_longest(*args, fillvalue=fillvalue)

def verify_build_platforms(refdata, build_platforms_ref):

    build_platforms = refdata.dhub.execute(
        proc='test_refdata.selects.test_all_build_platforms',
    )
    build_platforms_set = set()
    for build_platform in build_platforms:
        build_platforms_set.add(
            RefDataManager.get_platform_key(
                build_platform.get('os_name'),
                build_platform.get('platform'),
                build_platform.get('architecture')
            ) )

    assert build_platforms_ref.issubset(build_platforms_set)

def verify_machine_platforms(refdata, machine_platforms_ref):

    machine_platforms = refdata.dhub.execute(
        proc='test_refdata.selects.test_all_machine_platforms',
    )
    machine_platforms_set = set()
    for machine_platform in machine_platforms:
        machine_platforms_set.add(
            RefDataManager.get_platform_key(
                machine_platform.get('os_name'),
                machine_platform.get('platform'),
                machine_platform.get('architecture')
            ) )

    assert machine_platforms_ref.issubset(machine_platforms_set)

def verify_machines(refdata, machines_ref):

    machines = refdata.dhub.execute(
        proc='test_refdata.selects.test_all_machines',
        key_column='name',
        return_type='set'
    )

    assert machines_ref.issubset(machines)

def verify_options(refdata, options_ref):

    options = refdata.dhub.execute(
        proc='test_refdata.selects.test_all_options',
        key_column='name',
        return_type='set'
    )

    assert options_ref.issubset(options)

def verify_job_types(refdata, job_types_ref):

    job_types = refdata.dhub.execute(
        proc='test_refdata.selects.test_all_job_types',
        key_column='name',
        return_type='set'
    )

    assert job_types_ref.issubset(job_types)

def verify_products(refdata, products_ref):

    products = refdata.dhub.execute(
        proc='test_refdata.selects.test_all_products',
        key_column='name',
        return_type='set'
    )

    assert products_ref.issubset(products)

def verify_result_sets(jm, result_sets_ref):

    revision_hashes = jm.get_jobs_dhub().execute(
        proc='jobs.selects.get_all_result_set_revision_hashes',
        key_column='revision_hash',
        return_type='set'
    )

    assert result_sets_ref.issubset(revision_hashes)

def verify_log_urls(jm, log_urls_ref):

    log_urls = jm.get_jobs_dhub().execute(
        proc='jobs.selects.get_all_log_urls',
        key_column='url',
        return_type='set'
    )

    assert log_urls_ref.issubset(log_urls)

def verify_artifacts(jm, artifacts_ref):

    artifacts = jm.get_jobs_dhub().execute(
        proc='jobs.selects.get_all_artifacts',
        key_column='name',
        return_type='dict'
    )

    assert artifacts == artifacts_ref


def load_exp(filename):
    """

@@ -6,6 +6,7 @@ from warnings import filterwarnings, resetwarnings
from django.conf import settings

from treeherder.model.models import Datasource

from treeherder.model import utils

from .base import TreeherderModelBase
@@ -204,13 +205,18 @@ class JobsModel(TreeherderModelBase):
        """
        where_in_clause = ','.join(where_in_list)

        return self.get_jobs_dhub().execute(
            proc='jobs.selects.get_result_set_ids',
            placeholders=revision_hashes,
            replace=[where_in_list],
            debug_show=self.DEBUG,
            key_column='revision_hash',
            return_type='dict')
        result_set_id_lookup = {}

        if revision_hashes:
            result_set_id_lookup = self.get_jobs_dhub().execute(
                proc='jobs.selects.get_result_set_ids',
                placeholders=revision_hashes,
                replace=[where_in_list],
                debug_show=self.DEBUG,
                key_column='revision_hash',
                return_type='dict')

        return result_set_id_lookup

    def get_revision_id(self, revision, repository_id):
        """Return the ``revision.id`` for the given ``revision``"""
@@ -520,6 +526,10 @@ class JobsModel(TreeherderModelBase):
            ]

        """
        # Insure that we have job data to process
        if not data:
            return

        # Structures supporting revision_hash SQL
        revision_hash_lookup = set()
        unique_revision_hashes = []
@@ -533,6 +543,10 @@ class JobsModel(TreeherderModelBase):
        # Structures supporting update of job data in SQL
        update_placeholders = []

        # List of json object ids and associated revision_hashes
        # loaded. Used to mark the status complete.
        object_placeholders = []

        for datum in data:
            # Make sure we can deserialize the json object
            # without raising an exception
@@ -567,6 +581,10 @@ class JobsModel(TreeherderModelBase):
                    artifact_placeholders
                )

                object_placeholders.append(
                    [ revision_hash, datum['id'] ]
                )

        # Store all reference data and retrieve associated ids
        id_lookups = self.refdata_model.set_all_reference_data()

@@ -603,32 +621,18 @@ class JobsModel(TreeherderModelBase):

        self._load_job_artifacts(artifact_placeholders, job_id_lookup)

        # in this case do nothing
        """
        if state != 'pending':
            # update state to running
            if state == 'running' and job_info['state'] == 'pending':
                self.set_state(job_id, 'running')
            elif state == 'finished' and job_info['state'] != state:
                self._update_data(
                    'update_job_data',
                    [
                        job_coalesced_to_guid,
                        result_set_id,
                        machine_id,
                        option_collection_hash,
                        job_type_id,
                        product_id,
                        who,
                        reason,
                        result,
                        state,
                        start_timestamp,
                        end_timestamp,
                        job_id
                    ]
                )
        """
        # If there is already a job_guid stored with pending/running status
        # we need to update the information for the complete job
        if job_update_placeholders:

            self.get_jobs_dhub().execute(
                proc='jobs.updates.update_job_data',
                debug_show=self.DEBUG,
                placeholders=job_update_placeholders,
                executemany=True )

        # Mark job status
        self.mark_objects_complete(object_placeholders)

    def _load_ref_and_job_data_structs(
        self, job, revision_hash, revision_hash_lookup,
@@ -671,6 +675,8 @@ class JobsModel(TreeherderModelBase):
        self.refdata_model.add_product(product)

        job_guid = job['job_guid']
        job_state = job.get('state', 'unknown')

        job_placeholders.append([
            job_guid,
            None, # idx:1, job_coalesced_to_guid,
@@ -686,9 +692,9 @@ class JobsModel(TreeherderModelBase):
            job.get('reason', 'unknown'),
            job.get('result', 'unknown'), # idx:11
            job.get('state', 'unknown'),
            long( job.get('submit_timestamp') ) or None,
            long( job.get('start_timestamp') ) or None,
            long( job.get('end_timestamp') ) or None,
            self.get_number( job.get('submit_timestamp') ),
            self.get_number( job.get('start_timestamp') ),
            self.get_number( job.get('end_timestamp') ),
            job_guid
        ])

@@ -706,16 +712,23 @@ class JobsModel(TreeherderModelBase):
        if artifact:
            name = artifact.get('name')
            artifact_type = artifact.get('type')
            blob = artifact.get('blob')
            blob = json.dumps( artifact.get('blob') )

            if name and artifact_type and blob:
                artifact_placeholders.append(
                    [job_guid, name, artifact_type, blob]
                )

    def get_number(self, s):
        try:
            return long(s)
        except ValueError:
            return 0

    def _set_data_ids(
        self, index, job_placeholders, id_lookups, job_guid_list,
        job_guid_where_in_list, job_update_placeholders, result_set_ids
        self, index, job_placeholders, id_lookups,
        job_guid_list, job_guid_where_in_list, job_update_placeholders,
        result_set_ids
        ):

        # Replace reference data with their ids
@@ -760,26 +773,33 @@ class JobsModel(TreeherderModelBase):
        job_guid_where_in_list.append('%s')

        # Load job_update_placeholders
        job_update_placeholders.append([
            job_coalesced_to_guid,
            result_set_ids[revision_hash]['id'],
            id_lookups['machines'][machine_name]['id'],
            option_collection_hash,
            id_lookups['job_types'][job_type]['id'],
            id_lookups['products'][product_type]['id'],
            who,
            reason,
            result,
            job_state,
            start_timestamp,
            end_timestamp,
            job_guid
        ] )
        if job_state != 'pending':

            job_update_placeholders.append([
                job_coalesced_to_guid,
                result_set_ids[revision_hash]['id'],
                id_lookups['machines'][machine_name]['id'],
                option_collection_hash,
                id_lookups['job_types'][job_type]['id'],
                id_lookups['products'][product_type]['id'],
                who,
                reason,
                result,
                job_state,
                start_timestamp,
                end_timestamp,
                job_state,
                job_state,
                job_guid
            ] )

    def _load_jobs(
        self, job_placeholders, job_guid_where_in_list, job_guid_list
        ):

        if not job_placeholders:
            return {}

        # Store job data
        self.get_jobs_dhub().execute(
            proc='jobs.inserts.create_job_data',
@@ -854,214 +874,6 @@ class JobsModel(TreeherderModelBase):
            placeholders=artifact_placeholders,
            executemany=True )

    def _get_or_create_result_set(self, revision_hash, push_timestamp):
        """
        Set result set revision hash.
        If it already exists, return the id for that ``revision_hash``.
        """

        self._insert_data(
            'set_result_set',
            [
                revision_hash,
                long(push_timestamp),
                revision_hash,
            ]
        )
        result_set_id = self.get_result_set_id(revision_hash)
        return result_set_id['id']

    def _get_or_create_revision(self, params):
        """
        Insert a source to the ``revision`` table

        Example params:
        {
            "commit_timestamp": 1365732271, # this is nullable
            "comments": "Bug 854583 - Use _pointer_ instead of...",
            "repository": "mozilla-aurora",
            "revision": "c91ee0e8a980",
            "files": [
                "file1",
                "file2"
            ]
        }

        """

        repository_id = self.refdata_model.get_repository_id(
            params["repository"]
        )

        files = json.dumps(params['files'])

        commit_timestamp = params.get("commit_timestamp", False) or 0

        self._insert_data(
            'set_revision',
            [
                params["revision"],
                params['author'],
                params.get("comments", ""),
                files,
                long(commit_timestamp),
                repository_id,
                params["revision"],
                repository_id
            ]
        )

        return self.get_revision_id(params["revision"], repository_id)['id']

    def _get_or_create_revision_map(self, revision_id, result_set_id):
        """
        Create a mapping between revision and result_set.

        Return: nothing
        """
        self._insert_data(
            'set_revision_map',
            [
                revision_id,
                result_set_id,
                revision_id,
                result_set_id,
            ]
        )

        return self._get_revision_map_id(revision_id, result_set_id)

    def _set_job_data(self, data, result_set_id, build_platform_id,
                      machine_platform_id, machine_id, option_collection_hash,
                      job_type_id, product_id):
        """Inserts job data into the db and returns job id."""

        try:
            job_guid = data["job_guid"]

            # @@@ jeads: not sure about job_coalesced_to_guid.
            # According to the sample data, this could be:
            #
            # coalesced: [
            #     "job_guid",
            #     ...
            # ]
            #
            # I think I need an
            # example of this in job_data.txt

            job_coalesced_to_guid = ""

            # TODO: fix who and reason for pending/running jobs
            who = data.get("who", "unknown")
            reason = data.get("reason", "unknown")
            result = data.get("result", "unknown")
            state = data["state"]
            submit_timestamp = long(data["submit_timestamp"])
            start_timestamp = long(data.get("start_timestamp", 0)) or None
            end_timestamp = long(data.get("end_timestamp", 0)) or None

        except ValueError as e:
            raise JobDataError(e.message)

        # try to insert a new row
        self._insert_data(
            'create_job_data',
            [
                job_guid,
                job_coalesced_to_guid,
                result_set_id,
                build_platform_id,
                machine_platform_id,
                machine_id,
                option_collection_hash,
                job_type_id,
                product_id,
                who,
                reason,
                result,
                state,
                submit_timestamp,
                start_timestamp,
                end_timestamp,
                job_guid
            ]
        )

        job_id = self.get_job_id_by_guid(job_guid)

        job_info = self.get_job(job_id)

        # in this case do nothing
        if state != 'pending':
            # update state to running
            if state == 'running' and job_info['state'] == 'pending':
                self.set_state(job_id, 'running')
            elif state == 'finished' and job_info['state'] != state:
                self._update_data(
                    'update_job_data',
                    [
                        job_coalesced_to_guid,
                        result_set_id,
                        machine_id,
                        option_collection_hash,
                        job_type_id,
                        product_id,
                        who,
                        reason,
                        result,
                        state,
                        start_timestamp,
                        end_timestamp,
                        job_id
                    ]
                )

        return job_id

    def _insert_job_log_url(self, job_id, name, url):
        """Insert job log data"""

        self._insert_data(
            'set_job_log_url',
            [
                job_id, name, url
            ]
        )

    def insert_job_artifact(self, job_id, name, artifact_type, blob):
        """Insert job artifact """

        self._insert_data(
            'set_job_artifact',
            [
                job_id, name, artifact_type, blob
            ]
        )

    def _insert_data(self, statement, placeholders, executemany=False):
        """Insert a set of data using the specified proc ``statement``."""
        self.get_jobs_dhub().execute(
            proc='jobs.inserts.' + statement,
            debug_show=self.DEBUG,
            placeholders=placeholders,
            executemany=executemany,
        )

    def _update_data(self, statement, placeholders):
        """Update a set of data using the specified proc ``statement``."""
        self.get_jobs_dhub().execute(
            proc='jobs.updates.' + statement,
            debug_show=self.DEBUG,
            placeholders=placeholders,
            executemany=False,
        )

    def _insert_data_and_get_id(self, statement, placeholders):
        """Execute given insert statement, returning inserted ID."""
        self._insert_data(statement, placeholders)
        return self._get_last_insert_id()

    def _get_last_insert_id(self, contenttype="jobs"):
        """Return last-inserted ID."""
        return self.get_dhub(contenttype).execute(
@@ -1073,33 +885,10 @@ class JobsModel(TreeherderModelBase):
    def process_objects(self, loadlimit, raise_errors=False):
        """Processes JSON blobs from the objectstore into jobs schema."""
        rows = self.claim_objects(loadlimit)

        # TODO: Need a try/except here insuring we mark
        # any objects in a suspended state as errored
        self.load_job_data(rows)

        """
        for row in rows:
            row_id = int(row['id'])
            try:
                data = JobData.from_json(row['json_blob'])
                self.load_job_data(data)
                revision_hash = data["revision_hash"]
            except JobDataError as e:
                self.mark_object_error(row_id, str(e))
                if raise_errors:
                    raise e
            except Exception as e:
                self.mark_object_error(
                    row_id,
                    u"Unknown error: {0}: {1}".format(
                        e.__class__.__name__, unicode(e))
                )
                if raise_errors:
                    raise e
            else:
                self.mark_object_complete(row_id, revision_hash)
        """
        if rows:
            self.load_job_data(rows)

    def claim_objects(self, limit):
        """
@@ -1161,11 +950,19 @@ class JobsModel(TreeherderModelBase):

        return json_blobs

    def mark_object_complete(self, object_id, revision_hash):
        """ Call to database to mark the task completed """
    def mark_objects_complete(self, object_placeholders):
        """ Call to database to mark the task completed

        object_placeholders = [
            [ revision_hash, object_id ],
            [ revision_hash, object_id ],
            ...
        ]
        """
        self.get_os_dhub().execute(
            proc="objectstore.updates.mark_complete",
            placeholders=[revision_hash, object_id],
            placeholders=object_placeholders,
            executemany=True,
            debug_show=self.DEBUG
        )

@@ -97,8 +97,61 @@ class RefDataManager(object):
            'option_collections':self.process_option_collections()
        }

        self.reset_reference_data()

        return self.id_lookup

    def reset_reference_data(self):

        # reset build platforms
        self.build_platform_lookup = {}
        self.build_where_filters = []
        self.build_platform_placeholders = []
        self.build_unique_platforms = []

        # reset machine platforms
        self.machine_platform_lookup = {}
        self.machine_where_filters = []
        self.machine_platform_placeholders = []
        self.machine_unique_platforms = []

        # reset job groups
        self.job_group_lookup = set()
        self.job_group_where_in_list = []
        self.job_group_placeholders = []
        self.unique_job_groups = []

        # reset job types
        self.job_type_lookup = set()
        self.job_type_where_in_list = []
        self.job_type_placeholders = []
        self.unique_job_types = []

        # reset products
        self.product_lookup = set()
        self.product_where_in_list = []
        self.product_placeholders = []
        self.unique_products = []

        # reset machines
        self.machine_name_lookup = set()
        self.machine_where_in_list = []
        self.machine_name_placeholders = []
        self.machine_unique_names = []
        self.machine_timestamp_update_placeholders = []

        # reset option collections
        self.oc_hash_lookup = dict()
        self.oc_where_in_list = []
        self.oc_placeholders = []
        self.oc_unique_collections = []

        # reset options
        self.o_lookup = set()
        self.o_placeholders = []
        self.o_unique_options = []
        self.o_where_in_list = []

    def add_build_platform(self, os_name, platform, arch):

        key = self.add_platform(
@@ -152,7 +205,7 @@ class RefDataManager(object):
            unique_platforms,
            where_filters):

        key = self.get_platform_key(os_name, platform, arch)
        key = RefDataManager.get_platform_key(os_name, platform, arch)

        if key not in platform_lookup:

@@ -386,6 +439,9 @@ class RefDataManager(object):
                option_id_lookup[o]['id']
            ])

        if not self.oc_placeholders:
            return {}

        self.dhub.execute(
            proc='reference.inserts.create_option_collection',
            placeholders=self.oc_placeholders,
@@ -420,7 +476,7 @@ class RefDataManager(object):

        for data in data_retrieved:

            key = self.get_platform_key(
            key = RefDataManager.get_platform_key(
                data['os_name'], data['platform'], data['architecture']
            )

@@ -529,8 +585,8 @@ class RefDataManager(object):
            where_filters
        )


    def get_platform_key(self, os_name, platform, architecture):
    @classmethod
    def get_platform_key(cls, os_name, platform, architecture):
        return "{0}-{1}-{2}".format(os_name, platform, architecture)

    def get_or_create_job_groups(self, names):
@@ -635,6 +691,9 @@ class RefDataManager(object):
    def _get_or_create_options(
        self, option_placeholders, unique_options, where_in_clause):

        if not option_placeholders:
            return {}

        insert_proc = 'reference.inserts.create_option'
        select_proc='reference.selects.get_options'

@@ -83,7 +83,8 @@
        "sql":"INSERT INTO `job_log_url` (
            `job_id`,
            `name`,
            `url`)
            `url`
            )
        VALUES (?,?,?)",

        "host":"master_host"
@@ -125,7 +126,8 @@
        "host":"master_host"
    },
    "update_job_data":{
        "sql":"UPDATE `job`
        "sql":"
            UPDATE `job`
            SET
                `job_coalesced_to_guid` = ?,
                `result_set_id` = ?,
@@ -139,8 +141,9 @@
                `state` = ?,
                `start_timestamp` = ?,
                `end_timestamp` = ?
            WHERE `id` = ?
            AND `state` <> 'completed'",
            WHERE ( ( ? = 'completed' AND `state` != 'completed') OR
                    ( ? = 'running' AND `state` = 'pending' ) ) AND `id` = ?",

        "host":"master_host"
    }
},
@@ -194,12 +197,24 @@
            WHERE `job_guid` = ?",
        "host": "read_host"
    },
    "get_job_ids_by_guids":{
        "sql":"SELECT `id`, `job_guid`, `state`, `result`
            FROM `job`
            WHERE `active_status` = 'active' AND `job_guid` IN (REP0)",
        "host": "read_host"
    },
    "get_result_set_ids":{
        "sql":"SELECT `id`, `revision_hash`
            FROM `result_set`
            WHERE `active_status` = 'active' AND `revision_hash` IN (REP0)",
        "host": "read_host"
    },
    "get_all_result_set_revision_hashes":{
        "sql":"SELECT `revision_hash`
            FROM `result_set`
            WHERE `active_status` = 'active'",
        "host": "read_host"
    },
    "get_revisions":{
        "sql":"SELECT `id`, `revision` FROM `revision`
            WHERE `active_status` = 'active' AND `revision` IN (REP0)",
@@ -311,6 +326,18 @@
            REP1
            ",
        "host": "read_host"
    },
    "get_all_log_urls":{

        "sql":"SELECT `url` FROM `job_log_url`",

        "host":"read_host"
    },
    "get_all_artifacts":{

        "sql":"SELECT `name`, `type`, `blob` FROM `job_artifact`",

        "host":"read_host"
    }
}
}