diff --git a/tests/etl/test_pushlog.py b/tests/etl/test_pushlog.py
index daee57e51..ee1c5b704 100644
--- a/tests/etl/test_pushlog.py
+++ b/tests/etl/test_pushlog.py
@@ -125,9 +125,4 @@ def test_empty_json_pushes(jm, test_base_dir,
     process = HgPushlogProcess()
     process.run(pushlog_fake_url, jm.project)
 
-    pushes_stored = jm.get_dhub().execute(
-        proc="jobs_test.selects.result_set_ids",
-        return_type='tuple'
-    )
-
-    assert len(pushes_stored) == 0
+    assert Push.objects.count() == 0
diff --git a/tests/jobs_test.json b/tests/jobs_test.json
index a2a6051da..117cd550a 100644
--- a/tests/jobs_test.json
+++ b/tests/jobs_test.json
@@ -23,45 +23,9 @@
             "sql": "SELECT * FROM `job_artifact` WHERE job_id = ?",
             "host_type": "master_host"
         },
-        "job_source": {
-            "sql": "SELECT res.push_timestamp,
-                    rev.comments,
-                    rev.repository_id,
-                    rev.revision
-                    FROM `revision` as rev
-                    LEFT JOIN `revision_map` as revmap
-                    ON rev.id = revmap.revision_id
-                    LEFT JOIN `result_set` as res
-                    ON revmap.result_set_id = res.id
-                    LEFT JOIN `job`
-                    ON job.result_set_id = res.id
-                    WHERE job.id = ?
-                    ",
-            "host_type": "master_host"
-        },
         "row_by_guid": {
             "sql": "SELECT * FROM `job` WHERE `job_guid` = ?",
             "host_type": "master_host"
-        },
-        "resultset_by_long_revision": {
-            "sql": "SELECT * FROM `result_set` WHERE `long_revision` = ?",
-            "host_type": "master_host"
-        },
-        "revision_ids": {
-            "sql": "SELECT `id`, `revision`, `long_revision` FROM `revision`",
-            "host_type": "master_host"
-        },
-        "revision_map": {
-            "sql": "SELECT * FROM `revision_map`",
-            "host_type": "master_host"
-        },
-        "result_set_ids": {
-            "sql": "SELECT `id`, `push_timestamp`, `revision_hash`, `long_revision`, `short_revision` FROM `result_set`",
-            "host_type": "master_host"
-        },
-        "result_sets": {
-            "sql": "SELECT * FROM `result_set`",
-            "host_type": "master_host"
         }
     },
     "updates": {
diff --git a/treeherder/model/derived/jobs.py b/treeherder/model/derived/jobs.py
index 4ee70b47e..5162ddab2 100644
--- a/treeherder/model/derived/jobs.py
+++ b/treeherder/model/derived/jobs.py
@@ -726,31 +726,6 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""
 
         return new_data
 
-    def get_revision_from_revision_hash(self, revision_hash):
-        """
-        Find a revision based on a revision_hash, if possible
-
-        This function only exists for backward-compatibility. This is needed
-        while we have older resultsets that were storing their revision_hashes
-        the old way, rather than just using their revisions. And for any jobs
-        that use the old revision_hashes through the API as the way to
-        identify what resultset owns the current job.
-
-        Once jobs are no longer submitted with revision_hashes, then we can
-        remove this function.
-        """
-
-        proc = "jobs.selects.get_revision_from_revision_hash"
-        rh = self.execute(
-            placeholders=[revision_hash],
-            proc=proc,
-            debug_show=self.DEBUG,
-        )
-        if not len(rh):
-            raise ValueError("Revision hash not found: {}".format(
-                revision_hash))
-        return rh[0]["long_revision"]
-
     def _load_job(self, job_datum, push_id, lower_tier_signatures):
         """
         Load a job into the treeherder database
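
A minimal sketch of the ORM-based check that replaces the removed datasource proc in test_empty_json_pushes, assuming Push is imported from treeherder.model.models (the import is outside this hunk):

    # Sketch only: the removed code counted the rows returned by the
    # "jobs_test.selects.result_set_ids" proc via jm.get_dhub().execute();
    # the replacement counts Push rows directly through the Django ORM.
    from treeherder.model.models import Push

    assert Push.objects.count() == 0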