From 49d4dfc59d399d3ef6499db5695200c03f41ac8d Mon Sep 17 00:00:00 2001
From: Ed Morley
Date: Mon, 29 Jun 2015 23:54:15 +0100
Subject: [PATCH] Bug 1178389 - Remove job_log_url.parse_timestamp

Since it's unused.
---
 tests/conftest.py                                    |  8 ++------
 treeherder/client/thclient/client.py                 | 11 ++---------
 treeherder/model/derived/jobs.py                     |  8 ++------
 treeherder/model/sql/jobs.json                       | 11 +++++------
 .../model/sql/template_schema/project_jobs.sql.tmpl  |  2 --
 treeherder/webapp/api/job_log_url.py                 |  7 ++-----
 6 files changed, 13 insertions(+), 34 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index 3825225c6..363a65e2c 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -12,7 +12,6 @@ import pytest
 from django.core.management import call_command
 from webtest.app import TestApp
 import responses
-import time

 from tests.sampledata import SampleData
 from treeherder.etl.oauth_utils import OAuthCredentials
@@ -398,11 +397,8 @@ def mock_post_collection(monkeypatch, set_oauth_credentials):
 @pytest.fixture
 def mock_update_parse_status(monkeypatch, set_oauth_credentials):
     def _update_parse_status(th_client, project, oauth_key, oauth_secret,
-                             job_log_url_id, parse_status, timestamp=None):
-        if timestamp is None:
-            timestamp = time.time()
-        jsondata = json.dumps({'parse_status': parse_status,
-                               'parse_timestamp': timestamp})
+                             job_log_url_id, parse_status):
+        jsondata = json.dumps({'parse_status': parse_status})
         signed_uri = th_client._get_project_uri(
             project,
             th_client.UPDATE_ENDPOINT.format(job_log_url_id),
diff --git a/treeherder/client/thclient/client.py b/treeherder/client/thclient/client.py
index 670edb92a..b2ccd0091 100644
--- a/treeherder/client/thclient/client.py
+++ b/treeherder/client/thclient/client.py
@@ -871,8 +871,7 @@ class TreeherderClient(object):
                         timeout=timeout)

     def update_parse_status(self, project, oauth_key, oauth_secret,
-                            job_log_url_id, parse_status, timestamp=None,
-                            timeout=None):
+                            job_log_url_id, parse_status, timeout=None):
         """
         Updates the parsing status of a treeherder job

@@ -881,17 +880,11 @@ class TreeherderClient(object):
         :param oauth_secret: oauth secret credential
         :param parse_status: string representing parse status of a treeherder
                              job
-        :param timestamp: timestamp of when parse status was updated (defaults
-                          to now)
         :param timeout: custom timeout in seconds (defaults to class timeout)
         """
-        if timestamp is None:
-            timestamp = time.time()
-
         self._post_json(project, self.UPDATE_ENDPOINT.format(job_log_url_id),
                         oauth_key, oauth_secret,
-                        json.dumps({'parse_status': parse_status,
-                                    'parse_timestamp': timestamp}),
+                        json.dumps({'parse_status': parse_status}),
                         timeout=timeout)


diff --git a/treeherder/model/derived/jobs.py b/treeherder/model/derived/jobs.py
index 7962656a6..fd92385c1 100644
--- a/treeherder/model/derived/jobs.py
+++ b/treeherder/model/derived/jobs.py
@@ -1825,8 +1825,6 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""

         result_sets = []

-        time_now = int(time.time())
-
         if log_placeholders:
             for index, log_ref in enumerate(log_placeholders):
                 job_guid = log_ref[0]
@@ -1837,7 +1835,6 @@

                 # Replace job_guid with id
                 log_placeholders[index][0] = job_id
-                log_placeholders[index].append(time_now)
                 task = dict()

                 # a log can be submitted already parsed. So only schedule
@@ -1927,13 +1924,12 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""
         )
         return data

-    def update_job_log_url_status(self, job_log_url_id,
-                                  parse_status, parse_timestamp):
+    def update_job_log_url_status(self, job_log_url_id, parse_status):

         self.jobs_execute(
             proc='jobs.updates.update_job_log_url',
             debug_show=self.DEBUG,
-            placeholders=[parse_status, parse_timestamp, job_log_url_id])
+            placeholders=[parse_status, job_log_url_id])

     def get_performance_series_from_signatures(self, signatures,
                                                interval_seconds):
diff --git a/treeherder/model/sql/jobs.json b/treeherder/model/sql/jobs.json
index 2612077d8..815fbca27 100644
--- a/treeherder/model/sql/jobs.json
+++ b/treeherder/model/sql/jobs.json
@@ -192,10 +192,9 @@
         `job_id`,
         `name`,
         `url`,
-        `parse_status`,
-        `parse_timestamp`
+        `parse_status`
         )
-        VALUES (?,?,?,?,?)",
+        VALUES (?,?,?,?)",

     "host_type":"master_host"
   },
@@ -332,7 +331,7 @@
   "update_job_log_url":{
      "sql":"UPDATE `job_log_url`
-            SET `parse_status` = ?, `parse_timestamp` = ?
+            SET `parse_status` = ?
             WHERE `id` = ?
             AND `active_status` = 'active'",
     "host_type":"master_host"
   },
@@ -649,13 +648,13 @@
     "host_type":"read_host"
   },
   "get_job_log_url_detail":{
-    "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`, `parse_timestamp`
+    "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`
            FROM job_log_url
            WHERE id = ? and active_status = 'active'",
     "host_type":"read_host"
   },
   "get_job_log_url_list":{
-    "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`, `parse_timestamp`
+    "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`
            FROM job_log_url
            WHERE job_id in (REP0)
            and active_status='active'",
diff --git a/treeherder/model/sql/template_schema/project_jobs.sql.tmpl b/treeherder/model/sql/template_schema/project_jobs.sql.tmpl
index 1d59b3401..74e0d8228 100644
--- a/treeherder/model/sql/template_schema/project_jobs.sql.tmpl
+++ b/treeherder/model/sql/template_schema/project_jobs.sql.tmpl
@@ -381,12 +381,10 @@ CREATE TABLE `job_log_url` (
   `name` varchar(50) COLLATE utf8_bin NOT NULL,
   `url` varchar(255) COLLATE utf8_bin NOT NULL,
   `parse_status` enum('pending', 'parsed', 'failed') COLLATE utf8_bin DEFAULT 'pending',
-  `parse_timestamp` int(10) NOT NULL,
   `active_status` enum('active','onhold','deleted') COLLATE utf8_bin DEFAULT 'active',
   PRIMARY KEY (`id`),
   KEY `idx_job_id` (`job_id`),
   KEY `idx_name` (`name`),
-  KEY `idx_parse_timestamp` (`parse_timestamp`),
   KEY `idx_active_status` (`active_status`),
   KEY `idx_parse_status` (`parse_status`),
   CONSTRAINT `fk_job_log_url` FOREIGN KEY (`job_id`) REFERENCES `job` (`id`)
diff --git a/treeherder/webapp/api/job_log_url.py b/treeherder/webapp/api/job_log_url.py
index 0dbffc4b5..5747d4a99 100644
--- a/treeherder/webapp/api/job_log_url.py
+++ b/treeherder/webapp/api/job_log_url.py
@@ -56,14 +56,11 @@ class JobLogUrlViewSet(viewsets.ViewSet):
         """
         try:
             parse_status = request.DATA["parse_status"]
-            parse_timestamp = request.DATA["parse_timestamp"]
-            jm.update_job_log_url_status(pk, parse_status, parse_timestamp)
+            jm.update_job_log_url_status(pk, parse_status)
             obj = jm.get_job_log_url_detail(pk)
             return Response(obj)
         except KeyError:
-            raise ParseError(detail=("The parse_status and parse_timestamp"
-                                     " parameters are mandatory for this"
-                                     " endpoint"))
+            raise ParseError(detail=("The parse_status parameter is mandatory"
+                                     " for this endpoint"))

     @action()
     @with_jobs
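
Usage sketch, not part of the patch itself: after this change the client sends only the parse status, with no timestamp argument anywhere in the chain. The snippet below shows the updated TreeherderClient.update_parse_status call; it assumes the treeherder-client package of that era (module thclient), its default constructor arguments, placeholder OAuth credentials and a hypothetical job_log_url id.

    from thclient import TreeherderClient

    # Placeholder values; real credentials come from the project's OAuth
    # setup, and the job_log_url id is normally read back from the API.
    oauth_key = 'my-oauth-key'
    oauth_secret = 'my-oauth-secret'
    job_log_url_id = 1234

    client = TreeherderClient()  # assumes the default treeherder host
    # Only 'parse_status' is posted now; 'parse_timestamp' has been removed.
    client.update_parse_status('mozilla-central', oauth_key, oauth_secret,
                               job_log_url_id, 'parsed')

Server side, the same request now reaches update_job_log_url_status(job_log_url_id, parse_status), which binds just two placeholders to the jobs.updates.update_job_log_url statement.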