Mirror of https://github.com/mozilla/treeherder.git
Bug 1178389 - Remove job_log_url.parse_timestamp
Since it's unused.
Parent: 5e6ac65659
Commit: 49d4dfc59d

@@ -12,7 +12,6 @@ import pytest
 from django.core.management import call_command
 from webtest.app import TestApp
 import responses
-import time

 from tests.sampledata import SampleData
 from treeherder.etl.oauth_utils import OAuthCredentials
@@ -398,11 +397,8 @@ def mock_post_collection(monkeypatch, set_oauth_credentials):
 @pytest.fixture
 def mock_update_parse_status(monkeypatch, set_oauth_credentials):
     def _update_parse_status(th_client, project, oauth_key, oauth_secret,
-                             job_log_url_id, parse_status, timestamp=None):
-        if timestamp is None:
-            timestamp = time.time()
-        jsondata = json.dumps({'parse_status': parse_status,
-                               'parse_timestamp': timestamp})
+                             job_log_url_id, parse_status):
+        jsondata = json.dumps({'parse_status': parse_status})
         signed_uri = th_client._get_project_uri(
             project,
             th_client.UPDATE_ENDPOINT.format(job_log_url_id),

@@ -871,8 +871,7 @@ class TreeherderClient(object):
                         timeout=timeout)

     def update_parse_status(self, project, oauth_key, oauth_secret,
-                            job_log_url_id, parse_status, timestamp=None,
-                            timeout=None):
+                            job_log_url_id, parse_status, timeout=None):
         """
         Updates the parsing status of a treeherder job

@@ -881,17 +880,11 @@ class TreeherderClient(object):
         :param oauth_secret: oauth secret credential
         :param parse_status: string representing parse status of a treeherder
                              job
-        :param timestamp: timestamp of when parse status was updated (defaults
-                          to now)
         :param timeout: custom timeout in seconds (defaults to class timeout)
         """
-        if timestamp is None:
-            timestamp = time.time()
-
         self._post_json(project, self.UPDATE_ENDPOINT.format(job_log_url_id),
                         oauth_key, oauth_secret,
-                        json.dumps({'parse_status': parse_status,
-                                    'parse_timestamp': timestamp}),
+                        json.dumps({'parse_status': parse_status}),
                         timeout=timeout)


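
For reference, a minimal sketch of how a caller would invoke the updated client method after this change. The import path, constructor arguments, project name and credentials below are placeholders/assumptions and are not part of this commit:

    # Hypothetical usage of TreeherderClient.update_parse_status after this change.
    from thclient import TreeherderClient  # assumed import path

    client = TreeherderClient(host='treeherder.mozilla.org')  # assumed constructor arguments

    # The timestamp argument is gone; only the parse status string is submitted.
    # 'parsed' is one of the values allowed by the job_log_url.parse_status enum.
    client.update_parse_status(
        'mozilla-central',   # project (placeholder)
        'oauth-key',         # oauth_key (placeholder)
        'oauth-secret',      # oauth_secret (placeholder)
        1234,                # job_log_url_id (placeholder)
        'parsed',            # parse_status
    )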

@@ -1825,8 +1825,6 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""

         result_sets = []

-        time_now = int(time.time())
-
         if log_placeholders:
             for index, log_ref in enumerate(log_placeholders):
                 job_guid = log_ref[0]
@@ -1837,7 +1835,6 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""

                 # Replace job_guid with id
                 log_placeholders[index][0] = job_id
-                log_placeholders[index].append(time_now)
                 task = dict()

                 # a log can be submitted already parsed. So only schedule
@@ -1927,13 +1924,12 @@ into chunks of chunk_size size. Returns the number of result sets deleted"""
         )
         return data

-    def update_job_log_url_status(self, job_log_url_id,
-                                  parse_status, parse_timestamp):
+    def update_job_log_url_status(self, job_log_url_id, parse_status):

         self.jobs_execute(
             proc='jobs.updates.update_job_log_url',
             debug_show=self.DEBUG,
-            placeholders=[parse_status, parse_timestamp, job_log_url_id])
+            placeholders=[parse_status, job_log_url_id])

     def get_performance_series_from_signatures(self, signatures, interval_seconds):


@@ -192,10 +192,9 @@
             `job_id`,
             `name`,
             `url`,
-            `parse_status`,
-            `parse_timestamp`
+            `parse_status`
             )
-            VALUES (?,?,?,?,?)",
+            VALUES (?,?,?,?)",

         "host_type":"master_host"
         },
@@ -332,7 +331,7 @@
     "update_job_log_url":{

         "sql":"UPDATE `job_log_url`
-               SET `parse_status` = ?, `parse_timestamp` = ?
+               SET `parse_status` = ?
                WHERE `id` = ?
                AND `active_status` = 'active'",
         "host_type":"master_host"
@@ -649,13 +648,13 @@
         "host_type":"read_host"
         },
     "get_job_log_url_detail":{
-        "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`, `parse_timestamp`
+        "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`
                 FROM job_log_url
                 WHERE id = ? and active_status = 'active'",
         "host_type":"read_host"
         },
     "get_job_log_url_list":{
-        "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`, `parse_timestamp`
+        "sql": "SELECT `id`, `job_id`, `name`, `url`, `parse_status`
                 FROM job_log_url
                 WHERE job_id in (REP0)
                 and active_status='active'",

@@ -381,12 +381,10 @@ CREATE TABLE `job_log_url` (
   `name` varchar(50) COLLATE utf8_bin NOT NULL,
   `url` varchar(255) COLLATE utf8_bin NOT NULL,
   `parse_status` enum('pending', 'parsed', 'failed') COLLATE utf8_bin DEFAULT 'pending',
-  `parse_timestamp` int(10) NOT NULL,
   `active_status` enum('active','onhold','deleted') COLLATE utf8_bin DEFAULT 'active',
   PRIMARY KEY (`id`),
   KEY `idx_job_id` (`job_id`),
   KEY `idx_name` (`name`),
-  KEY `idx_parse_timestamp` (`parse_timestamp`),
   KEY `idx_active_status` (`active_status`),
   KEY `idx_parse_status` (`parse_status`),
   CONSTRAINT `fk_job_log_url` FOREIGN KEY (`job_id`) REFERENCES `job` (`id`)

@@ -56,14 +56,11 @@ class JobLogUrlViewSet(viewsets.ViewSet):
         """
         try:
             parse_status = request.DATA["parse_status"]
-            parse_timestamp = request.DATA["parse_timestamp"]
-            jm.update_job_log_url_status(pk, parse_status, parse_timestamp)
+            jm.update_job_log_url_status(pk, parse_status)
             obj = jm.get_job_log_url_detail(pk)
             return Response(obj)
         except KeyError:
-            raise ParseError(detail=("The parse_status and parse_timestamp"
-                                     " parameters are mandatory for this"
-                                     " endpoint"))
+            raise ParseError(detail=("The parse_status parameter is mandatory for this endpoint"))

     @action()
     @with_jobs
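
For illustration, the request body that the update_parse_status endpoint now expects carries only the parse status; the exact route is not shown in this diff, so the sketch below covers the payload only:

    import json

    # Body accepted by JobLogUrlViewSet.update_parse_status after this change:
    # "parse_status" is required, "parse_timestamp" is no longer read.
    payload = json.dumps({'parse_status': 'parsed'})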