Merge pull request #118 from mozilla/post-artifacts-api

Post artifacts api
camd 2014-03-11 08:20:07 -07:00
Parents: 2ad4bb608b d10506b710
Commit: 508a320bbb
16 changed files with 376 additions and 147 deletions
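This change adds a POST API for job artifacts and extends the vendored treeherder-client so submitters can build and send artifact collections. A minimal sketch of how a client might use the new pieces, based on the thclient additions in this diff (host, project, guid, and credential values are placeholders):

from thclient import TreeherderArtifactCollection, TreeherderRequest

tac = TreeherderArtifactCollection()
ta = tac.get_artifact({
    "job_guid": "abc123def456",   # placeholder guid
    "name": "Job Info",
    "type": "json",
    "blob": '{"some": "data"}'
})
tac.add(ta)

req = TreeherderRequest(
    protocol='http',
    host='treeherder.example.org',   # placeholder host
    project='mozilla-central',       # placeholder project
    oauth_key='key',                 # placeholder credentials
    oauth_secret='secret',
)
response = req.send(tac)             # POSTs to the new artifact endpoint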

View file

@@ -4,4 +4,4 @@
source /etc/profile.d/treeherder.sh
source ../venv/bin/activate
exec ../venv/bin/python manage.py celeryd -c 3 -E --maxtasksperchild=500 --logfile=/var/log/celery/treeherder_worker.log
exec ../venv/bin/python manage.py celeryd -c 3 -Q default -E --maxtasksperchild=500 --logfile=/var/log/celery/treeherder_worker.log

View file

@@ -24,4 +24,4 @@ oauth2==1.5.211
httplib2==0.7.4
git+git://github.com/jeads/datasource@143ac08d11
git+git://github.com/mozilla/treeherder-client@abe6a85180
git+git://github.com/mozilla/treeherder-client@1dc3644494

View file

@@ -3,15 +3,17 @@ import pytest
import simplejson as json
from webtest.app import TestApp
from django.core.urlresolvers import reverse
from django.template import Context, Template
from treeherder.webapp.wsgi import application
from thclient import TreeherderJobCollection
from thclient import (TreeherderJobCollection, TreeherderRequest)
from treeherder.etl.oauth_utils import OAuthCredentials
from tests.sampledata import SampleData
from tests import test_utils
@pytest.fixture
def pending_jobs():
"""returns a list of buildapi pending jobs"""
@@ -72,7 +74,7 @@ def running_jobs_stored(
@pytest.fixture
def completed_jobs_stored(
jm, completed_jobs, result_set_stored):
jm, completed_jobs, result_set_stored, mock_send_request ):
"""
stores a list of buildapi completed jobs into the objectstore
"""
@@ -88,3 +90,27 @@ def completed_jobs_stored(
@pytest.fixture
def completed_jobs_loaded(jm, completed_jobs_stored):
jm.process_objects(1, raise_errors=True)
@pytest.fixture
def mock_send_request(monkeypatch, jm):
def _send(th_request, th_collection):
OAuthCredentials.set_credentials(SampleData.get_credentials())
credentials = OAuthCredentials.get_credentials(jm.project)
th_request.oauth_key = credentials['consumer_key']
th_request.oauth_secret = credentials['consumer_secret']
signed_uri = th_request.get_signed_uri(
th_collection.to_json(), th_request.get_uri(th_collection)
)
response = TestApp(application).post_json(
str(signed_uri), params=th_collection.get_collection_data()
)
response.getcode = lambda: response.status_int
return response
monkeypatch.setattr(TreeherderRequest, 'send', _send)
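This fixture reroutes TreeherderRequest.send through WebTest's TestApp, so any collection submitted during a test is OAuth-signed with the sample credentials and served by the in-process WSGI application instead of going over the network. A rough sketch of the effect; the helper below is illustrative and not part of this diff:

from thclient import TreeherderJobCollection, TreeherderRequest

def submit_empty_collection(project):
    # with mock_send_request active, this call never leaves the process;
    # the fixture re-signs the request with the test credentials
    tjc = TreeherderJobCollection()
    req = TreeherderRequest(protocol='http', host='localhost', project=project,
                            oauth_key=None, oauth_secret=None)
    return req.send(tjc)   # webtest response exposing getcode()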

View file

@@ -2,6 +2,7 @@ import pytest
import json
from webtest.app import TestApp
from treeherder.etl.mixins import JsonLoaderMixin, OAuthLoaderMixin
from treeherder.etl.oauth_utils import OAuthCredentials
from treeherder.webapp.wsgi import application
from treeherder.etl import common
@@ -14,8 +15,8 @@ def mock_post_json_data(monkeypatch, jm):
th_collection = data[jm.project]
OAuthLoaderMixin.set_credentials( SampleData.get_credentials() )
credentials = OAuthLoaderMixin.get_credentials(jm.project)
OAuthCredentials.set_credentials( SampleData.get_credentials() )
credentials = OAuthCredentials.get_credentials(jm.project)
tr = TreeherderRequest(
protocol='http',

View file

@@ -0,0 +1,33 @@
import pytest
from webtest.app import TestApp
from thclient import TreeherderRequest
from treeherder.etl.oauth_utils import OAuthCredentials
from treeherder.webapp.wsgi import application
from tests.sampledata import SampleData
@pytest.fixture
def mock_send_request(monkeypatch, jm):
def _send(th_request, th_collection):
OAuthCredentials.set_credentials(SampleData.get_credentials())
credentials = OAuthCredentials.get_credentials(jm.project)
th_request.oauth_key = credentials['consumer_key']
th_request.oauth_secret = credentials['consumer_secret']
signed_uri = th_request.get_signed_uri(
th_collection.to_json(), th_request.get_uri(th_collection)
)
response = TestApp(application).post_json(
str(signed_uri), params=th_collection.get_collection_data()
)
response.getcode = lambda: response.status_int
return response
monkeypatch.setattr(TreeherderRequest, 'send', _send)

View file

@@ -23,7 +23,7 @@ def jobs_with_local_log(jm, initial_data):
return [job]
def test_parse_log(jm, initial_data, jobs_with_local_log, sample_resultset):
def test_parse_log(jm, initial_data, jobs_with_local_log, sample_resultset, mock_send_request):
"""
check that at least 2 job_artifacts get inserted when running
a parse_log task

View file

@@ -6,6 +6,7 @@ from webtest.app import TestApp, AppError
from sampledata import SampleData
from treeherder.model.derived.refdata import RefDataManager
from treeherder.etl.mixins import OAuthLoaderMixin
from treeherder.etl.oauth_utils import OAuthCredentials
from treeherder.webapp.wsgi import application
from thclient import TreeherderRequest
@@ -16,9 +17,9 @@ def post_collection(
consumer_key=None, consumer_secret=None):
# Set the credentials
OAuthLoaderMixin.set_credentials( SampleData.get_credentials() )
OAuthCredentials.set_credentials( SampleData.get_credentials() )
credentials = OAuthLoaderMixin.get_credentials(project)
credentials = OAuthCredentials.get_credentials(project)
# The only time the credentials should be overridden are when
# a client needs to test authentication failure confirmation
@@ -55,9 +56,9 @@ def post_job_data(
uri = 'http://localhost{0}'.format(uri)
# Set the credentials
OAuthLoaderMixin.set_credentials( SampleData.get_credentials() )
OAuthCredentials.set_credentials( SampleData.get_credentials() )
credentials = OAuthLoaderMixin.get_credentials(project)
credentials = OAuthCredentials.get_credentials(project)
tr = TreeherderRequest(
protocol='http',

View file

@@ -30,6 +30,8 @@ class BzApiBugProcess(JsonExtractorMixin):
last_fetched = cache.get('bz_last_fetched')
curr_date = datetime.date.today()
bug_list = []
if last_fetched:
# if we have a last_fetched timestamp available
# we don't need pagination.
@@ -41,7 +43,6 @@ class BzApiBugProcess(JsonExtractorMixin):
offset = 0
limit = 500
bug_list = []
# fetch new pages no more than 30 times
# this is a safe guard to not generate an infinite loop
# in case something went wrong
@@ -66,4 +67,7 @@
cache.set('bz_last_fetched', curr_date, 60 * 60 * 24)
rdm = RefDataManager()
rdm.update_bugscache(bug_list)
try:
rdm.update_bugscache(bug_list)
finally:
rdm.disconnect()

View file

@@ -1,9 +1,7 @@
from StringIO import StringIO
import gzip
import os
import urllib2
import logging
import copy
from collections import defaultdict
import simplejson as json
@@ -12,6 +10,8 @@ from thclient import TreeherderRequest
from django.core.urlresolvers import reverse
from django.conf import settings
from treeherder.etl.oauth_utils import OAuthCredentials
logger = logging.getLogger(__name__)
@@ -112,74 +112,14 @@ class ResultSetsLoaderMixin(JsonLoaderMixin):
message = json.loads(response.read())
logger.error("ResultSet loading failed: {0}".format(message['message']))
class OAuthLoaderMixin(object):
credentials = {}
param_keys = set([
'oauth_body_hash',
'oauth_signature',
'oauth_consumer_key',
'oauth_nonce',
'oauth_timestamp',
'oauth_signature_method',
'oauth_version',
'oauth_token',
'user'
])
credentials_file = os.path.join(
os.path.dirname(__file__),
'data',
'credentials.json'
)
@classmethod
def get_parameters(cls, query_params):
parameters = {}
for key in cls.param_keys:
parameters[key] = query_params.get(key, None)
return parameters
@classmethod
def set_credentials(cls, credentials={}):
# Only get the credentials once
if not cls.credentials and not credentials:
try:
with open(cls.credentials_file) as f:
credentials = f.read()
cls.credentials = json.loads(credentials)
except IOError:
msg = ('Credentials file not found at {0}.'
' Try running `manage.py export_project_credentials`'
' to generate them').format(cls.credentials_file)
logger.error(msg)
except e:
logger.error(e)
raise e
else:
cls.credentials = credentials
@classmethod
def get_credentials(cls, project):
return copy.deepcopy( cls.credentials.get(project, {}) )
@classmethod
def get_consumer_secret(cls, project):
return copy.deepcopy( cls.credentials.get(project, {}) )
def load(self, th_collections):
for project in th_collections:
credentials = OAuthLoaderMixin.get_credentials(project)
credentials = OAuthCredentials.get_credentials(project)
th_request = TreeherderRequest(
protocol=settings.TREEHERDER_REQUEST_PROTOCOL,
@@ -187,21 +127,12 @@ class OAuthLoaderMixin(object):
project=project,
oauth_key=credentials.get('consumer_key', None),
oauth_secret=credentials.get('consumer_secret', None)
)
)
response = th_request.send( th_collections[project] )
response = th_request.send(th_collections[project])
if not response or response.status != 200:
message = response.read()
logger.error("collection loading failed: {0}".format(message))
class OAuthLoaderError(Exception):
def __init__(self, msg, Errors):
Exception.__init__(self, msg)
self.Errors = Errors
if not OAuthLoaderMixin.credentials:
# Only set the credentials once when the module is loaded
OAuthLoaderMixin.set_credentials()

View file

@@ -0,0 +1,76 @@
import json
from treeherder import path
import copy
import logging
logger = logging.getLogger(__name__)
class OAuthCredentials():
credentials = {}
param_keys = set([
'oauth_body_hash',
'oauth_signature',
'oauth_consumer_key',
'oauth_nonce',
'oauth_timestamp',
'oauth_signature_method',
'oauth_version',
'oauth_token',
'user'
])
credentials_file = path('etl', 'data', 'credentials.json')
@classmethod
def get_parameters(cls, query_params):
parameters = {}
for key in cls.param_keys:
parameters[key] = query_params.get(key, None)
return parameters
@classmethod
def set_credentials(cls, credentials={}):
# Only get the credentials once
if not cls.credentials and not credentials:
try:
with open(cls.credentials_file) as f:
credentials = f.read()
cls.credentials = json.loads(credentials)
except IOError:
msg = ('Credentials file not found at {0}.'
' Try running `manage.py export_project_credentials`'
' to generate them').format(cls.credentials_file)
logger.error(msg)
except Exception, e:
logger.error(e)
raise e
else:
cls.credentials = credentials
@classmethod
def get_credentials(cls, project):
return copy.deepcopy(cls.credentials.get(project, {}))
@classmethod
def get_consumer_secret(cls, project):
return copy.deepcopy(cls.credentials.get(project, {}) )
class OAuthLoaderError(Exception):
def __init__(self, msg, Errors):
Exception.__init__(self, msg)
self.Errors = Errors
if not OAuthCredentials.credentials:
# Only set the credentials once when the module is loaded
OAuthCredentials.set_credentials()
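OAuthCredentials takes over the credential handling that previously lived on OAuthLoaderMixin: the per-project consumer key and secret are read once from etl/data/credentials.json when the module is imported and handed out as defensive copies. A short sketch of the lookup pattern used throughout this change (project name is a placeholder):

from treeherder.etl.oauth_utils import OAuthCredentials

credentials = OAuthCredentials.get_credentials('mozilla-central')  # placeholder project
consumer_key = credentials.get('consumer_key')
consumer_secret = credentials.get('consumer_secret')
# get_credentials returns a deep copy, so callers cannot mutate the shared cache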

View file

@@ -8,25 +8,27 @@ http://docs.celeryproject.org/en/latest/userguide/canvas.html#guide-canvas
"""
import simplejson as json
import re
import urllib
from celery import task
from django.conf import settings
from django.core.urlresolvers import reverse
from thclient import TreeherderArtifactCollection, TreeherderRequest
from treeherder.model.derived import JobsModel, RefDataManager
from treeherder.log_parser.artifactbuildercollection import ArtifactBuilderCollection
from treeherder.events.publisher import JobFailurePublisher, JobStatusPublisher
from treeherder.etl.common import get_remote_content
from treeherder.etl.oauth_utils import OAuthCredentials
@task(name='parse-log')
def parse_log(project, job_id, result_set_id, check_errors=False):
def parse_log(project, log_url, job_guid, resultset, check_errors=False):
"""
Call ArtifactBuilderCollection on the given job.
"""
pattern_obj = re.compile('\d+:\d+:\d+\s+')
jm = JobsModel(project=project)
rdm = RefDataManager()
open_bugs_cache = {}
closed_bugs_cache = {}
@@ -36,25 +38,22 @@ def parse_log(project, job_id, result_set_id, check_errors=False):
try:
# return the resultset with the job id to identify if the UI wants
# to fetch the whole thing.
resultset = jm.get_result_set_by_id(result_set_id=result_set_id)[0]
del(resultset["active_status"])
del(resultset["revision_hash"])
log_references = jm.get_log_references(job_id)
bugscache_uri = reverse("bugscache-list")
# we may have many log references per job
for log in log_references:
credentials = OAuthCredentials.get_credentials(project)
if log_url:
# parse a log given its url
artifact_bc = ArtifactBuilderCollection(
log['url'],
log_url,
check_errors=check_errors,
)
artifact_bc.parse()
artifact_list = []
for name, artifact in artifact_bc.artifacts.items():
artifact_list.append((job_id, name, 'json', json.dumps(artifact)))
artifact_list.append((job_guid, name, 'json', json.dumps(artifact)))
if check_errors:
# I'll try to begin with a full_text search on the entire row
@@ -70,27 +69,59 @@
clean_line = pattern_obj.sub('', err['line'])
if clean_line not in open_bugs_cache:
open_bugs_cache[clean_line] = rdm.get_suggested_bugs(
clean_line)
query_params = urllib.urlencode({
"search": clean_line,
"status": 'open'
})
open_bugs_cache[clean_line] = get_remote_content(
"{0}{1}?{2}".format(
settings.API_HOSTNAME,
bugscache_uri,
query_params)
)
if clean_line not in closed_bugs_cache:
closed_bugs_cache[clean_line] = rdm.get_suggested_bugs(
clean_line, open_bugs=False)
query_params = urllib.urlencode({
"search": clean_line,
"status": 'closed'
})
closed_bugs_cache[clean_line] = get_remote_content(
"{0}{1}?{2}".format(
settings.API_HOSTNAME,
bugscache_uri,
query_params)
)
open_bugs_suggestions[ err['line'] ] = open_bugs_cache[clean_line]
closed_bugs_suggestions[ err['line'] ] = closed_bugs_cache[clean_line]
artifact_list.append((job_id, 'Open bugs', 'json', json.dumps(open_bugs_suggestions)))
artifact_list.append((job_id, 'Closed bugs', 'json', json.dumps(closed_bugs_suggestions)))
artifact_list.append((job_guid, 'Open bugs', 'json', json.dumps(open_bugs_suggestions)))
artifact_list.append((job_guid, 'Closed bugs', 'json', json.dumps(closed_bugs_suggestions)))
# store the artifacts generated
jm.store_job_artifact(artifact_list)
status_publisher.publish(job_id, resultset, project, 'processed')
tac = TreeherderArtifactCollection()
for artifact in artifact_list:
ta = tac.get_artifact({
"job_guid": artifact[0],
"name": artifact[1],
"type": artifact[2],
"blob": artifact[3]
})
tac.add(ta)
req = TreeherderRequest(
protocol=settings.TREEHERDER_REQUEST_PROTOCOL,
host=settings.TREEHERDER_REQUEST_HOST,
project=project,
oauth_key=credentials.get('consumer_key', None),
oauth_secret=credentials.get('consumer_secret', None),
)
req.send(tac)
status_publisher.publish(job_guid, resultset, project, 'processed')
if check_errors:
failure_publisher.publish(job_id, project)
failure_publisher.publish(job_guid, project)
finally:
rdm.disconnect()
jm.disconnect()
status_publisher.disconnect()
failure_publisher.disconnect()
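The task now receives everything it needs up front (project, log URL, job_guid, and the result set row) and posts its artifacts back through the OAuth-signed client instead of writing them with JobsModel. Roughly, the new scheduling contract looks like this, assuming the task's usual module path; all argument values below are placeholders:

from treeherder.log_parser.tasks import parse_log

parse_log.apply_async(
    args=[
        'mozilla-central',                        # project
        'http://example.com/build.log.gz',        # log_url
        'abc123def456',                           # job_guid
        {'id': 5, 'push_timestamp': 1394550000},  # row from get_push_timestamp_lookup
    ],
    kwargs={'check_errors': False},
    routing_key='parse_log.success',
)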

View file

@@ -546,15 +546,25 @@ class JobsModel(TreeherderModelBase):
return data
def get_result_set_by_id(self, result_set_id):
"""Get a single result_set by ``id``."""
proc = "jobs.selects.get_result_set_by_id"
def get_push_timestamp_lookup(self, result_set_ids):
"""Get the push timestamp for a list of result_set."""
# Generate a list of result_set_ids
id_placeholders = []
repl = []
for data in result_set_ids:
id_placeholders.append('%s')
repl.append(','.join(id_placeholders))
proc = "jobs.selects.get_result_set_push_timestamp"
data = self.get_jobs_dhub().execute(
proc=proc,
placeholders=[result_set_id],
placeholders=result_set_ids,
debug_show=self.DEBUG,
replace=repl,
return_type="dict",
key_column="id"
)
return data
##################
@@ -946,12 +956,7 @@ class JobsModel(TreeherderModelBase):
url = log.get('url', 'unknown')
url = url[0:255]
log_placeholders.append(
[
job_guid,
name,
url
] )
log_placeholders.append([job_guid, name, url])
artifact = job.get('artifact', {})
if artifact:
@@ -1057,9 +1062,12 @@
placeholders=job_placeholders,
executemany=True )
job_guid_where_in_clause = ",".join(job_guid_where_in_list)
return self.get_job_ids_by_guid(job_guid_list)
def get_job_ids_by_guid(self, job_guid_list):
job_guid_where_in_clause = ",".join(["%s"] * len(job_guid_list))
# Retrieve new job ids
job_id_lookup = self.get_jobs_dhub().execute(
proc='jobs.selects.get_job_ids_by_guids',
debug_show=self.DEBUG,
@@ -1070,6 +1078,7 @@
return job_id_lookup
def _load_log_urls(self, log_placeholders, job_id_lookup,
job_results):
@@ -1078,18 +1087,22 @@
tasks = []
result_sets = []
if log_placeholders:
for index, log_ref in enumerate(log_placeholders):
job_guid = log_placeholders[index][0]
job_guid = log_ref[0]
job_id = job_id_lookup[job_guid]['id']
result = job_results[job_guid]
result_set_id = job_id_lookup[job_guid]['result_set_id']
result_sets.append(result_set_id)
# Replace job_guid with id
log_placeholders[index][0] = job_id
task = dict()
task['id'] = job_id
task['job_guid'] = job_guid
task['log_url'] = log_ref[2]
task['result_set_id'] = result_set_id
if result != 'success':
task['check_errors'] = True
@@ -1099,6 +1112,9 @@
task['routing_key'] = 'parse_log.success'
tasks.append(task)
# a dict of result_set_id => push_timestamp
push_timestamp_lookup = self.get_push_timestamp_lookup(result_sets)
# Store the log references
self.get_jobs_dhub().execute(
proc='jobs.inserts.set_job_log_url',
@@ -1107,9 +1123,16 @@
executemany=True)
for task in tasks:
parse_log.apply_async(args=[self.project, task['id'], task['result_set_id']],
kwargs={'check_errors': task['check_errors']},
routing_key=task['routing_key'])
parse_log.apply_async(
args=[
self.project,
task['log_url'],
task['job_guid'],
push_timestamp_lookup[task['result_set_id']]
],
kwargs={'check_errors': task['check_errors']},
routing_key=task['routing_key']
)
def store_job_artifact(self, artifact_placeholders):
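Both new JobsModel helpers return datasource dicts keyed on the lookup column, so callers can index into them directly. Roughly, with illustrative ids and timestamps:

from treeherder.model.derived import JobsModel

jm = JobsModel(project='mozilla-central')   # placeholder project
try:
    id_lookup = jm.get_job_ids_by_guid(['abc123def456'])
    # e.g. {'abc123def456': {'id': 42, 'result_set_id': 5, ...}}
    ts_lookup = jm.get_push_timestamp_lookup([5])
    # e.g. {5: {'id': 5, 'push_timestamp': 1394550000}}
finally:
    jm.disconnect()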

View file

@@ -426,10 +426,10 @@
"host": "read_host"
},
"get_result_set_by_id":{
"sql":"SELECT *
"get_result_set_push_timestamp":{
"sql":"SELECT id, push_timestamp
FROM result_set
WHERE id = ?",
WHERE id IN (REP0)",
"host": "read_host"
},
"get_result_set_job_list":{

View file

@@ -8,16 +8,17 @@ from rest_framework.response import Response
from rest_framework.decorators import action, link
from rest_framework.reverse import reverse
from rest_framework.exceptions import ParseError
from rest_framework.authentication import SessionAuthentication
from treeherder.webapp.api.permissions import IsStaffOrReadOnly
from treeherder.model import models
from treeherder.model.derived import (JobsModel, DatasetNotFoundError,
ObjectNotFoundException)
RefDataManager, ObjectNotFoundException)
from treeherder.webapp.api.utils import UrlQueryFilter
from treeherder.etl.mixins import OAuthLoaderMixin
from treeherder.etl.oauth_utils import OAuthCredentials
def oauth_required(func):
@@ -31,7 +32,7 @@ def oauth_required(func):
project = kwargs.get('project', None)
# Get the project credentials
project_credentials = OAuthLoaderMixin.get_credentials(project)
project_credentials = OAuthCredentials.get_credentials(project)
if not project_credentials:
msg = {
@@ -40,7 +41,7 @@
}
return Response(msg, 500)
parameters = OAuthLoaderMixin.get_parameters(request.QUERY_PARAMS)
parameters = OAuthCredentials.get_parameters(request.QUERY_PARAMS)
oauth_body_hash = parameters.get('oauth_body_hash', None)
oauth_signature = parameters.get('oauth_signature', None)
@@ -216,6 +217,25 @@ class ArtifactViewSet(viewsets.ViewSet):
objs = jm.get_job_artifact_list(offset, count, filter.conditions)
return Response(objs)
@with_jobs
@oauth_required
def create(self, request, project, jm):
artifact_data = []
job_guids = [x['job_guid'] for x in request.DATA]
job_id_lookup = jm.get_job_ids_by_guid(job_guids)
for datum in request.DATA:
artifact_data.append((
job_id_lookup[datum['job_guid']]['id'],
datum['name'],
datum['type'],
datum['blob']
))
jm.store_job_artifact(artifact_data)
return Response({'message': 'Artifacts stored successfully'})
class NoteViewSet(viewsets.ViewSet):
authentication_classes = (SessionAuthentication,)
@@ -643,7 +663,6 @@ class BugJobMapViewSet(viewsets.ViewSet):
return Response(objs)
#####################
# Refdata ViewSets
#####################
@@ -694,6 +713,33 @@ class BugscacheViewSet(viewsets.ReadOnlyModelViewSet):
"""ViewSet for the refdata Bugscache model"""
model = models.Bugscache
def list(self, request):
"""
Retrieves a list of bugs from the bugs cache
search -- Mandatory term of search
status -- Optional filter on the status. Can be 'open' or 'closed'. Open by default
"""
search_term = request.QUERY_PARAMS.get("search", None)
if not search_term:
return Response({"message": "the 'search' parameter is mandatory"}, status=400)
status = request.QUERY_PARAMS.get("status", "open")
if not status in ("open", "closed"):
return Response({"message": "status must be 'open' or 'closed'"}, status=400)
open_only = True if status == "open" else False
rdm = RefDataManager()
try:
suggested_bugs = rdm.get_suggested_bugs(search_term, open_only)
finally:
rdm.disconnect()
return Response(suggested_bugs)
class MachineViewSet(viewsets.ReadOnlyModelViewSet):
"""ViewSet for the refdata Machine model"""

vendor/thclient/__init__.py (vendored): 1 line changed
View file

@@ -4,4 +4,5 @@
from .client import (TreeherderJob, TreeherderJobCollection,
TreeherderRevision, TreeherderResultSet, TreeherderResultSetCollection,
TreeherderArtifact, TreeherderArtifactCollection,
TreeherderClientError, TreeherderRequest)

vendor/thclient/client.py (vendored): 72 lines changed
View file

@@ -445,6 +445,50 @@ class TreeherderResultSet(TreeherderData, ValidatorMixin):
def get_revision(self, data={}):
return TreeherderRevision(data)
class TreeherderArtifact(TreeherderData, ValidatorMixin):
"""
Supports building a treeherder job artifact
"""
def __init__(self, data={}):
super(TreeherderArtifact, self).__init__(data)
# Provide minimal json structure validation
self.required_properties = {
'blob': {'cb': self.validate_existence},
'type': {'cb': self.validate_existence},
'name': {'cb': self.validate_existence},
'job_guid': {'cb': self.validate_existence}
}
def init_data(self):
self.data = {
# Stored in project_jobs_1.artifact.blob
'blob': '',
# Stored in project_jobs_1.artifact.type
'type': '',
# Stored in project_jobs_1.artifact.name
'name': '',
# Stored in project_jobs_1.artifact.job_guid
'job_guid': None
}
def add_blob(self, blob):
self.data['blob'] = blob
def add_type(self, type):
self.data['type'] = type
def add_name(self, name):
self.data['name'] = name
def add_job_guid(self, job_guid):
self.data['job_guid'] = job_guid
class TreeherderCollection(object):
"""
Base class for treeherder data collections
@@ -519,6 +563,22 @@ class TreeherderResultSetCollection(TreeherderCollection):
return TreeherderResultSet(data)
class TreeherderArtifactCollection(TreeherderCollection):
"""
Collection of job artifacts
"""
def __init__(self, data=[]):
super(TreeherderArtifactCollection, self).__init__(data)
self.endpoint_base = 'artifact'
def get_artifact(self, data={}):
return TreeherderArtifact(data)
class TreeherderRequest(object):
"""
Treeherder request object that manages test submission.
@@ -554,13 +614,9 @@ class TreeherderRequest(object):
Send given treeherder collection instance data to server; returns httplib Response.
"""
if (not isinstance(collection_inst, TreeherderResultSetCollection)) and \
(not isinstance(collection_inst, TreeherderJobCollection)):
msg = '{0} is an invalid collection class type, should be {1} or {2}'.format(
type(collection_inst),
type(TreeherderResultSetCollection),
type(TreeherderJobCollection))
if not isinstance(collection_inst, TreeherderCollection):
msg = '{0} should be an instance of TreeherderCollection'.format(type(collection_inst))
raise TreeherderClientError(msg, [])
@@ -627,7 +683,7 @@
)
except AssertionError, e:
print 'uri: %s' % uri
print 'body: %s' % body
print 'body: %s' % serialized_body
raise
signature_method = oauth.SignatureMethod_HMAC_SHA1()
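The new artifact type follows the same builder pattern as the other client classes, and TreeherderRequest.send now accepts any TreeherderCollection subclass, so an artifact collection can be submitted exactly like a job or result set collection. A short construction sketch (field values are placeholders):

from thclient import TreeherderArtifact, TreeherderArtifactCollection

ta = TreeherderArtifact()
ta.add_job_guid('abc123def456')    # placeholder guid
ta.add_name('Structured Log')
ta.add_type('json')
ta.add_blob('{"step_data": []}')

tac = TreeherderArtifactCollection()
tac.add(ta)                        # ready to pass to TreeherderRequest.send()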