Merge pull request #1051 from jgraham/autoclassify_integration

Add integration between autoclassification and manual starring.
Mauro Doglio 2015-10-23 11:38:57 +01:00
Parent 50172112e4 67b335f4c5
Commit dda4ee2562
17 changed files with 496 additions and 92 deletions

View file

@@ -1,20 +1,31 @@
from django.core.management import call_command
from treeherder.autoclassify.detectors import (ManualDetector,
TestFailureDetector)
from treeherder.autoclassify.matchers import PreciseTestMatcher
from treeherder.model.models import (Matcher,
Repository)
from .utils import (create_failure_lines,
from .utils import (create_bug_suggestions,
create_failure_lines,
log_line,
register_detectors,
register_matchers,
test_line)
def test_classify_test_failure(activate_responses, jm, eleven_jobs_stored, initial_data,
failure_lines, classified_failures):
def autoclassify(project, job, test_failure_lines):
register_matchers(PreciseTestMatcher)
call_command('autoclassify', job['job_guid'], project)
repository = Repository.objects.get(name=jm.project)
for item in test_failure_lines:
item.refresh_from_db()
def test_classify_test_failure(activate_responses, jm, test_project, test_repository,
eleven_jobs_stored, initial_data, failure_lines,
classified_failures):
job = jm.get_job(2)[0]
test_failure_lines = create_failure_lines(repository,
test_failure_lines = create_failure_lines(test_repository,
job["job_guid"],
[(test_line, {}),
(test_line, {"subtest": "subtest2"}),
@@ -22,14 +33,7 @@ def test_classify_test_failure(activate_responses, jm, eleven_jobs_stored, initi
(test_line, {"expected": "ERROR"}),
(test_line, {"message": "message2"})])
# Poke some internal state so that we only use a single matcher for the test
Matcher._matcher_funcs = {}
Matcher.objects.register_matcher(PreciseTestMatcher)
call_command('autoclassify', job['job_guid'], jm.project)
for item in test_failure_lines:
item.refresh_from_db()
autoclassify(jm.project, job, test_failure_lines)
expected_classified = test_failure_lines[:2]
expected_unclassified = test_failure_lines[2:]
@@ -39,3 +43,118 @@ def test_classify_test_failure(activate_responses, jm, eleven_jobs_stored, initi
for item in expected_unclassified:
assert item.classified_failures.count() == 0
def test_autoclassify_update_job_classification(activate_responses, jm, test_repository,
test_project, eleven_jobs_stored, initial_data,
failure_lines, classified_failures):
job = jm.get_job(2)[0]
for item in classified_failures:
item.bug_number = "1234"
item.save()
create_bug_suggestions(job, test_project, {"search": "TEST-UNEXPECTED-FAIL | test1 | message1"})
test_failure_lines = create_failure_lines(test_repository,
job["job_guid"],
[(test_line, {})])
autoclassify(test_project, job, test_failure_lines)
notes = jm.get_job_note_list(job["id"])
assert len(notes) == 1
bugs = jm.get_bug_job_map_list(0, 100, conditions={"job_id": set([("=", job["id"])])})
assert len(bugs) == 1
assert bugs[0]["bug_id"] == 1234
def test_autoclassify_no_update_job_classification(activate_responses, jm, test_repository,
test_project, eleven_jobs_stored,
initial_data, failure_lines,
classified_failures):
job = jm.get_job(2)[0]
create_bug_suggestions(job, test_project,
{"search": "TEST-UNEXPECTED-FAIL | test1 | message1"},
{"search": "Some error that isn't in the structured logs"})
test_failure_lines = create_failure_lines(test_repository,
job["job_guid"],
[(test_line, {})])
autoclassify(test_project, job, test_failure_lines)
notes = jm.get_job_note_list(job["id"])
assert len(notes) == 0
def test_autoclassified_after_manual_classification(activate_responses, jm, test_repository,
test_project, eleven_jobs_stored,
initial_data, failure_lines):
register_detectors(ManualDetector, TestFailureDetector)
job = jm.get_job(2)[0]
create_bug_suggestions(job, test_project, {"search": "TEST-UNEXPECTED-FAIL | test1 | message1"})
test_failure_lines = create_failure_lines(test_repository,
job["job_guid"],
[(test_line, {})])
jm.insert_job_note(job["id"], 4, "test", "")
for item in test_failure_lines:
item.refresh_from_db()
assert len(test_failure_lines[0].matches.all()) == 1
assert test_failure_lines[0].matches.all()[0].is_best
def test_autoclassified_no_update_after_manual_classification_1(activate_responses, jm,
test_repository, test_project,
eleven_jobs_stored,
initial_data):
register_detectors(ManualDetector, TestFailureDetector)
job = jm.get_job(2)[0]
create_bug_suggestions(job, test_project,
{"search": "TEST-UNEXPECTED-FAIL | test1 | message1"})
# Line type won't be detected by the detectors we have registered
test_failure_lines = create_failure_lines(test_repository,
job["job_guid"],
[(log_line, {})])
jm.insert_job_note(job["id"], 4, "test", "")
for item in test_failure_lines:
item.refresh_from_db()
assert len(test_failure_lines[0].matches.all()) == 0
def test_autoclassified_no_update_after_manual_classification_2(activate_responses, jm,
test_repository, test_project,
eleven_jobs_stored,
initial_data):
register_detectors(ManualDetector, TestFailureDetector)
job = jm.get_job(2)[0]
# Too many failure lines
test_failure_lines = create_failure_lines(test_repository,
job["job_guid"],
[(log_line, {}),
(test_line, {"subtest": "subtest2"})])
create_bug_suggestions(job, test_project, {"search": "TEST-UNEXPECTED-FAIL | test1 | message1"})
jm.insert_job_note(job["id"], 4, "test", "")
for item in test_failure_lines:
item.refresh_from_db()
assert len(test_failure_lines[0].matches.all()) == 0
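
The manual-starring tests above exercise the chain added later in this diff: an intermittent-type job note triggers JobsModel.update_autoclassification, which attributes a match to the "ManualDetector" matcher when the job has exactly one failure line and one bug suggestion. A condensed sketch of that round trip, assuming the same fixtures (jm, test_repository, test_project) and helpers used in the tests:

from treeherder.autoclassify.detectors import (ManualDetector,
                                               TestFailureDetector)
from tests.autoclassify.utils import (create_bug_suggestions,
                                      create_failure_lines,
                                      register_detectors,
                                      test_line)

def manual_star_roundtrip(jm, test_repository, test_project):
    register_detectors(ManualDetector, TestFailureDetector)
    job = jm.get_job(2)[0]
    create_bug_suggestions(job, test_project,
                           {"search": "TEST-UNEXPECTED-FAIL | test1 | message1"})
    lines = create_failure_lines(test_repository, job["job_guid"],
                                 [(test_line, {})])
    # Classification id 4 is "intermittent"; inserting the note is what
    # kicks off update_autoclassification in the JobsModel changes below.
    jm.insert_job_note(job["id"], 4, "test", "")
    lines[0].refresh_from_db()
    return lines[0].matches.all()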

View file

@@ -3,7 +3,7 @@ from django.core.management import call_command
from treeherder.autoclassify.detectors import TestFailureDetector
from treeherder.autoclassify.matchers import PreciseTestMatcher
from treeherder.model.models import (ClassifiedFailure,
Matcher,
MatcherManager,
Repository)
from .utils import (create_failure_lines,
@@ -26,11 +26,11 @@ def test_detect_intermittents(activate_responses, jm, eleven_jobs_stored, initia
old_failure_ids = set(item.id for item in ClassifiedFailure.objects.all())
# Poke some internal state so that we only use a single matcher for the test
Matcher._matcher_funcs = {}
Matcher.objects.register_matcher(PreciseTestMatcher)
MatcherManager._matcher_funcs = {}
MatcherManager.register_matcher(PreciseTestMatcher)
Matcher._detector_funcs = {}
detector = Matcher.objects.register_detector(TestFailureDetector)
MatcherManager._detector_funcs = {}
detector = MatcherManager.register_detector(TestFailureDetector)
call_command('detect_intermittents', retrigger['job_guid'], jm.project)

View file

@@ -1,7 +1,13 @@
from treeherder.model.models import FailureLine
import json
import zlib
from treeherder.model.derived.artifacts import ArtifactsModel
from treeherder.model.models import (FailureLine,
MatcherManager)
test_line = {"action": "test_result", "test": "test1", "subtest": "subtest1",
"status": "FAIL", "expected": "PASS", "message": "message1"}
log_line = {"action": "log", "level": "ERROR", "message": "message1"}
def create_failure_lines(repository, job_guid, failure_line_list):
@@ -17,3 +23,31 @@ def create_failure_lines(repository, job_guid, failure_line_list):
failure_lines.append(failure_line)
return failure_lines
def create_bug_suggestions(job, project, *bug_suggestions):
for item in bug_suggestions:
for key in ["search_terms", "bugs"]:
if key not in item:
item[key] = []
bug_suggestions_placeholders = [
job["id"], 'Bug suggestions',
'json', zlib.compress(json.dumps(bug_suggestions)),
job["id"], 'Bug suggestions',
]
with ArtifactsModel(project) as artifacts_model:
artifacts_model.store_job_artifact([bug_suggestions_placeholders])
def register_matchers(*args):
MatcherManager._matcher_funcs = {}
for item in args:
MatcherManager.register_matcher(item)
def register_detectors(*args):
MatcherManager._detector_funcs = {}
for item in args:
MatcherManager.register_detector(item)
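
A quick usage sketch of the new create_bug_suggestions helper: each suggestion dict is padded with empty "search_terms"/"bugs" keys, and the list is stored zlib-compressed as a JSON "Bug suggestions" job artifact. The job dict and project name below are placeholders; in the tests they come from jm.get_job(...) and the test_project fixture.

# Hypothetical call matching the structured failure line used in the tests.
create_bug_suggestions({"id": 123}, "test_treeherder",
                       {"search": "TEST-UNEXPECTED-FAIL | test1 | message1"},
                       {"search": "Some unrelated error", "bugs": [1234]})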

View file

@@ -450,17 +450,13 @@ def mock_error_summary(monkeypatch):
@pytest.fixture
def failure_lines(jm, eleven_jobs_stored, initial_data):
from treeherder.model.models import RepositoryGroup, Repository
def failure_lines(jm, test_repository, eleven_jobs_stored, initial_data):
from tests.autoclassify.utils import test_line, create_failure_lines
test_repository.save()
job = jm.get_job(1)[0]
repository_group = RepositoryGroup.objects.create(name="repo_group")
repository = Repository.objects.create(name=jm.project,
repository_group=repository_group)
return create_failure_lines(repository,
return create_failure_lines(test_repository,
job["job_guid"],
[(test_line, {}),
(test_line, {"subtest": "subtest2"})])
@@ -468,7 +464,7 @@ def failure_lines(jm, eleven_jobs_stored, initial_data):
@pytest.fixture
def classified_failures(request, jm, eleven_jobs_stored, initial_data, failure_lines):
from treeherder.model.models import ClassifiedFailure, FailureMatch, Matcher
from treeherder.model.models import ClassifiedFailure, FailureMatch, MatcherManager
from treeherder.autoclassify import detectors
job_1 = jm.get_job(1)[0]
@@ -477,11 +473,13 @@ def classified_failures(request, jm, eleven_jobs_stored, initial_data, failure_l
def __call__(self, failure_lines):
pass
test_matcher = Matcher.objects.register_detector(TreeherderUnitTestDetector)
test_matcher = MatcherManager._detector_funcs = {}
test_matcher = MatcherManager._matcher_funcs = {}
test_matcher = MatcherManager.register_detector(TreeherderUnitTestDetector)
def finalize():
Matcher._detector_funcs = {}
Matcher._matcher_funcs = {}
MatcherManager._detector_funcs = {}
MatcherManager._matcher_funcs = {}
request.addfinalizer(finalize)
classified_failures = []
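
The classified_failures fixture above resets MatcherManager's class-level registries, registers a throwaway detector, and restores a clean slate in a finalizer. A minimal sketch of that pattern for reuse in other tests (the helper name is illustrative):

from treeherder.model.models import MatcherManager

def register_isolated_detector(request, detector_cls):
    MatcherManager._detector_funcs = {}
    MatcherManager._matcher_funcs = {}
    instance = MatcherManager.register_detector(detector_cls)

    def finalize():
        # Leave no detectors or matchers registered for later tests.
        MatcherManager._detector_funcs = {}
        MatcherManager._matcher_funcs = {}
    request.addfinalizer(finalize)
    return instance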

View file

@@ -2,7 +2,7 @@ import logging
from abc import (ABCMeta,
abstractmethod)
from treeherder.model import models
from treeherder.model.models import MatcherManager
logger = logging.getLogger(__name__)
@@ -34,6 +34,13 @@ class TestFailureDetector(Detector):
return rv
class ManualDetector(Detector):
"""Small hack; this ensures that there's a matcher object indicating that a match
was by manual association, but which never automatically matches any lines"""
def __call__(self, failure_lines):
return []
def register():
for obj in [TestFailureDetector]:
models.Matcher.objects.register_detector(obj)
for obj in [ManualDetector, TestFailureDetector]:
MatcherManager.register_detector(obj)
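
To make the "small hack" above concrete: registering ManualDetector creates a Matcher row named "ManualDetector" that JobsModel.update_autoclassification (further down in this diff) can attribute manual classifications to, while its __call__ returning [] means it never matches lines automatically. A sketch, assuming register() has already run:

from treeherder.model.models import Matcher

def classify_manually(failure_line, bug_number=None):
    # The row exists because register() registered ManualDetector above.
    manual_detector = Matcher.objects.get(name="ManualDetector")
    failure_line.set_classification(manual_detector, bug_number=bug_number)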

View file

@@ -4,7 +4,7 @@ from collections import defaultdict
from django.core.management.base import (BaseCommand,
CommandError)
from treeherder.autoclassify import matchers
from treeherder.model.derived import JobsModel
from treeherder.model.models import (FailureLine,
FailureMatch,
Matcher)
@@ -14,9 +14,6 @@ logger = logging.getLogger(__name__)
# The minimum goodness of match we need to mark a particular match as the best match
AUTOCLASSIFY_CUTOFF_RATIO = 0.8
# Initialisation needed to associate matcher functions with the matcher objects
matchers.register()
class Command(BaseCommand):
args = '<job_guid>, <repository>'
@@ -26,12 +23,13 @@ class Command(BaseCommand):
if not len(args) == 2:
raise CommandError('2 arguments required, %s given' % len(args))
job_id, repository = args
job_guid, repository = args
match_errors(repository, job_id)
with JobsModel(repository) as jm:
match_errors(repository, jm, job_guid)
def match_errors(repository, job_guid):
def match_errors(repository, jm, job_guid):
unmatched_failures = FailureLine.objects.unmatched_for_job(repository, job_guid)
if not unmatched_failures:
@@ -61,6 +59,10 @@ def match_errors(repository, job_guid):
best_match.is_best = True
best_match.save()
if all_matched:
job_id = jm.get_job_ids_by_guid([job_guid])[job_guid]["id"]
jm.update_after_autoclassification(job_id)
def all_lines_matched(failure_lines):
failure_score_dict = defaultdict(list)
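
Two things change above: matcher registration no longer happens at import time (it is now lazy, via the MatcherManager/LazyClassData changes further down), and match_errors receives the open JobsModel so that a fully matched job can have its classification updated via jm.update_after_autoclassification(job_id). A sketch of invoking the command; the job guid and project below are placeholder values:

from django.core.management import call_command

# Equivalent to: ./manage.py autoclassify <job_guid> <project>
call_command('autoclassify', '1234567890abcdef', 'test_treeherder')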

View file

@@ -3,7 +3,6 @@ import logging
from django.core.management.base import (BaseCommand,
CommandError)
from treeherder.autoclassify import detectors
from treeherder.model.derived import JobsModel
from treeherder.model.models import (FailureLine,
Matcher)
@@ -12,8 +11,6 @@ from .autoclassify import match_errors
logger = logging.getLogger(__name__)
detectors.register()
class Command(BaseCommand):
args = '<job_guid>, <repository>'
@@ -24,13 +21,12 @@ class Command(BaseCommand):
raise CommandError('2 arguments required, %s given' % len(args))
job_guid, repository = args
with JobsModel(repository) as jobs_model:
jobs = jobs_model.get_job_repeats(job_guid)
add_new_intermittents(repository, jobs)
with JobsModel(repository) as jm:
jobs = jm.get_job_repeats(job_guid)
add_new_intermittents(repository, jm, jobs)
def add_new_intermittents(repository, jobs):
def add_new_intermittents(repository, jm, jobs):
# The approach here is currently to look for new intermittents to add, one at a time
# and then rerun the matching on other jobs
# TODO: limit the possible matches to those that have just been added
@@ -68,7 +64,7 @@ def add_new_intermittents(repository, jobs):
for index in line_indicies:
failure = unmatched_lines[index]
failure.create_new_classification(detector.db_object)
failure.set_classification(detector.db_object)
new_matches.add(failure.id)
if new_matches:
@@ -76,4 +72,4 @@ def add_new_intermittents(repository, jobs):
if rematch_job == job:
continue
logger.debug("Trying rematch on job %s" % (rematch_job["job_guid"]))
match_errors(repository, rematch_job["job_guid"])
match_errors(repository, jm, rematch_job["job_guid"])

View file

@@ -3,8 +3,8 @@ from abc import (ABCMeta,
abstractmethod)
from collections import namedtuple
from treeherder.model import models
from treeherder.model.models import FailureMatch
from treeherder.model.models import (FailureMatch,
MatcherManager)
logger = logging.getLogger(__name__)
@@ -59,4 +59,4 @@ class PreciseTestMatcher(Matcher):
def register():
for obj in [PreciseTestMatcher]:
models.Matcher.objects.register_matcher(obj)
MatcherManager.register_matcher(obj)
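
As with the detectors, matcher registration now goes through a MatcherManager classmethod rather than the Matcher model manager. A minimal sketch:

from treeherder.autoclassify.matchers import PreciseTestMatcher
from treeherder.model.models import MatcherManager

# Creates (or fetches) the Matcher row named "PreciseTestMatcher" and caches a
# PreciseTestMatcher instance in MatcherManager._matcher_funcs.
matcher = MatcherManager.register_matcher(PreciseTestMatcher)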

View file

@@ -1,4 +1,5 @@
import logging
import re
import time
from datetime import datetime
@@ -11,8 +12,12 @@ from treeherder.etl.common import get_guid_root
from treeherder.events.publisher import JobStatusPublisher
from treeherder.model import (error_summary,
utils)
from treeherder.model.models import (Datasource,
ExclusionProfile)
from treeherder.model.models import (ClassifiedFailure,
Datasource,
ExclusionProfile,
FailureClassification,
FailureLine,
Matcher)
from treeherder.model.tasks import (populate_error_summary,
publish_job_action,
publish_resultset,
@@ -391,6 +396,12 @@ class JobsModel(TreeherderModelBase):
)
self.update_last_job_classification(job_id)
intermittent_ids = FailureClassification.objects.filter(
name__in=["intermittent", "intermittent needs filing"]).values_list('id', flat=True)
if who != "autoclassifier" and failure_classification_id in intermittent_ids:
self.update_autoclassification(job_id)
def delete_job_note(self, note_id, job_id):
"""
Delete a job note and updates the failure classification for that job
@@ -404,7 +415,114 @@
debug_show=self.DEBUG
)
self.update_last_job_classification(job_id)
def update_autoclassification(self, job_id):
"""
If a job is manually classified and has a single line in the logs matching a single
FailureLine, but the FailureLine has not matched any ClassifiedFailure, add a
new match due to the manual classification.
"""
failure_line = self.manual_classification_line(job_id)
if failure_line is None:
return
manual_detector = Matcher.objects.get(name="ManualDetector")
failure_line.set_classification(manual_detector)
def manual_classification_line(self, job_id):
"""
Return the FailureLine from a job if it can be manually classified as a side effect
of the overall job being classified.
Otherwise return None.
"""
job = self.get_job(job_id)[0]
try:
failure_lines = [FailureLine.objects.get(job_guid=job["job_guid"])]
except (FailureLine.DoesNotExist, FailureLine.MultipleObjectsReturned):
return None
bug_suggestion_lines = self.filter_bug_suggestions(self.bug_suggestions(job_id))
if len(bug_suggestion_lines) != 1:
return None
# Check that some detector would match this. This is being used as an indication
# that the autoclassifier will be able to work on this classification
if not any(detector(failure_lines)
for detector in Matcher.objects.registered_detectors()):
return None
return failure_lines[0]
def filter_bug_suggestions(self, suggestion_lines):
remove = [re.compile("Return code: \d+")]
rv = []
for item in suggestion_lines:
if not any(regexp.match(item["search"]) for regexp in remove):
rv.append(item)
return rv
def update_after_autoclassification(self, job_id):
if self.fully_autoclassified(job_id) and len(self.get_job_note_list(job_id)) == 0:
self.insert_autoclassify_job_note(job_id)
def fully_autoclassified(self, job_id):
job = self.get_job(job_id)[0]
if FailureLine.objects.filter(job_guid=job["job_guid"],
action="truncated").count() > 0:
return False
num_failure_lines = FailureLine.objects.filter(job_guid=job["job_guid"],
matches__is_best=True).count()
if num_failure_lines == 0:
return False
bug_suggestion_lines = self.filter_bug_suggestions(self.bug_suggestions(job_id))
return num_failure_lines == len(bug_suggestion_lines)
def insert_autoclassify_job_note(self, job_id):
job = self.get_job(job_id)[0]
failure_lines = FailureLine.objects.filter(
job_guid=job["job_guid"], matches__is_best=True).prefetch_related(
'classified_failures')
bugs = set()
for line in failure_lines:
for classified_failure in line.classified_failures.all():
bugs.add(classified_failure.bug_number)
if len(bugs) == 1 and None not in bugs:
bug_number = bugs.pop()
logger.info("Autoclassifier adding bug")
self.insert_bug_job_map(job_id, bug_number, "autoclassification",
int(time.time()), "autoclassifier")
classification = FailureClassification.objects.get(name="autoclassified intermittent")
logger.info("Autoclassifier adding job note")
self.insert_job_note(job_id, classification.id, "autoclassifier", "")
def bug_suggestions(self, job_id):
"""Get the list of log lines and associated bug suggestions for a job"""
with ArtifactsModel(self.project) as artifacts_model:
# TODO: Filter some junk from this
objs = artifacts_model.get_job_artifact_list(
offset=0,
limit=1,
conditions={"job_id": set([("=", job_id)]),
"name": set([("=", "Bug suggestions")]),
"type": set([("=", "json")])})
lines = objs[0]["blob"] if objs else []
return lines
def insert_bug_job_map(self, job_id, bug_id, assignment_type, submit_timestamp, who):
"""
@@ -442,6 +560,9 @@ class JobsModel(TreeherderModelBase):
routing_key='classification_mirroring'
)
if who != 'autoclassifier':
self.update_autoclassification_bug(job_id, bug_id)
def delete_bug_job_map(self, job_id, bug_id):
"""
Delete a bug-job entry identified by bug_id and job_id
@@ -455,6 +576,17 @@ class JobsModel(TreeherderModelBase):
debug_show=self.DEBUG
)
def update_autoclassification_bug(self, job_id, bug_id):
failure_line = self.manual_classification_line(job_id)
if failure_line is None:
return
failure = ClassifiedFailure.objects.best_for_line(failure_line)
if failure and failure.bug_number is None:
failure.bug_number = bug_id
failure.save()
def calculate_eta(self, sample_window_seconds, debug):
# Get the most recent timestamp from jobs
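
A small worked example of the filter_bug_suggestions behaviour above, since it gates both manual_classification_line and fully_autoclassified: "Return code: N" suggestion lines are treated as noise, so a job with one real failure plus a return-code line still has exactly one suggestion line for matching purposes.

import re

remove = [re.compile(r"Return code: \d+")]
suggestion_lines = [{"search": "TEST-UNEXPECTED-FAIL | test1 | message1"},
                    {"search": "Return code: 1"}]
kept = [line for line in suggestion_lines
        if not any(regexp.match(line["search"]) for regexp in remove)]
assert kept == [{"search": "TEST-UNEXPECTED-FAIL | test1 | message1"}]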

View file

@@ -52,5 +52,14 @@
"description": "",
"active_status": "active"
}
},
{
"pk": 7,
"model": "model.failureclassification",
"fields": {
"name": "autoclassified intermittent",
"description": "",
"active_status": "active"
}
}
]

View file

@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import treeherder.model.fields
class Migration(migrations.Migration):
dependencies = [
('model', '0002_auto_20151014_0900'),
]
operations = [
migrations.AlterField(
model_name='failurematch',
name='classified_failure',
field=treeherder.model.fields.FlexibleForeignKey(related_name='matches', to='model.ClassifiedFailure'),
),
]

View file

@@ -13,7 +13,8 @@ from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.db import (connection,
models)
models,
transaction)
from django.db.models import Q
from django.utils.encoding import python_2_unicode_compatible
from jsonfield import JSONField
@@ -530,8 +531,9 @@ class FailureLineManager(models.Manager):
classified_failures=None,
)
def for_jobs(self, *jobs):
failures = FailureLine.objects.filter(job_guid__in=[item["job_guid"] for item in jobs])
def for_jobs(self, *jobs, **filters):
failures = FailureLine.objects.filter(job_guid__in=[item["job_guid"] for item in jobs],
**filters)
failures_by_job = defaultdict(list)
for item in failures:
failures_by_job[item.job_guid].append(item)
@@ -584,20 +586,40 @@ class FailureLine(models.Model):
if match and match.score > min_score:
return match
def create_new_classification(self, matcher):
new_classification = ClassifiedFailure()
new_classification.save()
def set_classification(self, matcher, bug_number=None):
with transaction.atomic():
if bug_number:
classification, _ = ClassifiedFailure.objects.get_or_create(
bug_number=bug_number)
else:
classification = ClassifiedFailure.objects.create()
new_link = FailureMatch(
failure_line=self,
classified_failure=new_classification,
matcher=matcher,
score=1,
is_best=True)
new_link.save()
FailureMatch.objects.filter(
failure_line=self,
is_best=True).update(is_best=False)
new_link = FailureMatch(
failure_line=self,
classified_failure=classification,
matcher=matcher,
score=1,
is_best=True)
new_link.save()
class ClassifiedFailureManager(models.Manager):
def best_for_line(self, failure_line):
try:
return ClassifiedFailure.objects.get(
failure_lines__id=failure_line.id,
matches__is_best=True)
except ClassifiedFailure.DoesNotExist:
return None
class ClassifiedFailure(models.Model):
objects = ClassifiedFailureManager()
id = BigAutoField(primary_key=True)
failure_lines = models.ManyToManyField(FailureLine, through='FailureMatch',
related_name='classified_failures')
@@ -611,39 +633,81 @@ class ClassifiedFailure(models.Model):
db_table = 'classified_failure'
class LazyClassData(object):
def __init__(self, type_func, setter):
"""Descriptor object for class-level data that is lazily initialized.
See https://docs.python.org/2/howto/descriptor.html for details of the descriptor
protocol.
:param type_func: Callable of zero arguments used to initialize the data storage on
first access.
:param setter: Callable of zero arguments used to populate the data storage
after it has been initialized. Unlike type_func this can safely
be used reentrantly i.e. the setter function may itself access the
attribute being set.
"""
self.type_func = type_func
self.setter = setter
self.value = None
def __get__(self, obj, objtype):
if self.value is None:
self.value = self.type_func()
self.setter()
return self.value
def __set__(self, obj, val):
self.value = val
def _init_matchers():
from treeherder.autoclassify import matchers
matchers.register()
def _init_detectors():
from treeherder.autoclassify import detectors
detectors.register()
class MatcherManager(models.Manager):
def register_matcher(self, cls):
return self._register(cls, Matcher._matcher_funcs)
_detector_funcs = LazyClassData(OrderedDict, _init_detectors)
_matcher_funcs = LazyClassData(OrderedDict, _init_matchers)
def register_detector(self, cls):
return self._register(cls, Matcher._detector_funcs)
@classmethod
def register_matcher(cls, matcher_cls):
assert cls._matcher_funcs is not None
return cls._register(matcher_cls, cls._matcher_funcs)
def _register(self, cls, dest):
if cls.__name__ in dest:
@classmethod
def register_detector(cls, detector_cls):
assert cls._detector_funcs is not None
return cls._register(detector_cls, cls._detector_funcs)
@classmethod
def _register(cls, matcher_cls, dest):
if matcher_cls.__name__ in dest:
return dest[cls.__name__]
obj = Matcher.objects.get_or_create(name=cls.__name__)[0]
obj, _ = Matcher.objects.get_or_create(name=matcher_cls.__name__)
instance = cls(obj)
dest[cls.__name__] = instance
instance = matcher_cls(obj)
dest[matcher_cls.__name__] = instance
return instance
def registered_matchers(self):
for matcher in Matcher._matcher_funcs.values():
for matcher in self._matcher_funcs.values():
yield matcher
def registered_detectors(self):
for matcher in Matcher._detector_funcs.values():
for matcher in self._detector_funcs.values():
yield matcher
class Matcher(models.Model):
name = models.CharField(max_length=50, unique=True)
_detector_funcs = OrderedDict()
_matcher_funcs = OrderedDict()
objects = MatcherManager()
class Meta:
@@ -658,7 +722,8 @@ class Matcher(models.Model):
class FailureMatch(models.Model):
id = BigAutoField(primary_key=True)
failure_line = FlexibleForeignKey(FailureLine, related_name="matches")
classified_failure = FlexibleForeignKey(ClassifiedFailure)
classified_failure = FlexibleForeignKey(ClassifiedFailure, related_name="matches")
matcher = models.ForeignKey(Matcher)
score = models.DecimalField(max_digits=3, decimal_places=2, blank=True, null=True)
is_best = models.BooleanField(default=False)
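
A standalone sketch of the LazyClassData behaviour described in its docstring, assuming the class is importable from treeherder.model.models as added in this commit: the first attribute access builds the storage with type_func and then runs setter, which may itself read or write the same attribute without recursing.

from collections import OrderedDict
from treeherder.model.models import LazyClassData

def _populate():
    # Safe: the OrderedDict already exists by the time the setter runs.
    Registry.funcs["default"] = "something"

class Registry(object):
    funcs = LazyClassData(OrderedDict, _populate)

print(Registry.funcs)  # OrderedDict([('default', 'something')])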

View file

@@ -172,7 +172,8 @@ div#info-panel .navbar.navbar-dark .navbar-nav > li.active a:focus {
margin: 2px 0px;
}
#job-details-panel .star {
#job-details-panel .star,
#job-details-panel .star-o {
color: #f0ad4e;
}

View file

@@ -144,7 +144,21 @@
.btn-pink-classified-count::after,
.btn-purple-classified::after,
.btn-purple-classified-count::after {
content: "*";
font-family: 'Glyphicons Halflings';
font-weight: 400;
line-height: 1;
content: "\e006";
font-size: 75%;
vertical-align: super;
}
.autoclassified::after {
font-family: 'Glyphicons Halflings';
font-weight: 400;
line-height: 1;
content: "\e007";
font-size: 75%;
vertical-align: super;
}
/* Orange, testfailed */

View file

@@ -59,11 +59,12 @@ treeherder.directive('thFailureClassification', [
scope.classification = thClassificationTypes.classifications[newVal];
scope.badgeColorClass=scope.classification.star;
scope.hoverText=scope.classification.name;
scope.iconCls = (newVal == 7 ? "glyphicon-star-empty" : "glyphicon glyphicon-star") + " star";
}
});
},
template: '<span title="{{hoverText}}">' +
'<i class="glyphicon glyphicon-star star"></i>' +
'<i class="glyphicon {{iconCls}}"></i>' +
'</span> {{hoverText}}'
};
}]);

View file

@@ -141,8 +141,13 @@ treeherder.provider('thResultStatusInfo', function() {
}
// handle if a job is classified
if (parseInt(failure_classification_id, 10) > 1) {
var classificationId = parseInt(failure_classification_id, 10);
if (classificationId > 1) {
resultStatusInfo.btnClass = resultStatusInfo.btnClass + "-classified";
// autoclassification-only case
if (classificationId == 7) {
resultStatusInfo.btnClass += " autoclassified";
}
resultStatusInfo.countText = "classified " + resultStatusInfo.countText;
}
return resultStatusInfo;

View file

@@ -13,7 +13,8 @@ treeherder.factory('thClassificationTypes', [
3: "label-success", // fixed by backout",
4: "label-warning", // intermittent",
5: "label-default", // infra",
6: "label-danger" // intermittent needs filing",
6: "label-danger", // intermittent needs filing",
7: "label-warning" // autoclassified intermittent
};
var addClassification = function(cl) {