Mirror of https://github.com/mozilla/treeherder.git
Bug 1923923 - Make bugzilla reference optional on Bugscache model (#8407)
* Extend Bugscache model to count occurrences; introduce an optional reference to the Bugzilla ID
* Add a unique constraint for bugzilla_id
* Update references to bugzilla_id
* Update the bug serializer
* Apply review suggestions
* Update tests
* Support bug occurrences through an M2M table
* Do not serialize occurrences in the current serializers
* Directly manipulate the M2M
* Add a constraint to the M2M
* Exclude user from the unique constraint
Parent: 911662ee2e
Commit: 02e5fd177b
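The change below decouples Bugscache's primary key from the Bugzilla bug number: `id` becomes an auto-increment key, `bugzilla_id` becomes an optional, uniquely constrained reference, and a new BugscacheOccurrence M2M table records each report of a failure line against a bug. A minimal sketch of the intended flow (not code from this commit; `user` and `failure_line` are assumed to be existing User and FailureLine instances):

    from treeherder.model.models import Bugscache, BugscacheOccurrence

    # A bug can now exist locally without a Bugzilla counterpart.
    bug = Bugscache.objects.create(
        status="NEW",
        summary="intermittent example/test.js | single tracking bug",
        modified="2014-01-01 00:00:00",
    )
    assert bug.bugzilla_id is None

    # Each report of the failure is tracked through the M2M table.
    BugscacheOccurrence.objects.create(user=user, failure_line=failure_line, bug=bug)

    # Once the bug is filed upstream, the optional Bugzilla reference is set.
    bug.bugzilla_id = 1234567
    bug.save(update_fields=["bugzilla_id"])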
@@ -808,7 +808,7 @@ def bugs(mock_bugzilla_api_request):
     process = BzApiBugProcess()
     process.run()
 
-    return th_models.Bugscache.objects.all().order_by("id")
+    return th_models.Bugscache.objects.filter(bugzilla_id__isnull=False).order_by("bugzilla_id")
 
 
 @pytest.fixture
@@ -1143,7 +1143,7 @@ def generic_reference_data(test_repository):
 @pytest.fixture
 def bug_data(eleven_jobs_stored, test_repository, test_push, bugs):
     jobs = th_models.Job.objects.all().order_by("id")
-    bug_id = bugs[0].id
+    bug_id = bugs[0].bugzilla_id
     job_id = jobs[0].id
     th_models.BugJobMap.create(job_id=job_id, bug_id=bug_id)
     query_string = f"?startday=2012-05-09&endday=2018-05-10&tree={test_repository.name}"
@@ -1161,7 +1161,7 @@ def bug_data(eleven_jobs_stored, test_repository, test_push, bugs):
 @pytest.fixture
 def bug_data_with_5_failures(eleven_jobs_stored, test_repository, test_push, bugs):
     jobs = th_models.Job.objects.all().order_by("id")
-    bug_id = bugs[0].id
+    bug_id = bugs[0].bugzilla_id
     for index, job in enumerate(jobs[:5]):
         th_models.BugJobMap.create(job_id=job.id, bug_id=bug_id)
         if index > 2:
@@ -51,7 +51,7 @@ def test_bz_reopen_bugs(
     ]:
         submit_obj = {
             "job_id": test_jobs[idx].id,
-            "bug_id": bug.id,
+            "bug_id": bug.bugzilla_id,
             "type": "manual",
             "bug_open": False,
         }
@@ -68,10 +68,14 @@ def test_bz_reopen_bugs(
     # always closed
     # as we only reopen a single instance of a bug, we choose the most recent instance
     # since the reopen code queries and then `.order_by("-created")`
-    bug_job_map = BugJobMap.objects.filter(job_id=test_jobs[4].id, bug_id=incomplete_bugs[0].id)[0]
+    bug_job_map = BugJobMap.objects.filter(
+        job_id=test_jobs[4].id, bug_id=incomplete_bugs[0].bugzilla_id
+    )[0]
     assert bug_job_map.bug_open is False
 
-    bug_job_map = BugJobMap.objects.filter(job_id=test_jobs[3].id, bug_id=incomplete_bugs[2].id)[0]
+    bug_job_map = BugJobMap.objects.filter(
+        job_id=test_jobs[3].id, bug_id=incomplete_bugs[2].bugzilla_id
+    )[0]
     assert bug_job_map.bug_open is False
 
     process = BzApiBugProcess()
@@ -79,10 +83,14 @@ def test_bz_reopen_bugs(
     process.run()
 
     # reopens based on minimum_failures_to_reopen
-    bug_job_map = BugJobMap.objects.filter(job_id=test_jobs[4].id, bug_id=incomplete_bugs[0].id)[0]
+    bug_job_map = BugJobMap.objects.filter(
+        job_id=test_jobs[4].id, bug_id=incomplete_bugs[0].bugzilla_id
+    )[0]
     assert bug_job_map.bug_open is True
 
-    bug_job_map = BugJobMap.objects.filter(job_id=test_jobs[3].id, bug_id=incomplete_bugs[2].id)[0]
+    bug_job_map = BugJobMap.objects.filter(
+        job_id=test_jobs[3].id, bug_id=incomplete_bugs[2].bugzilla_id
+    )[0]
     if minimum_failures_to_reopen < 3:
         assert bug_job_map.bug_open is True
     else:
@@ -113,13 +113,15 @@ def populate_bugscache():
     return Bugscache.objects.bulk_create(
         [
             Bugscache(
-                id=1234567,
+                id=1,
+                bugzilla_id=1234567,
                 status="NEW",
                 summary="intermittent devtools/client/framework/test/test1.js | single tracking bug",
                 modified="2014-01-01 00:00:00",
             ),
             Bugscache(
-                id=2345678,
+                id=2,
+                bugzilla_id=2345678,
                 status="NEW",
                 summary="intermittent devtools/client/framework/test/test2.js | single tracking bug",
                 modified="2014-01-01 00:00:00",
@@ -92,7 +92,7 @@ def test_bug_suggestion_line(
     job = Job.objects.get(id=1)
 
     Bugscache.objects.create(
-        id=1775819,
+        bugzilla_id=1775819,
         status="2",
         keywords="intermittent-failure,regression,test-verify-fail",
         whiteboard="[retriggered][stockwell unknown]",
@@ -109,7 +109,7 @@ def test_bug_suggestion_line(
     Bugscache.objects.bulk_create(
         [
             Bugscache(
-                id=100 + i,
+                bugzilla_id=100 + i,
                 status="2",
                 keywords="intermittent-failure,intermittent-testcase",
                 summary=(
@@ -22,7 +22,7 @@ def _update_bugscache(bug_list):
 
     for bug in bug_list:
         Bugscache.objects.create(
-            id=bug["id"],
+            bugzilla_id=bug["id"],
             status=bug["status"],
             resolution=bug["resolution"],
             summary=bug["summary"][:max_summary_length],
@@ -153,7 +153,7 @@ def test_import(mock_bugscache_bugzilla_request):
 
     BzApiBugProcess().run()
 
-    bug = Bugscache.objects.get(id=1652208)
+    bug = Bugscache.objects.get(bugzilla_id=1652208)
     assert bug.status == "RESOLVED"
     assert bug.resolution == "DUPLICATE"
     assert bug.crash_signature == "[@ some::mock_signature]"
@@ -174,10 +174,10 @@ def test_import(mock_bugscache_bugzilla_request):
     }
 
     for open_bug, duplicates in expected_bug_dupe_of_data.items():
-        assert Bugscache.objects.get(id=open_bug).dupe_of is None
-        assert set(Bugscache.objects.filter(dupe_of=open_bug).values_list("id", flat=True)) == set(
-            duplicates
-        )
+        assert Bugscache.objects.get(bugzilla_id=open_bug).dupe_of is None
+        assert set(
+            Bugscache.objects.filter(dupe_of=open_bug).values_list("bugzilla_id", flat=True)
+        ) == set(duplicates)
 
     expected_bug_count = sum(
         [1 + len(duplicates) for duplicates in expected_bug_dupe_of_data.values()]
@@ -63,14 +63,14 @@ def test_bug_job_map_list(client, test_repository, eleven_jobs_stored, test_user
     for i, job in enumerate(jobs):
         bjm = BugJobMap.create(
             job_id=job.id,
-            bug_id=bugs[i].id,
+            bug_id=bugs[i].bugzilla_id,
             user=test_user,
         )
 
         expected.append(
             {
                 "job_id": job.id,
-                "bug_id": bugs[i].id,
+                "bug_id": bugs[i].bugzilla_id,
                 "created": bjm.created.isoformat(),
                 "who": test_user.email,
             }
@@ -23,7 +23,9 @@ def reopen_intermittent_bugs(minimum_failures_to_reopen=1):
        return
 
    incomplete_bugs = set(
-        Bugscache.objects.filter(resolution="INCOMPLETE").values_list("id", flat=True)
+        Bugscache.objects.filter(resolution="INCOMPLETE", bugzilla_id__isnull=False).values_list(
+            "bugzilla_id", flat=True
+        )
    )
    # Intermittent bugs get closed after 3 weeks of inactivity if other conditions don't apply:
    # https://github.com/mozilla/relman-auto-nag/blob/c7439e247677333c1cd8c435234b3ef3adc49680/auto_nag/scripts/close_intermittents.py#L17
@@ -102,7 +104,10 @@ class BzApiBugProcess:
     def run(self):
         year_ago = datetime.utcnow() - timedelta(days=365)
         last_change_time_max = (
-            Bugscache.objects.all().aggregate(Max("modified"))["modified__max"] or None
+            Bugscache.objects.filter(bugzilla_id__isnull=False).aggregate(Max("modified"))[
+                "modified__max"
+            ]
+            or None
         )
         if last_change_time_max:
             last_change_time_max -= timedelta(minutes=10)
@@ -143,7 +148,9 @@ class BzApiBugProcess:
             bugs_to_process = list(
                 bugs_to_process
                 - set(
-                    Bugscache.objects.filter(processed_update=True).values_list("id", flat=True)
+                    Bugscache.objects.filter(
+                        processed_update=True, bugzilla_id__isnull=False
+                    ).values_list("bugzilla_id", flat=True)
                 )
             )
             if len(bugs_to_process) == 0:
@@ -186,8 +193,12 @@ class BzApiBugProcess:
 
             if bug_list:
                 if duplicate_chain_length == 0:
-                    Bugscache.objects.filter(modified__lt=year_ago).delete()
-                    Bugscache.objects.all().update(processed_update=False)
+                    Bugscache.objects.filter(
+                        modified__lt=year_ago, bugzilla_id__isnull=False
+                    ).delete()
+                    Bugscache.objects.filter(bugzilla_id__isnull=False).update(
+                        processed_update=False
+                    )
 
                 for bug in bug_list:
                     # we currently don't support timezones in treeherder, so
@@ -196,7 +207,7 @@ class BzApiBugProcess:
                     try:
                         dupe_of = bug.get("dupe_of", None)
                         Bugscache.objects.update_or_create(
-                            id=bug["id"],
+                            bugzilla_id=bug["id"],
                             defaults={
                                 "status": bug.get("status", ""),
                                 "resolution": bug.get("resolution", ""),
@@ -243,7 +254,9 @@ class BzApiBugProcess:
                    BugJobMap.objects.all().values_list("bug_id", flat=True)
                )
                bugs_to_process = bugs_to_process_next - set(
-                    Bugscache.objects.filter(processed_update=True).values_list("id", flat=True)
+                    Bugscache.objects.filter(processed_update=True).values_list(
+                        "bugzilla_id", flat=True
+                    )
                )
                if duplicate_chain_length == 5 and len(bugs_to_process):
                    logger.warn(
@@ -258,7 +271,9 @@ class BzApiBugProcess:
                bugs_to_process_next = duplicates_to_check
                duplicates_to_check = set()
                bugs_to_process = bugs_to_process_next - set(
-                    Bugscache.objects.filter(processed_update=True).values_list("id", flat=True)
+                    Bugscache.objects.filter(
+                        processed_update=True, bugzilla_id__isnull=False
+                    ).values_list("bugzilla_id", flat=True)
                )
                if len(bugs_to_process) == 0:
                    break
@@ -273,22 +288,24 @@ class BzApiBugProcess:
         # from getting dropped - they are still needed to match the failure line
         # against the bug summary.
         for bug_duplicate, bug_openish in duplicates_to_bugs.items():
-            bug_openish_object = Bugscache.objects.filter(id=bug_openish)
+            bug_openish_object = Bugscache.objects.filter(bugzilla_id=bug_openish)
             if len(bug_openish_object) == 0:
                 # Script does not have access to open bug but to duplicate
                 continue
-            Bugscache.objects.filter(id=bug_duplicate).update(
+            Bugscache.objects.filter(bugzilla_id=bug_duplicate).update(
                 dupe_of=bug_openish, modified=bug_openish_object[0].modified
             )
 
         # Switch classifications from duplicate bugs to open ones.
         duplicates_db = set(
-            Bugscache.objects.filter(dupe_of__isnull=False).values_list("id", flat=True)
+            Bugscache.objects.filter(dupe_of__isnull=False, bugzilla_id__isnull=False).values_list(
+                "bugzilla_id", flat=True
+            )
         )
         bugs_used = set(BugJobMap.objects.all().values_list("bug_id", flat=True))
         duplicates_used = duplicates_db & bugs_used
         for bug_id in duplicates_used:
-            dupe_of = Bugscache.objects.get(id=bug_id).dupe_of
+            dupe_of = Bugscache.objects.get(bugzilla_id=bug_id).dupe_of
             # Jobs both already classified with new duplicate and its open bug.
             jobs_openish = list(
                 BugJobMap.objects.filter(bug_id=dupe_of).values_list("job_id", flat=True)
@@ -298,7 +315,7 @@ class BzApiBugProcess:
 
         # Delete open bugs and related duplicates if modification date (of open
         # bug) is too old.
-        Bugscache.objects.filter(modified__lt=year_ago).delete()
+        Bugscache.objects.filter(modified__lt=year_ago, bugzilla_id__isnull=False).delete()
 
         if insert_errors_observed:
             logger.error(
@@ -307,8 +324,8 @@ class BzApiBugProcess:
             # Move modification date of bugs inserted/updated during this
             # run back to attempt to ingest bug data which failed during
             # this insert/update in the next run.
-            Bugscache.objects.filter(modified__gt=last_change_time_max).update(
-                modified=last_change_time_max
-            )
+            Bugscache.objects.filter(
+                modified__gt=last_change_time_max, bugzilla_id__isnull=False
+            ).update(modified=last_change_time_max)
 
         reopen_intermittent_bugs(self.minimum_failures_to_reopen)
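The recurring pattern in the hunks above: every Bugscache query in the Bugzilla sync that previously used the primary key as the bug number now goes through `bugzilla_id` and skips Treeherder-only rows. An equivalent restatement of that pattern (a sketch, not the literal diff):

    from datetime import datetime, timedelta

    from treeherder.model.models import Bugscache

    # Only rows that actually reference a Bugzilla bug take part in the sync.
    bugzilla_backed = Bugscache.objects.filter(bugzilla_id__isnull=False)

    incomplete_bugs = set(
        bugzilla_backed.filter(resolution="INCOMPLETE").values_list("bugzilla_id", flat=True)
    )
    year_ago = datetime.utcnow() - timedelta(days=365)
    bugzilla_backed.filter(modified__lt=year_ago).delete()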
@@ -0,0 +1,86 @@
+# Generated by Django 5.1.1 on 2025-01-10 15:40
+
+from django.conf import settings
+from django.db import migrations, models
+from django.db.models import F
+
+
+def set_bugzilla_ids(apps, schema_editor):
+    Bugscache = apps.get_model("model", "Bugscache")
+    Bugscache.objects.all().update(bugzilla_id=F("id"))
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("model", "0034_bugjobmap_bug_open"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="bugscache",
+            name="bugzilla_id",
+            field=models.PositiveIntegerField(blank=True, null=True),
+        ),
+        migrations.RunPython(
+            set_bugzilla_ids,
+        ),
+        migrations.AlterField(
+            model_name="bugscache",
+            name="id",
+            field=models.BigAutoField(primary_key=True, serialize=False),
+        ),
+        migrations.AddConstraint(
+            model_name="bugscache",
+            constraint=models.UniqueConstraint(
+                condition=models.Q(("bugzilla_id__isnull", False)),
+                fields=("bugzilla_id",),
+                name="unique_bugzilla_id",
+            ),
+        ),
+        migrations.CreateModel(
+            name="BugscacheOccurrence",
+            fields=[
+                (
+                    "id",
+                    models.AutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                ("created", models.DateTimeField(auto_now_add=True)),
+                (
+                    "bug",
+                    models.ForeignKey(
+                        on_delete=models.deletion.CASCADE,
+                        to="model.bugscache",
+                        related_name="occurrences",
+                    ),
+                ),
+                (
+                    "failure_line",
+                    models.ForeignKey(
+                        on_delete=models.deletion.CASCADE,
+                        to="model.failureline",
+                        related_name="bug_occurrences",
+                    ),
+                ),
+                (
+                    "user",
+                    models.ForeignKey(
+                        on_delete=models.deletion.CASCADE,
+                        to=settings.AUTH_USER_MODEL,
+                    ),
+                ),
+            ],
+        ),
+        migrations.AddConstraint(
+            model_name="BugscacheOccurrence",
+            constraint=models.UniqueConstraint(
+                fields=("failure_line", "bug"),
+                name="unique_failureline_bug_occurrence",
+            ),
+        ),
+    ]
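One property of the migration above worth noting (a hedged observation, not part of the commit): `AlterField` to `BigAutoField` does not renumber existing rows, so immediately after the `RunPython` backfill every pre-existing bug satisfies `bugzilla_id == id`; only bugs created afterwards get an `id` unrelated to Bugzilla. A quick shell check under that assumption:

    from django.db.models import F

    from treeherder.model.models import Bugscache

    # Right after the backfill, no pre-existing row should diverge.
    assert not Bugscache.objects.exclude(bugzilla_id=F("id")).exists()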
@@ -207,8 +207,35 @@ class MachinePlatform(models.Model):
         return f"{self.os_name} {self.platform} {self.architecture}"
 
 
+class BugscacheOccurrence(models.Model):
+    """
+    M2M table used to reference a bug being reported from a FailureLine locally.
+    Once the same bug has been reported multiple times, the user is prompted to file a Bugzilla ticket.
+    The same user cannot report the same failure line multiple times.
+    """
+
+    user = models.ForeignKey(User, on_delete=models.CASCADE)
+    failure_line = models.ForeignKey(
+        "FailureLine", on_delete=models.CASCADE, related_name="bug_occurrences"
+    )
+    bug = models.ForeignKey("Bugscache", on_delete=models.CASCADE, related_name="occurrences")
+    created = models.DateTimeField(auto_now_add=True)
+
+    class Meta:
+        constraints = [
+            models.UniqueConstraint(
+                fields=["failure_line", "bug"],
+                name="unique_failureline_bug_occurrence",
+            )
+        ]
+
+
 class Bugscache(models.Model):
-    id = models.PositiveIntegerField(primary_key=True)
+    id = models.BigAutoField(primary_key=True)
+
+    # Optional reference to a bug in Bugzilla, set once it has been reported as occurring many times in a week
+    bugzilla_id = models.PositiveIntegerField(null=True, blank=True)
+
     status = models.CharField(max_length=64, db_index=True)
     resolution = models.CharField(max_length=64, blank=True, db_index=True)
     # Is covered by a FULLTEXT index created via a migrations RunSQL operation.
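Building on the docstring above, a hypothetical helper (not part of this commit) showing how the M2M and its unique constraint would combine to count reports before prompting for a Bugzilla ticket:

    from django.db import IntegrityError

    from treeherder.model.models import BugscacheOccurrence

    def report_occurrence(user, failure_line, bug):
        # Hypothetical helper: record one report of a failure line against a
        # local bug and return how often the bug has been reported so far.
        try:
            BugscacheOccurrence.objects.create(user=user, failure_line=failure_line, bug=bug)
        except IntegrityError:
            # The (failure_line, bug) pair is unique regardless of user, so a
            # second report of the same line against the same bug is rejected.
            pass
        return bug.occurrences.count()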
@@ -226,10 +253,25 @@
         indexes = [
             models.Index(fields=["summary"]),
         ]
+        constraints = [
+            models.UniqueConstraint(
+                fields=["bugzilla_id"],
+                name="unique_bugzilla_id",
+                condition=Q(bugzilla_id__isnull=False),
+            )
+        ]
 
     def __str__(self):
         return f"{self.id}"
 
+    def serialize(self):
+        exclude_fields = ["modified", "processed_update", "occurrences"]
+
+        attrs = model_to_dict(self, exclude=exclude_fields)
+        # Serialize bug ID as the bugzilla number for compatibility reasons
+        attrs["id"] = attrs.pop("bugzilla_id")
+        return attrs
+
     @classmethod
     def search(cls, search_term):
         max_size = 50
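A hedged illustration of what `serialize()` yields for a Bugzilla-backed row, reusing the bug from the test_import hunk above: `model_to_dict` omits the non-editable primary key, and the Bugzilla number is re-exposed under the `id` key, so existing API consumers keep seeing the bug number they expect.

    bug = Bugscache.objects.get(bugzilla_id=1652208)
    bug.serialize()
    # -> {"id": 1652208, "status": "RESOLVED", "resolution": "DUPLICATE", ...}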
@@ -246,11 +288,8 @@
             .order_by("-similarity")[0:max_size]
         )
 
-        exclude_fields = ["modified", "processed_update"]
         try:
-            open_recent_match_string = [
-                model_to_dict(item, exclude=exclude_fields) for item in recent_qs
-            ]
+            open_recent_match_string = [item.serialize() for item in recent_qs]
             all_data = [
                 match
                 for match in open_recent_match_string
@@ -1093,7 +1132,7 @@ class ClassifiedFailure(models.Model):
     def bug(self):
         # Putting this here forces one query per object; there should be a way
         # to make things more efficient
-        return Bugscache.objects.filter(id=self.bug_number).first()
+        return Bugscache.objects.filter(bugzilla_id=self.bug_number).first()
 
     def set_bug(self, bug_number):
         """