Bug 1919588 - Add a new testing alert table, along with new fields. (#8200)

* Bug 1919588 - Add a new testing alert table, along with new fields.

* Change naming of PerformanceAlertSummary* class variable.

* Check if reassigned alerts are part of the current summary when autodetermining status (sketched below, after the commit metadata).

* Resolve broken tests.
Gregory Mierzwinski 2024-10-09 08:35:37 -04:00 committed by GitHub
Parent 57537ac0cb
Commit fb59b00868
No known key found for this signature
GPG key ID: B5690EEEBB952194
4 changed files with 350 additions and 37 deletions
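
The third bullet above refers to the status auto-determination logic in models.py: a REASSIGNED regression should only keep a summary out of the IMPROVEMENT state when it was reassigned to that summary, not when it was moved out of it. A minimal, self-contained sketch of that rule, using plain dataclasses as stand-ins for the Treeherder models (not the actual code):

from dataclasses import dataclass
from typing import Optional

REASSIGNED, ACKNOWLEDGED = "reassigned", "acknowledged"

@dataclass
class Alert:
    status: str
    is_regression: bool
    related_summary_id: Optional[int] = None

def only_improvements(summary_id, alerts):
    # True when every acknowledged alert, and every regression reassigned *to*
    # this summary, is a non-regression (i.e. an improvement).
    return all(
        not a.is_regression
        for a in alerts
        if a.status == ACKNOWLEDGED
        or (a.status == REASSIGNED and a.related_summary_id == summary_id)
    )

# A regression reassigned to a different summary (id=2) no longer keeps
# summary 1 out of the IMPROVEMENT state.
alerts = [Alert(ACKNOWLEDGED, False), Alert(REASSIGNED, True, related_summary_id=2)]
print(only_improvements(1, alerts))  # True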

View file

@@ -101,9 +101,9 @@ def test_reassigning_regression(
assert s.status == PerformanceAlertSummary.UNTRIAGED
# reassigning a regression to the initial summary
# which contains only an improvement
create_perf_alert(
# reassigning a regression that was in the first summary
# to the second summary should leave the status as UNTRIAGED
reassigned_alert = create_perf_alert(
summary=s,
series_signature=signature1,
related_summary=test_perf_alert_summary_2,
@@ -117,9 +117,12 @@ def test_reassigning_regression(
# the regression alert will keep its status of REASSIGNED
untriaged_improvement_alert.status = PerformanceAlert.ACKNOWLEDGED
untriaged_improvement_alert.save()
assert reassigned_alert.status == PerformanceAlert.REASSIGNED
# A summary containing only improvements should automatically
# be given the IMPROVEMENT status
s = PerformanceAlertSummary.objects.get(id=1)
assert s.status == PerformanceAlertSummary.INVESTIGATING
assert s.status == PerformanceAlertSummary.IMPROVEMENT
def test_improvement_summary_status_after_reassigning_regression(

View file

@@ -0,0 +1,220 @@
# Generated by Django 4.2.16 on 2024-09-18 16:26
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
(
"model",
"0032_rename_failureline_job_guid_repository_failure_lin_job_gui_b67c6d_idx_and_more",
),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
(
"perf",
"0052_rename_performancedatum_repository_signature_push_timestamp_performance_reposit_c9d328_idx_and_more",
),
]
operations = [
migrations.CreateModel(
name="PerformanceAlertSummaryTesting",
fields=[
("id", models.AutoField(primary_key=True, serialize=False)),
("manually_created", models.BooleanField(default=False)),
("notes", models.TextField(blank=True, null=True)),
("created", models.DateTimeField(auto_now_add=True, db_index=True)),
("triage_due_date", models.DateTimeField(default=None, null=True)),
("first_triaged", models.DateTimeField(default=None, null=True)),
("last_updated", models.DateTimeField(auto_now=True, null=True)),
(
"status",
models.IntegerField(
choices=[
(0, "Untriaged"),
(1, "Downstream"),
(2, "Reassigned"),
(3, "Invalid"),
(4, "Improvement"),
(5, "Investigating"),
(6, "Won't fix"),
(7, "Fixed"),
(8, "Backed out"),
],
default=0,
),
),
("bug_number", models.PositiveIntegerField(null=True)),
("bug_due_date", models.DateTimeField(default=None, null=True)),
("bug_updated", models.DateTimeField(null=True)),
(
"assignee",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="assigned_alerts_testing",
to=settings.AUTH_USER_MODEL,
),
),
(
"framework",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="perf.performanceframework",
),
),
(
"issue_tracker",
models.ForeignKey(
default=1,
on_delete=django.db.models.deletion.PROTECT,
to="perf.issuetracker",
),
),
(
"prev_push",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="model.push",
),
),
(
"push",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="model.push",
),
),
(
"repository",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="model.repository",
),
),
],
options={
"db_table": "performance_alert_summary_testing",
"unique_together": {("repository", "framework", "prev_push", "push")},
},
),
migrations.CreateModel(
name="PerformanceAlertTesting",
fields=[
("id", models.AutoField(primary_key=True, serialize=False)),
("is_regression", models.BooleanField()),
("starred", models.BooleanField(default=False)),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("first_triaged", models.DateTimeField(default=None, null=True)),
("last_updated", models.DateTimeField(auto_now=True, null=True)),
(
"status",
models.IntegerField(
choices=[
(0, "Untriaged"),
(1, "Downstream"),
(2, "Reassigned"),
(3, "Invalid"),
(4, "Acknowledged"),
],
default=0,
),
),
(
"amount_pct",
models.FloatField(
help_text="Amount in percentage that series has changed"
),
),
(
"amount_abs",
models.FloatField(
help_text="Absolute amount that series has changed"
),
),
(
"prev_value",
models.FloatField(
help_text="Previous value of series before change"
),
),
(
"new_value",
models.FloatField(help_text="New value of series after change"),
),
(
"t_value",
models.FloatField(
help_text="t value out of analysis indicating confidence that change is 'real'",
null=True,
),
),
(
"noise_profile",
models.CharField(
choices=[
(
"SKEWED",
"Samples are heavily found on one side of the mean.",
),
(
"OUTLIERS",
"There are more outliers than should be expected from a normal distribution.",
),
(
"MODAL",
"There are multiple areas where most values are found rather than only one.",
),
("OK", "No issues were found."),
("N/A", "Could not compute a noise profile."),
],
default="N/A",
help_text="The noise profile of the data which precedes this alert.",
max_length=30,
),
),
("manually_created", models.BooleanField(default=False)),
(
"classifier",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
(
"related_summary",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="related_alerts",
to="perf.performancealertsummarytesting",
),
),
(
"series_signature",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="perf.performancesignature",
),
),
(
"summary",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="alerts",
to="perf.performancealertsummarytesting",
),
),
],
options={
"db_table": "performance_alert_testing",
"unique_together": {("summary", "series_signature")},
},
),
]
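
A hedged usage sketch of the two tables created above, assuming the models are exposed from treeherder.perf.models; the fixture names (repo, fw, push1, push2, sig) are illustrative stand-ins for existing repository, framework, push, and signature rows, not objects defined in this patch:

from treeherder.perf.models import (
    PerformanceAlertSummaryTesting,
    PerformanceAlertTesting,
)

summary = PerformanceAlertSummaryTesting.objects.create(
    repository=repo,       # assumed existing Repository row
    framework=fw,          # assumed existing PerformanceFramework row
    prev_push=push1,       # assumed existing Push rows
    push=push2,
)
PerformanceAlertTesting.objects.create(
    summary=summary,
    series_signature=sig,  # assumed existing PerformanceSignature row
    is_regression=True,
    amount_pct=5.2,
    amount_abs=12.0,
    prev_value=230.0,
    new_value=242.0,
)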

View file

@@ -0,0 +1,39 @@
# Generated by Django 4.2.16 on 2024-09-18 16:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("perf", "0053_performancealertsummarytesting_and_more"),
]
operations = [
migrations.AddField(
model_name="performancealert",
name="confidence",
field=models.FloatField(
help_text="A value that indicates the confidence of the alert (specific to the detection method used)",
null=True,
),
),
migrations.AddField(
model_name="performancealert",
name="detection_method",
field=models.CharField(max_length=100, null=True),
),
migrations.AddField(
model_name="performancealerttesting",
name="confidence",
field=models.FloatField(
help_text="A value that indicates the confidence of the alert (specific to the detection method used)",
null=True,
),
),
migrations.AddField(
model_name="performancealerttesting",
name="detection_method",
field=models.CharField(max_length=100, null=True),
),
]
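
Continuing the sketch above, the two columns added by this migration can be filled in when a detector classifies an alert; the value and method name below are placeholders, not values introduced by this patch:

alert = PerformanceAlertTesting.objects.get(summary=summary, series_signature=sig)
alert.confidence = 0.92         # detection-method specific confidence score
alert.detection_method = "mwu"  # hypothetical detector name
alert.save()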

View file

@@ -256,7 +256,7 @@ class IssueTracker(models.Model):
return f"{self.name} (tasks via {self.task_base_url})"
class PerformanceAlertSummary(models.Model):
class PerformanceAlertSummaryBase(models.Model):
"""
A summarization of performance alerts
@@ -351,61 +351,64 @@ class PerformanceAlertSummary(models.Model):
self.status = self.autodetermine_status()
self.save(using=using)
def autodetermine_status(self):
alerts = PerformanceAlert.objects.filter(summary=self) | PerformanceAlert.objects.filter(
def autodetermine_status(self, alert_model=None):
summary_class = self.__class__
if not alert_model:
alert_model = PerformanceAlert
alerts = alert_model.objects.filter(summary=self) | alert_model.objects.filter(
related_summary=self
)
# if no alerts yet, we'll say untriaged
if not alerts:
return PerformanceAlertSummary.UNTRIAGED
return summary_class.UNTRIAGED
# if any untriaged, then set to untriaged
if any(alert.status == PerformanceAlert.UNTRIAGED for alert in alerts):
return PerformanceAlertSummary.UNTRIAGED
if any(alert.status == alert_model.UNTRIAGED for alert in alerts):
return summary_class.UNTRIAGED
# if the summary's status is IMPROVEMENT, but a regression is
# reassigned to that summary then set the summary's status to untriaged
# and change all acknowledged statuses to untriaged
if self.status == PerformanceAlertSummary.IMPROVEMENT:
if self.status == summary_class.IMPROVEMENT:
if any(
alert.status == PerformanceAlert.REASSIGNED and alert.is_regression
for alert in alerts
alert.status == alert_model.REASSIGNED and alert.is_regression for alert in alerts
):
acknowledged_alerts = [
alert for alert in alerts if alert.status == PerformanceAlert.ACKNOWLEDGED
alert for alert in alerts if alert.status == alert_model.ACKNOWLEDGED
]
for alert in acknowledged_alerts:
alert.status = PerformanceAlert.UNTRIAGED
alert.status = alert_model.UNTRIAGED
alert.save()
return PerformanceAlertSummary.UNTRIAGED
return summary_class.UNTRIAGED
# if all invalid, then set to invalid
if all(alert.status == PerformanceAlert.INVALID for alert in alerts):
return PerformanceAlertSummary.INVALID
if all(alert.status == alert_model.INVALID for alert in alerts):
return summary_class.INVALID
# otherwise filter out invalid alerts
alerts = [a for a in alerts if a.status != PerformanceAlert.INVALID]
alerts = [a for a in alerts if a.status != alert_model.INVALID]
# if there are any "acknowledged" alerts, then set to investigating
# if not one of the resolved statuses and there are regressions,
# otherwise we'll say it's an improvement
if any(alert.status == PerformanceAlert.ACKNOWLEDGED for alert in alerts):
if any(alert.status == alert_model.ACKNOWLEDGED for alert in alerts):
if all(
not alert.is_regression
for alert in alerts
if alert.status == PerformanceAlert.ACKNOWLEDGED
or alert.status == PerformanceAlert.REASSIGNED
if alert.status == alert_model.ACKNOWLEDGED
or (alert.status == alert_model.REASSIGNED and alert.related_summary.id == self.id)
):
return PerformanceAlertSummary.IMPROVEMENT
return summary_class.IMPROVEMENT
elif self.status not in (
PerformanceAlertSummary.IMPROVEMENT,
PerformanceAlertSummary.INVESTIGATING,
PerformanceAlertSummary.WONTFIX,
PerformanceAlertSummary.FIXED,
PerformanceAlertSummary.BACKED_OUT,
summary_class.IMPROVEMENT,
summary_class.INVESTIGATING,
summary_class.WONTFIX,
summary_class.FIXED,
summary_class.BACKED_OUT,
):
return PerformanceAlertSummary.INVESTIGATING
return summary_class.INVESTIGATING
# keep status if one of the investigating ones
return self.status
@@ -413,10 +416,10 @@ class PerformanceAlertSummary(models.Model):
# alerts of its own: all alerts should be either reassigned,
# downstream, or invalid (but not all invalid, that case is covered
# above)
if any(alert.status == PerformanceAlert.REASSIGNED for alert in alerts):
return PerformanceAlertSummary.REASSIGNED
if any(alert.status == alert_model.REASSIGNED for alert in alerts):
return summary_class.REASSIGNED
return PerformanceAlertSummary.DOWNSTREAM
return summary_class.DOWNSTREAM
def timestamp_first_triage(self):
# called for summary specific updates (e.g. notes, bug linking)
@@ -425,14 +428,32 @@ class PerformanceAlertSummary(models.Model):
return self
class Meta:
db_table = "performance_alert_summary"
unique_together = ("repository", "framework", "prev_push", "push")
abstract = True
def __str__(self):
return f"{self.framework} {self.repository} {self.prev_push.revision}-{self.push.revision}"
class PerformanceAlert(models.Model):
class PerformanceAlertSummary(PerformanceAlertSummaryBase):
class Meta:
db_table = "performance_alert_summary"
unique_together = ("repository", "framework", "prev_push", "push")
class PerformanceAlertSummaryTesting(PerformanceAlertSummaryBase):
assignee = models.ForeignKey(
User, on_delete=models.SET_NULL, null=True, related_name="assigned_alerts_testing"
)
def autodetermine_status(self, alert_model=None):
return super().autodetermine_status(alert_model=PerformanceAlertTesting)
class Meta:
db_table = "performance_alert_summary_testing"
unique_together = ("repository", "framework", "prev_push", "push")
class PerformanceAlertBase(models.Model):
"""
A single performance alert
@@ -496,6 +517,15 @@ class PerformanceAlert(models.Model):
null=True,
)
confidence = models.FloatField(
help_text=(
"A value that indicates the confidence of the alert (specific to "
"the detection method used)"
),
null=True,
)
detection_method = models.CharField(max_length=100, null=True)
SKEWED = "SKEWED"
OUTLIERS = "OUTLIERS"
MODAL = "MODAL"
@@ -587,13 +617,34 @@ class PerformanceAlert(models.Model):
return self
class Meta:
db_table = "performance_alert"
unique_together = ("summary", "series_signature")
abstract = True
def __str__(self):
return f"{self.summary} {self.series_signature} {self.amount_pct}%"
class PerformanceAlert(PerformanceAlertBase):
class Meta:
db_table = "performance_alert"
unique_together = ("summary", "series_signature")
class PerformanceAlertTesting(PerformanceAlertBase):
summary = models.ForeignKey(
PerformanceAlertSummaryTesting, on_delete=models.CASCADE, related_name="alerts"
)
related_summary = models.ForeignKey(
PerformanceAlertSummaryTesting,
on_delete=models.CASCADE,
related_name="related_alerts",
null=True,
)
class Meta:
db_table = "performance_alert_testing"
unique_together = ("summary", "series_signature")
class PerformanceTag(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=30, unique=True)
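
With the split into base and concrete classes, the testing summary's override forwards PerformanceAlertTesting into the shared autodetermine_status() logic. A hedged sketch of recomputing a testing summary's status through that path (summary_id is an assumed value, not from this patch):

summary = PerformanceAlertSummaryTesting.objects.get(id=summary_id)

# The subclass override passes PerformanceAlertTesting down to
# PerformanceAlertSummaryBase.autodetermine_status(), so the status is derived
# from rows in performance_alert_testing rather than performance_alert.
summary.status = summary.autodetermine_status()
summary.save()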