Update pre-commit shellcheck, markdownlint and ruff + lint fixes

Heitor Neiva 2025-01-09 15:04:06 -08:00, committed by Sebastian Hengst
Parent 96868279f4
Commit 481424ac3c
50 changed files with 131 additions and 171 deletions

View file

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.9.0.5
rev: v0.10.0.1
hooks:
- id: shellcheck
- repo: https://github.com/pre-commit/mirrors-prettier
@ -8,7 +8,7 @@ repos:
hooks:
- id: prettier
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.32.2
rev: v0.43.0
hooks:
- id: markdownlint
args: [--fix]
@ -19,7 +19,7 @@ repos:
args: [--profile=black]
name: isort (python)
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.14
rev: v0.9.0
hooks:
- id: ruff
args: [--fix, --show-fixes]
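
The rest of the commit applies the fixes the newer hook versions demand. A minimal sketch of how the bumped hooks might be exercised locally, assuming the pre-commit CLI is installed in the environment (running the same commands directly in a shell is equivalent):

import subprocess

# Run every configured hook against the whole tree so fixes like the ones
# below are applied and verified in one pass (sketch only; assumes pre-commit
# is installed and .pre-commit-config.yaml is the file shown above).
subprocess.run(["pre-commit", "run", "--all-files"], check=True)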

View file

@ -1,9 +1,10 @@
#!/usr/bin/env python
""" Script to compare pushes from a Treeherder instance against production.
"""Script to compare pushes from a Treeherder instance against production.
This is useful to compare if pushes between two different instances have been
ingested differently.
"""
import argparse
import logging

View file

@ -1,5 +1,6 @@
#!/usr/bin/env python
""" Script to compare tasks from pushes on different Treeherder instances"""
"""Script to compare tasks from pushes on different Treeherder instances"""
import argparse
import logging
import pprint

View file

@ -849,10 +849,7 @@ def mock_file_bugzilla_map_request(monkeypatch):
import treeherder.etl.files_bugzilla_map
def _fetch_data(self, project):
url = (
"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/gecko.v2.%s.latest.source.source-bugzilla-info/artifacts/public/components.json"
% project
)
url = f"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/gecko.v2.{project}.latest.source.source-bugzilla-info/artifacts/public/components.json"
files_bugzilla_data = None
file_name = f"files_bugzilla_map_{project}_{self.run_id}.json"
exception = None
@ -883,7 +880,7 @@ def mock_bugscache_bugzilla_request(monkeypatch):
def _fetch_intermittent_bugs(additional_params, limit, duplicate_chain_length):
tests_folder = os.path.dirname(__file__)
file_name = "run-%s.json" % str(duplicate_chain_length)
file_name = f"run-{str(duplicate_chain_length)}.json"
data_path = os.path.join(tests_folder, "sample_data", "bugscache_population", file_name)
with open(data_path) as f:
bugzilla_data = json.load(f)
@ -1209,7 +1206,7 @@ def group_data(transactional_db, eleven_job_blobs, create_jobs):
job = eleven_job_blobs[i]
job["job"].update(
{
"taskcluster_task_id": "V3SVuxO8TFy37En_6HcXL%s" % i,
"taskcluster_task_id": f"V3SVuxO8TFy37En_6HcXL{i}",
"taskcluster_retry_id": "0",
"name": jt[i].name,
}
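
Most of the Python changes in this commit follow the same pattern as the hunks above: printf-style %-formatting is rewritten into f-strings (the kind of fix ruff's pyupgrade-style rules apply). A minimal sketch of the equivalence, using a placeholder index value:

# Placeholder value for illustration; both forms build the identical string.
i = 3
old_style = "V3SVuxO8TFy37En_6HcXL%s" % i
new_style = f"V3SVuxO8TFy37En_6HcXL{i}"
assert old_style == new_style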

View file

@ -161,9 +161,9 @@ def test_ingest_pulse_job_with_long_job_type_name(
job = pulse_jobs[0]
jl = JobLoader()
revision = push_stored[0]["revision"]
job["display"][
"jobName"
] = "this is a very long string that exceeds the 100 character size that was the previous limit by just a little bit"
job["display"]["jobName"] = (
"this is a very long string that exceeds the 100 character size that was the previous limit by just a little bit"
)
job["origin"]["revision"] = revision
jl.process_job(job, "https://firefox-ci-tc.services.mozilla.com")

View file

@ -220,7 +220,7 @@ def test_irrelevant_repos_data_removal(
relevant_repository.save()
# hack after changing tests.settings.TREEHERDER_TEST_REPOSITORY_NAME to be m-c
test_repository.name = "%s-test" % test_repository.name
test_repository.name = f"{test_repository.name}-test"
test_repository.save()
six_months_ago_timestamp = datetime.now() - timedelta(days=(6 * 30))
@ -311,12 +311,12 @@ def test_total_emails_sent(
for n in range(0, total_signatures):
PerformanceSignature.objects.create(
repository=test_perf_signature.repository,
signature_hash=(20 * ("t%s" % n)),
signature_hash=(20 * (f"t{n}")),
framework=test_perf_signature.framework,
platform=test_perf_signature.platform,
option_collection=test_perf_signature.option_collection,
suite="mysuite%s" % n,
test="mytest%s" % n,
suite=f"mysuite{n}",
test=f"mytest{n}",
application="firefox",
has_subtests=test_perf_signature.has_subtests,
extra_options=test_perf_signature.extra_options,
@ -326,12 +326,12 @@ def test_total_emails_sent(
for n in range(0, 10):
PerformanceSignature.objects.create(
repository=try_repository,
signature_hash=(20 * ("e%s" % n)),
signature_hash=(20 * (f"e{n}")),
framework=test_perf_signature.framework,
platform=test_perf_signature.platform,
option_collection=test_perf_signature.option_collection,
suite="mysuite%s" % n,
test="mytest%s" % n,
suite=f"mysuite{n}",
test=f"mytest{n}",
application="firefox",
has_subtests=test_perf_signature.has_subtests,
extra_options=test_perf_signature.extra_options,

View file

@ -34,14 +34,22 @@ class TestDeletionNotificationWriter:
@staticmethod
def __prepare_expected_content(test_perf_signature):
expected_content = DeletionReportContent.DESCRIPTION + DeletionReportContent.TABLE_HEADERS
expected_content += """| {repository} | {framework} | {platform} | {suite} | {application} | {last_updated} |""".format(
repository=test_perf_signature.repository.name,
framework=test_perf_signature.framework.name,
platform=test_perf_signature.platform.platform,
suite=test_perf_signature.suite,
application=test_perf_signature.application,
last_updated=test_perf_signature.last_updated.date(),
expected_content = "".join(
(
DeletionReportContent.DESCRIPTION,
DeletionReportContent.TABLE_HEADERS,
"| ",
" | ".join(
(
test_perf_signature.repository.name,
test_perf_signature.framework.name,
test_perf_signature.platform.platform,
test_perf_signature.suite,
test_perf_signature.application,
str(test_perf_signature.last_updated.date()),
)
),
" |\n",
)
)
expected_content += "\n"
return expected_content

View file

@ -59,7 +59,7 @@ def _generate_performance_data(
):
push, _ = Push.objects.get_or_create(
repository=test_repository,
revision="1234abcd%s" % t,
revision=f"1234abcd{t}",
defaults={
"author": "foo@bar.com",
"time": datetime.datetime.fromtimestamp(base_timestamp + t),

View file

@ -2,6 +2,7 @@
Functions for flexible generation of sample input job JSON.
"""
import time
from datetime import timedelta

View file

@ -19,9 +19,7 @@ class SampleData:
self.text_log_summary = json.load(f)
with open(
"{}/sample_data/pulse_consumer/taskcluster_pulse_messages.json".format(
os.path.dirname(__file__)
)
f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/taskcluster_pulse_messages.json"
) as f:
self.taskcluster_pulse_messages = json.load(f)
@ -31,9 +29,7 @@ class SampleData:
self.taskcluster_tasks = json.load(f)
with open(
"{}/sample_data/pulse_consumer/taskcluster_transformed_jobs.json".format(
os.path.dirname(__file__)
)
f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/taskcluster_transformed_jobs.json"
) as f:
self.taskcluster_transformed_jobs = json.load(f)

View file

@ -9,7 +9,7 @@ def test_future_date(group_data, client):
today = datetime.datetime.today().date()
tomorrow = today + datetime.timedelta(days=1)
url = reverse("groupsummary") + "?startdate=%s" % tomorrow
url = reverse("groupsummary") + f"?startdate={tomorrow}"
resp = client.get(url)
assert resp.status_code == 200
assert resp.json() == expected
@ -29,7 +29,8 @@ def test_default_date(group_data, client):
# test jobname chunk removal and aggregation
def test_summarized(group_data, client):
expected = group_data["expected"]
url = reverse("groupsummary") + "?startdate=%s" % str(group_data["date"]).split(" ")[0]
startdate = str(group_data["date"]).split(" ")[0]
url = reverse("groupsummary") + f"?startdate={startdate}"
resp = client.get(url)
assert resp.status_code == 200
assert resp.json() == expected

View file

@ -100,7 +100,7 @@ def test_create_note(client, test_job, test_user, test_no_auth):
assert resp.status_code == 200
content = json.loads(resp.content)
assert content["message"] == "note stored for job %s" % test_job.id
assert content["message"] == f"note stored for job {test_job.id}"
note_list = JobNote.objects.filter(job=test_job)

View file

@ -885,8 +885,9 @@ def test_revision_is_not_found(client, test_perf_signature, test_perfcomp_push):
response = client.get(reverse("perfcompare-results") + query_params)
assert response.status_code == 400
assert response.json() == "No base push with revision {} from repo {}.".format(
non_existent_revision, test_perf_signature.repository.name
assert (
response.json()
== f"No base push with revision {non_existent_revision} from repo {test_perf_signature.repository.name}."
)
query_params = (
@ -902,8 +903,9 @@ def test_revision_is_not_found(client, test_perf_signature, test_perfcomp_push):
response = client.get(reverse("perfcompare-results") + query_params)
assert response.status_code == 400
assert response.json() == "No new push with revision {} from repo {}.".format(
non_existent_revision, test_perf_signature.repository.name
assert (
response.json()
== f"No new push with revision {non_existent_revision} from repo {test_perf_signature.repository.name}."
)

View file

@ -201,7 +201,7 @@ def test_filter_signatures_by_framework(
# Filter by original framework
resp = client.get(
reverse("performance-signatures-list", kwargs={"project": test_repository.name})
+ "?framework=%s" % test_perf_signature.framework.id,
+ f"?framework={test_perf_signature.framework.id}",
)
assert resp.status_code == 200
assert len(resp.data.keys()) == 1
@ -210,7 +210,7 @@ def test_filter_signatures_by_framework(
# Filter by new framework
resp = client.get(
reverse("performance-signatures-list", kwargs={"project": test_repository.name})
+ "?framework=%s" % signature2.framework.id,
+ f"?framework={signature2.framework.id}",
)
assert resp.status_code == 200
assert len(resp.data.keys()) == 1
@ -305,9 +305,7 @@ def test_filter_data_by_framework(
# Filtering by first framework
resp = client.get(
reverse("performance-data-list", kwargs={"project": test_repository.name})
+ "?signatures={}&framework={}".format(
test_perf_signature.signature_hash, test_perf_signature.framework.id
)
+ f"?signatures={test_perf_signature.signature_hash}&framework={test_perf_signature.framework.id}"
)
assert resp.status_code == 200
datums = resp.data[test_perf_signature.signature_hash]
@ -371,7 +369,7 @@ def test_filter_data_by_interval(
):
push = Push.objects.create(
repository=test_repository,
revision="abcdefgh%s" % i,
revision=f"abcdefgh{i}",
author="foo@bar.com",
time=timestamp,
)
@ -409,7 +407,7 @@ def test_filter_data_by_range(
):
push = Push.objects.create(
repository=test_repository,
revision="abcdefgh%s" % i,
revision=f"abcdefgh{i}",
author="foo@bar.com",
time=timestamp,
)
@ -479,17 +477,9 @@ def test_filter_data_by_signature(
def test_perf_summary(client, test_perf_signature, test_perf_data):
query_params1 = (
"?repository={}&framework={}&interval=172800&no_subtests=true&revision={}".format(
test_perf_signature.repository.name,
test_perf_signature.framework_id,
test_perf_data[0].push.revision,
)
)
query_params1 = f"?repository={test_perf_signature.repository.name}&framework={test_perf_signature.framework_id}&interval=172800&no_subtests=true&revision={test_perf_data[0].push.revision}"
query_params2 = "?repository={}&framework={}&interval=172800&no_subtests=true&startday=2013-11-01T23%3A28%3A29&endday=2013-11-30T23%3A28%3A29".format(
test_perf_signature.repository.name, test_perf_signature.framework_id
)
query_params2 = f"?repository={test_perf_signature.repository.name}&framework={test_perf_signature.framework_id}&interval=172800&no_subtests=true&startday=2013-11-01T23%3A28%3A29&endday=2013-11-30T23%3A28%3A29"
expected = [
{
@ -536,9 +526,7 @@ def test_data_points_from_same_push_are_ordered_chronologically(
As job ids are auto incremented, older jobs have smaller ids than newer ones.
Thus, these ids are sufficient to check for chronological order.
"""
query_params = "?repository={}&framework={}&interval=172800&no_subtests=true&startday=2013-11-01T23%3A28%3A29&endday=2013-11-30T23%3A28%3A29".format(
test_perf_signature.repository.name, test_perf_signature.framework_id
)
query_params = f"?repository={test_perf_signature.repository.name}&framework={test_perf_signature.framework_id}&interval=172800&no_subtests=true&startday=2013-11-01T23%3A28%3A29&endday=2013-11-30T23%3A28%3A29"
response = client.get(reverse("performance-summary") + query_params)
assert response.status_code == 200
@ -551,12 +539,7 @@ def test_no_retriggers_perf_summary(
client, push_stored, test_perf_signature, test_perf_signature_2, test_perf_data
):
push = Push.objects.get(id=1)
query_params = "?repository={}&framework={}&no_subtests=true&revision={}&all_data=true&signature={}".format(
test_perf_signature.repository.name,
test_perf_signature.framework_id,
push.revision,
test_perf_signature.id,
)
query_params = f"?repository={test_perf_signature.repository.name}&framework={test_perf_signature.framework_id}&no_subtests=true&revision={push.revision}&all_data=true&signature={test_perf_signature.id}"
PerformanceDatum.objects.create(
repository=test_perf_signature.repository,
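
The query strings above are collapsed onto single long f-string lines; an alternative not used by this commit would be to assemble them with urllib.parse.urlencode, sketched here with placeholder values:

from urllib.parse import urlencode

# Placeholder values for illustration only.
params = {
    "repository": "autoland",
    "framework": 1,
    "interval": 172800,
    "no_subtests": "true",
    "startday": "2013-11-01T23:28:29",
    "endday": "2013-11-30T23:28:29",
}
query_params = "?" + urlencode(params)  # colons are percent-encoded automatically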

View file

@ -1,5 +1,5 @@
""" Collector, grabs changes in various sources and put them in a DB.
"""
"""Collector, grabs changes in various sources and put them in a DB."""
import json
import os

View file

@ -13,7 +13,7 @@ def update_changelog(days=1):
"""
Collect changes and update the DB.
"""
logger.info("Updating unified changelog (days=%d)" % days)
logger.info(f"Updating unified changelog (days={days})")
# collecting last day of changes across all sources
since = datetime.datetime.now() - datetime.timedelta(days=days)
since = since.strftime("%Y-%m-%dT%H:%M:%S")
@ -34,6 +34,4 @@ def update_changelog(days=1):
created += 1
[ChangelogFile.objects.create(name=name, changelog=changelog) for name in files]
logger.info(
"Found %d items, %d existed and %d where created." % (created + existed, existed, created)
)
logger.info(f"Found {created + existed} items, {existed} existed and {created} where created.")

View file

@ -369,9 +369,9 @@ CELERY_TASK_TIME_LIMIT = CELERY_TASK_SOFT_TIME_LIMIT + 30
# Periodically publish runtime statistics on statsd (in minutes)
CELERY_STATS_PUBLICATION_DELAY = 5
assert (
0 < CELERY_STATS_PUBLICATION_DELAY < 60 and 60 % 10 == 0
), "Celery task must be a valid cron delay in minutes"
assert 0 < CELERY_STATS_PUBLICATION_DELAY < 60 and 60 % 10 == 0, (
"Celery task must be a valid cron delay in minutes"
)
CELERY_BEAT_SCHEDULE = {
# this is just a failsafe in case the Pulse ingestion misses something

View file

@ -6,6 +6,7 @@ and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
"""
import os
from django.core.wsgi import get_wsgi_application

View file

@ -78,8 +78,7 @@ def store_job_artifacts(artifact_data):
job_guid = artifact.get("job_guid")
if not job_guid:
logger.error(
"load_job_artifacts: Artifact '%s' with no " "job guid set, skipping",
artifact_name,
f"load_job_artifacts: Artifact '{artifact_name}' with no job guid set, skipping"
)
continue
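
Converting logging calls from lazy %-style arguments to f-strings, as here and in several files below, trades deferred formatting for eager formatting: the f-string is built even when the log level is disabled. A minimal sketch of the difference, with a hypothetical logger and placeholder value:

import logging

logger = logging.getLogger("example")  # hypothetical logger name
artifact_name = "text_log_summary"  # placeholder value

# Lazy: the message is only interpolated if ERROR-level logging is enabled.
logger.error("Artifact '%s' with no job guid set, skipping", artifact_name)
# Eager: the f-string is interpolated before logger.error() is even called.
logger.error(f"Artifact '{artifact_name}' with no job guid set, skipping")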

View file

@ -32,9 +32,9 @@ class ClassificationLoader:
task_id = pulse_job["status"]["taskId"]
task_definition = get_task_definition(root_url, task_id)
assert (
"routes" in task_definition and len(task_definition["routes"]) > 0
), "A route containing the push project and revision is needed to save the mozci classification"
assert "routes" in task_definition and len(task_definition["routes"]) > 0, (
"A route containing the push project and revision is needed to save the mozci classification"
)
# Retrieving a Push object thanks to the project/revision parsed from the task first route
try:
push = self.get_push(task_definition["routes"][0])
@ -53,9 +53,9 @@ class ClassificationLoader:
# Saving the mozci classification in the database
results = dict(MozciClassification.CLASSIFICATION_RESULT)
classification = classification_json["push"]["classification"]
assert (
classification in results.keys()
), "Classification result should be a value in BAD, GOOD, UNKNOWN"
assert classification in results.keys(), (
"Classification result should be a value in BAD, GOOD, UNKNOWN"
)
logger.info(
"Storing mozci classification calculated as %s for push %s on repository %s",

View file

@ -84,10 +84,7 @@ class FilesBugzillaMapProcess:
)
def fetch_data(self, project):
url = (
"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/gecko.v2.%s.latest.source.source-bugzilla-info/artifacts/public/components.json"
% project
)
url = f"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/gecko.v2.{project}.latest.source.source-bugzilla-info/artifacts/public/components.json"
files_bugzilla_data = None
exception = None
try:

View file

@ -104,7 +104,7 @@ def _ingest_hg_push(project, revision, fetch_push_id=None):
# get reference to repo
repo = Repository.objects.get(name=project, active_status="active")
# get hg pushlog
pushlog_url = "%s/json-pushes/?full=1&version=2" % repo.url
pushlog_url = f"{repo.url}/json-pushes/?full=1&version=2"
# ingest this particular revision for this project
process = HgPushlogProcess()
# Use the actual push SHA, in case the changeset specified was a tag

View file

@ -157,13 +157,12 @@ def _load_perf_datum(job: Job, perf_datum: dict):
if perf_datum["framework"]["name"] == "job_resource_usage":
return
logger.warning(
"Performance framework %s does not exist, skipping " "load of performance artifacts",
perf_datum["framework"]["name"],
f"Performance framework {perf_datum['framework']['name']} does not exist, skipping load of performance artifacts"
)
return
if not framework.enabled:
logger.info(
"Performance framework %s is not enabled, skipping", perf_datum["framework"]["name"]
f"Performance framework {perf_datum['framework']['name']} is not enabled, skipping"
)
return
application = _get_application_name(perf_datum)

View file

@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
def store_push(repository, push_dict):
push_revision = push_dict.get("revision")
if not push_dict.get("revision"):
raise ValueError("Push must have a revision " "associated with it!")
raise ValueError("Push must have a revision associated with it!")
with transaction.atomic():
push, _ = Push.objects.update_or_create(
repository=repository,

View file

@ -24,7 +24,7 @@ class PushLoader:
newrelic.agent.add_custom_attribute("branch", transformer.branch)
repos = Repository.objects
if transformer.branch:
repos = repos.filter(branch__regex="(^|,)%s($|,)" % transformer.branch)
repos = repos.filter(branch__regex=f"(^|,){transformer.branch}($|,)")
else:
repos = repos.filter(branch=None)
repo = repos.get(url=transformer.repo_url, active_status="active")

View file

@ -16,7 +16,7 @@ ONE_WEEK_IN_SECONDS = 604800
def last_push_id_from_server(repo):
"""Obtain the last push ID from a ``Repository`` instance."""
url = "%s/json-pushes/?version=2" % repo.url
url = f"{repo.url}/json-pushes/?version=2"
data = fetch_json(url)
return data["lastpushid"]
@ -92,15 +92,12 @@ class HgPushlogProcess:
else:
if changeset:
logger.info(
"Getting all pushes for '%s' corresponding to " "changeset '%s'",
repository_name,
changeset,
f"Getting all pushes for '{repository_name}' corresponding to changeset '{changeset}'"
)
extracted_content = self.extract(source_url + "&changeset=" + changeset)
extracted_content = self.extract(f"{source_url}&changeset={changeset}")
else:
logger.warning(
"Unable to get last push from cache for '%s', " "getting all pushes",
repository_name,
f"Unable to get last push from cache for '{repository_name}', getting all pushes",
)
extracted_content = self.extract(source_url)

View file

@ -1,6 +1,7 @@
"""
This module contains tasks related to pulse job ingestion
"""
import asyncio
import newrelic.agent

View file

@ -92,9 +92,7 @@ class ArtifactBuilderCollection:
)
if download_size_in_bytes > MAX_DOWNLOAD_SIZE_IN_BYTES:
raise LogSizeError(
"Download size of %i bytes exceeds limit" % download_size_in_bytes
)
raise LogSizeError(f"Download size of {download_size_in_bytes} bytes exceeds limit")
# Lines must be explicitly decoded since `iter_lines()`` returns bytes by default
# and we cannot use its `decode_unicode=True` mode, since otherwise Unicode newline

View file

@ -43,6 +43,6 @@ class Command(BaseCommand):
print(f"{name}, {json.dumps(artifact, indent=2)}")
if options["profile"]:
print("Timings: %s" % times)
print("Average: %s" % (sum(times) / len(times)))
print("Total: %s" % sum(times))
print(f"Timings: {times}")
print(f"Average: {sum(times) / len(times)}")
print(f"Total: {sum(times)}")

View file

@ -201,11 +201,9 @@ class PerformanceParser(ParserBase):
validate_perf_data(data)
self.artifact.append(data)
except ValueError:
logger.warning("Unable to parse Perfherder data from line: %s", line)
logger.warning(f"Unable to parse Perfherder data from line: {line}")
except jsonschema.ValidationError as e:
logger.warning(
"Perfherder line '%s' does not comply with " "json schema: %s", line, e
)
logger.warning(f"Perfherder line '{line}' does not comply with json schema: {e}")
# Don't mark the parser as complete, in case there are multiple performance artifacts.

View file

@ -39,7 +39,7 @@ def parse_logs(job_id, job_log_ids, priority):
first_exception = None
completed_names = set()
for job_log in job_logs:
newrelic.agent.add_custom_attribute("job_log_%s_url" % job_log.name, job_log.url)
newrelic.agent.add_custom_attribute(f"job_log_{job_log.name}_url", job_log.url)
logger.info("parser_task for %s", job_log.id)
# Only parse logs which haven't yet been processed or else failed on the last attempt.

View file

@ -253,7 +253,7 @@ def get_error_search_term_and_path(error_line):
test_name_or_path = REFTEST_RE.sub("", test_name_or_path).replace("\\", "/")
# split marionette paths to only include the filename
if MARIONETTE_RE.search(test_name_or_path):
test_name_or_path = "%s.py" % test_name_or_path.split(".py ")[0]
test_name_or_path = f"{test_name_or_path.split('.py ')[0]}.py"
path_end = test_name_or_path
# if this is a path, we are interested in the last part
search_term = test_name_or_path.split("/")[-1]

View file

@ -38,9 +38,7 @@ class Command(BaseCommand):
TextLogError.objects.bulk_update(chunked_queryset, ["job"])
logger.warning(
"successfully added job_id in TextLogError table to rows {} to {}".format(
chunked_queryset[0].id, chunked_queryset[-1].id
)
f"successfully added job_id in TextLogError table to rows {chunked_queryset[0].id} to {chunked_queryset[-1].id}"
)
logger.warning("successfully finished backfilling job_ids in the TextLogError table")

View file

@ -45,9 +45,7 @@ class Command(BaseCommand):
dest="chunk_size",
default=100,
type=int,
help=(
"Define the size of the chunks " "Split the job deletes into chunks of this size"
),
help=("Define the size of the chunks Split the job deletes into chunks of this size"),
)
parser.add_argument(
"--sleep-time",

View file

@ -265,9 +265,7 @@ class Bugscache(models.Model):
except ProgrammingError as e:
newrelic.agent.notice_error()
logger.error(
"Failed to execute FULLTEXT search on Bugscache, error={}, SQL={}".format(
e, recent_qs.query.__str__()
)
f"Failed to execute FULLTEXT search on Bugscache, error={e}, SQL={recent_qs.query.__str__()}"
)
open_recent = []
all_others = []
@ -399,7 +397,6 @@ class FailureClassification(NamedModel):
class ReferenceDataSignatures(models.Model):
"""
A collection of all the possible combinations of reference data,
populated on data ingestion. signature is a hash of the data it refers to
@ -946,7 +943,7 @@ class FailureLine(models.Model):
def _serialized_components(self):
if self.action == "test_result":
return ["TEST-UNEXPECTED-%s" % self.status.upper(), self.test]
return [f"TEST-UNEXPECTED-{self.status.upper()}", self.test]
if self.action == "log":
return [self.level.upper(), self.message.split("\n")[0]]

View file

@ -112,7 +112,7 @@ def generate_new_alerts_in_series(signature):
if not isinstance(noise_profile, str):
raise Exception(
"Expecting a string as a " f"noise profile, got: {type(noise_profile)}"
f"Expecting a string as a noise profile, got: {type(noise_profile)}"
)
except Exception:
# Fail without breaking the alert computation

View file

@ -264,9 +264,7 @@ class IdentifyAlertRetriggerables:
retrigger_range = len(data_points_to_retrigger)
if retrigger_range < self._range_width:
self.log.warning(
"Found small backfill range (of size {} instead of {})".format(
retrigger_range, self._range_width
)
f"Found small backfill range (of size {retrigger_range} instead of {self._range_width})"
)

View file

@ -5,6 +5,7 @@ Its clients should only instantiate their writer of choice &
provide it with some basic data to include in the email.
They then get an email that's ready-to-send via taskcluster.Notify service.
"""
import logging
import re
import urllib.parse

View file

@ -7,6 +7,7 @@ The subcommand's sole purpose is to act as a smoke test
harness that quickly does an end-to-end check over the
functionality of the `BackfillTool`.
"""
from django.core.management.base import BaseCommand
from treeherder.perf.auto_perf_sheriffing.factories import backfill_tool_factory

View file

@ -26,7 +26,7 @@ class Command(BaseCommand):
def handle(self, *args, **options):
if not options["project"]:
raise CommandError("Must specify at least one project with " "--project")
raise CommandError("Must specify at least one project with --project")
for project in options["project"]:
repository = models.Repository.objects.get(name=project)

View file

@ -4,6 +4,7 @@ This subcommand isn't intended for regular use.
The subcommand's sole purpose is to clean the database of dirty
performance_datum, in case a revert migration is needed.
"""
from django.core.management.base import BaseCommand
from treeherder.perf.models import MultiCommitDatum, PerformanceDatum

View file

@ -4,6 +4,7 @@ TODO: Remove this module entirely once all vcs data
environments (from prototype up to & including production).
See bug https://bugzilla.mozilla.org/show_bug.cgi?id=1694335
"""
import time
from datetime import timedelta

View file

@ -48,7 +48,7 @@ class Command(BaseCommand):
def handle(self, *args, **options):
if not options["project"]:
raise CommandError("Must specify at least one project with " "--project")
raise CommandError("Must specify at least one project with --project")
pc = PerfherderClient(server_url=options["server"])

View file

@ -497,7 +497,7 @@ class PerformanceAlert(models.Model):
prev_value = models.FloatField(help_text="Previous value of series before change")
new_value = models.FloatField(help_text="New value of series after change")
t_value = models.FloatField(
help_text="t value out of analysis indicating confidence " "that change is 'real'",
help_text="t value out of analysis indicating confidence that change is 'real'",
null=True,
)
@ -549,7 +549,7 @@ class PerformanceAlert(models.Model):
# or absence of a related summary
if self.related_summary and self.status not in self.RELATIONAL_STATUS_IDS:
raise ValidationError(
"Related summary set but status not in " "'{}'!".format(
"Related summary set but status not in '{}'!".format(
", ".join(
[
STATUS[1]
@ -561,7 +561,7 @@ class PerformanceAlert(models.Model):
)
if not self.related_summary and self.status not in self.UNRELATIONAL_STATUS_IDS:
raise ValidationError(
"Related summary not set but status not in " "'{}'!".format(
"Related summary not set but status not in '{}'!".format(
", ".join(
[
STATUS[1]

View file

@ -106,14 +106,9 @@ class RevisionDatum:
return self.push_timestamp < o.push_timestamp
def __repr__(self):
values_str = "[ %s ]" % ", ".join(["%.3f" % value for value in self.values])
return "<{}: {}, {}, {:.3f}, {}>".format(
self.push_timestamp,
self.push_id,
values_str,
self.t,
self.change_detected,
)
values_csv = ", ".join([f"{value:.3f}" for value in self.values])
values_str = f"[ {values_csv} ]"
return f"<{self.push_timestamp}: {self.push_id}, {values_str}, {self.t:.3f}, {self.change_detected}>"
def detect_changes(data, min_back_window=12, max_back_window=24, fore_window=12, t_threshold=7):

View file

@ -23,11 +23,11 @@ def clean_test(test, signature, message):
left, right, *rest = clean_name.split(splitter)
if "tests/layout/" in left and "tests/layout/" in right:
left = "layout%s" % left.split("tests/layout")[1]
right = "layout%s" % right.split("tests/layout")[1]
left = f"layout{left.split('tests/layout')[1]}"
right = f"layout{right.split('tests/layout')[1]}"
elif "build/tests/reftest/tests/" in left and "build/tests/reftest/tests/" in right:
left = "%s" % left.split("build/tests/reftest/tests/")[1]
right = "%s" % right.split("build/tests/reftest/tests/")[1]
left = left.split("build/tests/reftest/tests/")[1]
right = right.split("build/tests/reftest/tests/")[1]
elif clean_name.startswith("http://10.0"):
left = "/tests/".join(left.split("/tests/")[1:])
right = "/tests/".join(right.split("/tests/")[1:])

View file

@ -236,11 +236,11 @@ class PerformanceDatumViewSet(viewsets.ViewSet):
if not (signature_ids or signature_hashes or push_ids or job_ids):
raise exceptions.ValidationError(
"Need to specify either " "signature_id, signatures, " "push_id, or job_id"
"Need to specify either signature_id, signatures, push_id, or job_id"
)
if signature_ids and signature_hashes:
raise exceptions.ValidationError(
"Can't specify both signature_id " "and signatures in same query"
"Can't specify both signature_id and signatures in same query"
)
datums = PerformanceDatum.objects.filter(repository=repository).select_related(
@ -554,7 +554,7 @@ class PerformanceAlertViewSet(viewsets.ModelViewSet):
data = request.data
if "summary_id" not in data or "signature_id" not in data:
return Response(
{"message": "Summary and signature ids necessary " "to create alert"},
{"message": "Summary and signature ids necessary to create alert"},
status=HTTP_400_BAD_REQUEST,
)
@ -1194,24 +1194,26 @@ class PerfCompareResults(generics.ListAPIView):
highlighted_revisions_params.append((highlighted_revision_key, base_revision[:12]))
highlighted_revisions_params.append((highlighted_revision_key, new_revision[:12]))
graph_link = "graphs?%s" % urlencode(highlighted_revisions_params)
encoded = urlencode(highlighted_revisions_params)
graph_link = f"graphs?{encoded}"
if new_repo_name == base_repo_name:
# if repo for base and new are not the same then make diff
# series data one for each repo, else generate one
repo_value = ",".join([new_repo_name, signature, "1", framework])
graph_link = graph_link + "&%s" % urlencode({series_key: repo_value})
encoded = urlencode({series_key: repo_value})
graph_link = f"{graph_link}&{encoded}"
else:
# if repos selected are not the same
base_repo_value = ",".join([base_repo_name, signature, "1", framework])
new_repo_value = ",".join([new_repo_name, signature, "1", framework])
encoded = urlencode([(series_key, base_repo_value), (series_key, new_repo_value)])
graph_link = f"{graph_link}&{encoded}"
graph_link = graph_link + "&%s" % encoded
encoded = urlencode({time_range_key: time_range})
graph_link = f"{graph_link}&{encoded}"
graph_link = graph_link + "&%s" % urlencode({time_range_key: time_range})
return "https://treeherder.mozilla.org/perfherder/%s" % graph_link
return f"https://treeherder.mozilla.org/perfherder/{graph_link}"
@staticmethod
def _get_interval(base_push, new_push):

View file

@ -185,17 +185,13 @@ class PerformanceAlertSerializer(serializers.ModelSerializer):
and instance.summary.repository_id != related_summary.repository_id
):
raise exceptions.ValidationError(
"New summary's repository ({}) does not match existing "
"summary's repository ({})".format(
related_summary.repository, instance.summary.framework
)
f"New summary's repository ({related_summary.repository}) does not match existing "
f"summary's repository ({instance.summary.framework})"
)
elif instance.summary.framework_id != related_summary.framework_id:
raise exceptions.ValidationError(
"New summary's framework ({}) does not match existing "
"summary's framework ({})".format(
related_summary.framework, instance.summary.framework
)
f"New summary's framework ({related_summary.framework}) does not match existing "
f"summary's framework ({instance.summary.framework})"
)
status = validated_data.get("status")

View file

@ -2,7 +2,6 @@ from rest_framework import permissions
class IsStaffOrReadOnly(permissions.BasePermission):
"""
The request is authenticated as an admin staff (eg. sheriffs), or is a read-only request.
"""

View file

@ -11,7 +11,6 @@ from treeherder.webapp.api import serializers as th_serializers
class RepositoryViewSet(viewsets.ReadOnlyModelViewSet):
"""ViewSet for the refdata Repository model"""
queryset = models.Repository.objects.filter(active_status="active").select_related(
@ -21,7 +20,6 @@ class RepositoryViewSet(viewsets.ReadOnlyModelViewSet):
class OptionCollectionHashViewSet(viewsets.ViewSet):
"""ViewSet for the virtual OptionCollectionHash model"""
def list(self, request):
@ -39,7 +37,6 @@ class OptionCollectionHashViewSet(viewsets.ViewSet):
class FailureClassificationViewSet(viewsets.ReadOnlyModelViewSet):
"""ViewSet for the refdata FailureClassification model"""
queryset = models.FailureClassification.objects
@ -47,7 +44,6 @@ class FailureClassificationViewSet(viewsets.ReadOnlyModelViewSet):
class TaskclusterMetadataViewSet(viewsets.ReadOnlyModelViewSet):
"""ViewSet for the refdata TaskclusterMetadata model"""
serializer_class = th_serializers.TaskclusterMetadataSerializer
@ -63,7 +59,6 @@ class TaskclusterMetadataViewSet(viewsets.ReadOnlyModelViewSet):
class UserViewSet(viewsets.ReadOnlyModelViewSet):
"""
Info about a logged-in user.
Used by Treeherder's UI to inspect user properties