Change `Optional[...]` to `... | None` (#3910)

Kaiji Fu 2023-12-09 17:52:11 -05:00 committed by GitHub
Parent 6372518a2a
Commit a4dca7c1aa
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 44 additions and 47 deletions
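
The change itself is mechanical: every `Optional[X]` annotation becomes `X | None` (PEP 604) and the now-unused `Optional` import is dropped. A minimal sketch of the equivalence, assuming Python 3.10 or newer (signature annotations are evaluated at definition time, so older interpreters would need `from __future__ import annotations` or quoted annotations):

```python
from typing import Optional


def old_style(bug_id: Optional[int] = None) -> Optional[str]:
    return None if bug_id is None else f"bug {bug_id}"


def new_style(bug_id: int | None = None) -> str | None:
    # Identical meaning for type checkers; on Python 3.10+ `int | None`
    # builds a types.UnionType at runtime, so no typing import is needed.
    return None if bug_id is None else f"bug {bug_id}"


assert old_style(123) == new_style(123) == "bug 123"
```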

View file

@ -9,7 +9,7 @@ import math
import re
from datetime import datetime
from logging import INFO, basicConfig, getLogger
from typing import Iterable, Iterator, NewType, Optional
from typing import Iterable, Iterator, NewType
import tenacity
from dateutil.relativedelta import relativedelta
@ -82,7 +82,7 @@ MAINTENANCE_EFFECTIVENESS_SEVERITY_DEFAULT_WEIGHT = 3
INCLUDE_FIELDS = ["_default", "filed_via"]
def get_bugs(include_invalid: Optional[bool] = False) -> Iterator[BugDict]:
def get_bugs(include_invalid: bool | None = False) -> Iterator[BugDict]:
yield from (
bug
for bug in db.read(BUGS_DB)
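
The new spelling also works when the default is not `None`, as in `get_bugs` above. A small hypothetical sketch of that pattern, filtering a stream of bug dicts with a `bool | None` flag (the field names are illustrative):

```python
from typing import Iterator


def iter_bugs(bugs: list[dict], include_invalid: bool | None = False) -> Iterator[dict]:
    # Treat None like False: only yield invalid bugs when explicitly requested.
    for bug in bugs:
        if include_invalid or bug.get("product") != "Invalid Bugs":
            yield bug


bugs = [{"id": 1, "product": "Firefox"}, {"id": 2, "product": "Invalid Bugs"}]
assert [b["id"] for b in iter_bugs(bugs)] == [1]
assert [b["id"] for b in iter_bugs(bugs, include_invalid=True)] == [1, 2]
```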

View file

@ -4,7 +4,7 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
import logging
from typing import Iterable, Optional
from typing import Iterable
import numpy as np
import xgboost
@ -33,7 +33,7 @@ TYPE_LIST = sorted(set(KEYWORD_DICT.values()))
def bug_to_types(
bug: bugzilla.BugDict, bug_map: Optional[dict[int, bugzilla.BugDict]] = None
bug: bugzilla.BugDict, bug_map: dict[int, bugzilla.BugDict] | None = None
) -> list[str]:
types = set()
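
`bug_map` above shows the usual reason such parameters are optional in the first place: `None` stands in for a mutable default. A hedged sketch of that idiom with the new syntax (the duplicate-resolution logic is illustrative, not bugbug's):

```python
BugDict = dict  # stand-in for bugzilla.BugDict


def resolve_duplicate(bug: BugDict, bug_map: dict[int, BugDict] | None = None) -> BugDict:
    # `| None` instead of a mutable `{}` default avoids sharing one dict across
    # calls; callers that already hold a map of bugs can pass it in.
    if bug_map is None:
        bug_map = {}
    dupe_of = bug.get("dupe_of")
    return bug_map.get(dupe_of, bug) if dupe_of is not None else bug


primary = {"id": 1}
assert resolve_duplicate({"id": 2, "dupe_of": 1}, {1: primary}) is primary
assert resolve_duplicate({"id": 3}) == {"id": 3}
```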

View file

@ -11,7 +11,7 @@ import multiprocessing as mp
import pickle
import statistics
from functools import reduce
from typing import Any, Callable, Collection, Iterable, Optional, Sequence, Set
from typing import Any, Callable, Collection, Iterable, Sequence, Set
import numpy as np
import xgboost
@ -37,7 +37,7 @@ logger = logging.getLogger(__name__)
def get_commit_map(
revs: Optional[Set[test_scheduling.Revision]] = None,
revs: Set[test_scheduling.Revision] | None = None,
) -> dict[test_scheduling.Revision, repository.CommitDict]:
commit_map = {}
@ -562,7 +562,7 @@ class TestSelectModel(Model):
self,
commits: Sequence[repository.CommitDict],
confidence: float = 0.5,
push_num: Optional[int] = None,
push_num: int | None = None,
) -> dict[str, float]:
commit_data = commit_features.merge_commits(commits)
@ -692,9 +692,9 @@ class TestSelectModel(Model):
def do_eval(
executor: concurrent.futures.ProcessPoolExecutor,
confidence_threshold: float,
reduction: Optional[float],
cap: Optional[int],
minimum: Optional[int],
reduction: float | None,
cap: int | None,
minimum: int | None,
) -> None:
futures: dict[concurrent.futures.Future, dict[str, Any]] = {}
for push in test_pushes.values():
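
`do_eval` keeps three independent `| None` knobs, each of which only changes the result when it is set. A rough sketch of that shape, reduced to a scored dict of tasks (the selection logic is invented, not the model's):

```python
def select_tasks(
    scores: dict[str, float],
    confidence_threshold: float,
    reduction: float | None = None,
    cap: int | None = None,
    minimum: int | None = None,
) -> list[str]:
    ranked = sorted(scores, key=scores.get, reverse=True)
    selected = [t for t in ranked if scores[t] >= confidence_threshold]
    if reduction is not None:
        # Keep only the top fraction of the confident selection.
        selected = selected[: max(1, int(len(selected) * reduction))]
    if minimum is not None and len(selected) < minimum:
        selected = ranked[:minimum]
    if cap is not None:
        selected = selected[:cap]
    return selected


scores = {"a": 0.9, "b": 0.7, "c": 0.4}
assert select_tasks(scores, 0.5) == ["a", "b"]
assert select_tasks(scores, 0.5, cap=1) == ["a"]
assert select_tasks(scores, 0.95, minimum=2) == ["a", "b"]
```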

View file

@ -5,7 +5,7 @@
import logging
from datetime import datetime, timedelta
from typing import Collection, Iterator, NewType, Optional
from typing import Collection, Iterator, NewType
import tenacity
from libmozdata.phabricator import PhabricatorAPI
@ -69,7 +69,7 @@ def get_transactions(rev_phid: str) -> Collection[TransactionDict]:
def get(
rev_ids: Optional[Collection[int]] = None, modified_start: Optional[datetime] = None
rev_ids: Collection[int] | None = None, modified_start: datetime | None = None
) -> Collection[RevisionDict]:
assert PHABRICATOR_API is not None
@ -159,7 +159,7 @@ def download_modified_revisions():
db.append(REVISIONS_DB, modified_revisions)
def get_testing_project(rev: RevisionDict) -> Optional[str]:
def get_testing_project(rev: RevisionDict) -> str | None:
testing_projects = [
TESTING_PROJECTS[projectPHID]
for projectPHID in rev["attachments"]["projects"]["projectPHIDs"]
@ -177,7 +177,7 @@ def get_testing_project(rev: RevisionDict) -> Optional[str]:
def get_review_dates(
rev: RevisionDict,
) -> tuple[Optional[datetime], list[datetime], list[datetime], list[datetime]]:
) -> tuple[datetime | None, list[datetime], list[datetime], list[datetime]]:
creation_date = None
review_dates = []
@ -208,7 +208,7 @@ def get_review_dates(
return creation_date, review_dates, exclusion_start_dates, exclusion_end_dates
def get_first_review_time(rev: RevisionDict) -> Optional[timedelta]:
def get_first_review_time(rev: RevisionDict) -> timedelta | None:
(
creation_date,
review_dates,
@ -257,7 +257,7 @@ def get_first_review_time(rev: RevisionDict) -> Optional[timedelta]:
)
def get_pending_review_time(rev: RevisionDict) -> Optional[timedelta]:
def get_pending_review_time(rev: RevisionDict) -> timedelta | None:
if rev["fields"]["status"]["value"] != "needs-review":
return None
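
`get_review_dates` puts the union inside a larger type: a tuple whose first slot may be `None`. Unpacking still needs an explicit guard before doing date arithmetic; a small hypothetical example:

```python
from datetime import datetime, timedelta


def review_span(dates: tuple[datetime | None, list[datetime]]) -> timedelta | None:
    creation_date, review_dates = dates
    # The creation date can legitimately be missing, so bail out before subtracting.
    if creation_date is None or not review_dates:
        return None
    return min(review_dates) - creation_date


created = datetime(2023, 12, 1)
assert review_span((created, [datetime(2023, 12, 3)])) == timedelta(days=2)
assert review_span((None, [datetime(2023, 12, 3)])) is None
```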

View file

@ -20,7 +20,7 @@ import sys
import threading
from datetime import datetime
from functools import lru_cache
from typing import Collection, Iterable, Iterator, NewType, Optional, Set, Union
from typing import Collection, Iterable, Iterator, NewType, Set, Union
import hglib
import lmdb
@ -35,7 +35,7 @@ logger = logging.getLogger(__name__)
CommitDict = NewType("CommitDict", dict)
code_analysis_server: Optional[rust_code_analysis_server.RustCodeAnalysisServer] = None
code_analysis_server: rust_code_analysis_server.RustCodeAnalysisServer | None = None
hg_servers = list()
hg_servers_lock = threading.Lock()
@ -173,7 +173,7 @@ class Commit:
author: str,
desc: str,
pushdate: datetime,
bug_id: Optional[int],
bug_id: int | None,
backsout: list[str],
backedoutby: str,
author_email: str,
@ -312,7 +312,7 @@ def get_commits(
)
def get_revision_id(commit: CommitDict) -> Optional[int]:
def get_revision_id(commit: CommitDict) -> int | None:
match = PHABRICATOR_REVISION_REGEX.search(commit["desc"])
if not match:
return None
@ -871,7 +871,7 @@ def _transform(commit):
def hg_log(
hg: hglib.client, revs: list[bytes], branch: Optional[str] = "tip"
hg: hglib.client, revs: list[bytes], branch: str | None = "tip"
) -> tuple[Commit, ...]:
if len(revs) == 0:
return tuple()
@ -1303,7 +1303,7 @@ def close_component_mapping():
def hg_log_multi(
repo_dir: str, revs: list[bytes], branch: Optional[str] = "tip"
repo_dir: str, revs: list[bytes], branch: str | None = "tip"
) -> tuple[Commit, ...]:
if len(revs) == 0:
return tuple()
@ -1338,7 +1338,7 @@ def download_commits(
repo_dir: str,
rev_start: str | None = None,
revs: list[bytes] | None = None,
branch: Optional[str] = "tip",
branch: str | None = "tip",
save: bool = True,
use_single_process: bool = False,
include_no_bug: bool = False,
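
The module-level `code_analysis_server` shows another common home for the union: a global that starts as `None` and is replaced by a real object later. A sketch of that lazy-initialization pattern with a stand-in class (not the real `RustCodeAnalysisServer`):

```python
class FakeAnalysisServer:
    def metrics(self, path: str) -> dict:
        return {"path": path, "loc": 0}


# Starts out unset; created on first use.
code_analysis_server: FakeAnalysisServer | None = None


def get_metrics(path: str) -> dict:
    global code_analysis_server
    if code_analysis_server is None:
        code_analysis_server = FakeAnalysisServer()
    # After the check, a type checker knows the global is no longer None here.
    return code_analysis_server.metrics(path)


assert get_metrics("src/main.rs") == {"path": "src/main.rs", "loc": 0}
```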

View file

@ -6,7 +6,6 @@
import logging
import subprocess
import time
from typing import Optional
import requests
@ -20,7 +19,7 @@ HEADERS = {"Content-type": "application/octet-stream"}
class RustCodeAnalysisServer:
def __init__(self, thread_num: Optional[int] = None):
def __init__(self, thread_num: int | None = None):
for _ in range(START_RETRIES):
self.start_process(thread_num)
@ -41,7 +40,7 @@ class RustCodeAnalysisServer:
def base_url(self):
return f"http://127.0.0.1:{self.port}"
def start_process(self, thread_num: Optional[int] = None):
def start_process(self, thread_num: int | None = None):
self.port = utils.get_free_tcp_port()
try:

View file

@ -21,7 +21,6 @@ from typing import (
Iterable,
Iterator,
NewType,
Optional,
Set,
Union,
cast,
@ -646,7 +645,7 @@ def set_touched_together(f1: str, f2: str) -> None:
)
def update_touched_together() -> Generator[None, Optional[Revision], None]:
def update_touched_together() -> Generator[None, Revision | None, None]:
touched_together = get_touched_together_db(False)
last_analyzed = (
touched_together[b"last_analyzed"]
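
`update_touched_together` is a coroutine-style generator: the middle `Generator` parameter is the type callers pass to `send()`, and here that is a revision or `None` as an end-of-stream sentinel. A minimal sketch of driving such a generator (the bookkeeping is invented):

```python
from typing import Generator


def track_revisions() -> Generator[None, str | None, None]:
    seen: list[str] = []
    while True:
        rev = yield
        if rev is None:
            # Sentinel: the caller has no more revisions to feed in.
            break
        seen.append(rev)
    print(f"processed {len(seen)} revisions: {seen}")


gen = track_revisions()
next(gen)  # prime the generator so it is paused at the first `yield`
gen.send("abc123")
gen.send("def456")
try:
    gen.send(None)  # triggers the break; the finished generator raises StopIteration
except StopIteration:
    pass
```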

View file

@ -17,7 +17,7 @@ from collections import deque
from contextlib import contextmanager
from datetime import datetime
from functools import lru_cache
from typing import Any, Iterator, Optional
from typing import Any, Iterator
import boto3
import dateutil.parser
@ -207,7 +207,7 @@ def download_check_etag(url, path=None):
return True
def get_last_modified(url: str) -> Optional[datetime]:
def get_last_modified(url: str) -> datetime | None:
session = get_session(urllib.parse.urlparse(url).netloc)
r = session.head(url, allow_redirects=True)
@ -524,7 +524,7 @@ def extract_metadata(body: str) -> dict:
return dict(match_list)
def extract_private(issue_body: str) -> Optional[tuple]:
def extract_private(issue_body: str) -> tuple | None:
"""Extract private issue information from public issue body.
Parse public issue body and extract private issue number and
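
`extract_private` returns a bare `tuple | None`, which type checkers accept just like the parameterized forms. A hypothetical sketch of the shape (the marker format is invented, not the real issue template):

```python
import re

# Invented marker; the real private-issue reference format may differ.
PRIVATE_RE = re.compile(r"private repo: ([\w-]+)/([\w-]+)#(\d+)")


def extract_private(issue_body: str) -> tuple | None:
    match = PRIVATE_RE.search(issue_body)
    # re.Match.groups() already gives a tuple; propagate None when nothing matches.
    return match.groups() if match else None


assert extract_private("see private repo: org/secrets#42") == ("org", "secrets", "42")
assert extract_private("nothing to see here") is None
```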

View file

@ -9,7 +9,7 @@ import os
import uuid
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from typing import Any, Callable, Optional, Sequence
from typing import Any, Callable, Sequence
import orjson
import zstandard
@ -175,14 +175,14 @@ def get_job_id() -> str:
return uuid.uuid4().hex
def init_job(job: JobInfo, job_id: Optional[str] = None) -> str:
def init_job(job: JobInfo, job_id: str | None = None) -> str:
job_id = job_id or get_job_id()
redis_conn.mset({job.mapping_key: job_id})
return job_id
def schedule_job(
job: JobInfo, job_id: Optional[str] = None, timeout: Optional[int] = None
job: JobInfo, job_id: str | None = None, timeout: int | None = None
) -> None:
job_id = init_job(job, job_id)
@ -197,7 +197,7 @@ def schedule_job(
def prepare_queue_job(
job: JobInfo, job_id: Optional[str] = None, timeout: Optional[int] = None
job: JobInfo, job_id: str | None = None, timeout: int | None = None
) -> Queue:
job_id = init_job(job, job_id)
return Queue.prepare_data(
@ -379,7 +379,7 @@ def clean_prediction_cache(job):
redis_conn.delete(job.change_time_key)
def get_result(job: JobInfo) -> Optional[Any]:
def get_result(job: JobInfo) -> Any | None:
LOGGER.debug(f"Checking for existing results at {job.result_key}")
result = redis_conn.get(job.result_key)
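
`Optional[Any]` becoming `Any | None` is mostly documentation, since `None` is already assignable to `Any`, but it keeps the "result may be missing" intent visible at the call site. A dict-backed stand-in for the Redis lookup (plain `json` instead of the orjson/zstandard pair imported above):

```python
import json
from typing import Any

# Stand-in for the Redis connection used by the real module.
fake_cache: dict[str, bytes] = {"job:result:123": json.dumps({"prob": 0.87}).encode()}


def get_result(result_key: str) -> Any | None:
    raw = fake_cache.get(result_key)
    if raw is None:
        # No cached result for this job yet.
        return None
    return json.loads(raw)


assert get_result("job:result:123") == {"prob": 0.87}
assert get_result("job:result:999") is None
```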

View file

@ -11,7 +11,7 @@ import re
import subprocess
from datetime import datetime
from logging import INFO, basicConfig, getLogger
from typing import Optional, cast
from typing import cast
import dateutil.parser
import hglib
@ -136,8 +136,8 @@ class CommitClassifier(object):
method_defect_predictor_dir: str,
use_single_process: bool,
skip_feature_importance: bool,
phabricator_deployment: Optional[str] = None,
diff_id: Optional[int] = None,
phabricator_deployment: str | None = None,
diff_id: int | None = None,
):
self.model_name = model_name
self.repo_dir = repo_dir
@ -582,8 +582,8 @@ class CommitClassifier(object):
def classify(
self,
revision: Optional[str] = None,
runnable_jobs_path: Optional[str] = None,
revision: str | None = None,
runnable_jobs_path: str | None = None,
) -> None:
self.update_commit_db()

View file

@ -18,7 +18,7 @@ import textwrap
import traceback
import urllib.parse
from datetime import datetime, timedelta, timezone
from typing import Any, Optional, Set, cast
from typing import Any, Set, cast
import bs4
import dateutil.parser
@ -1365,7 +1365,7 @@ def notification(days: int) -> None:
)
)
def get_top_crashes(team: str, channel: str) -> Optional[str]:
def get_top_crashes(team: str, channel: str) -> str | None:
top_crashes = []
if team in super_teams:
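
`get_top_crashes` returns either a formatted block of text or `None` when there is nothing to report, so the caller can drop that part of the notification entirely. A toy sketch of that contract (the signature, formatting, and data are invented):

```python
def get_top_crashes(team: str, crashes_by_team: dict[str, list[str]]) -> str | None:
    top_crashes = crashes_by_team.get(team, [])
    if not top_crashes:
        # Returning None rather than "" makes "nothing to show" explicit for callers.
        return None
    return "\n".join(f"- {signature}" for signature in top_crashes)


crashes = {"dom": ["[@ mozilla::dom::Crash]"]}
assert get_top_crashes("dom", crashes) == "- [@ mozilla::dom::Crash]"
assert get_top_crashes("gfx", crashes) is None
```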

View file

@ -3,7 +3,6 @@
import argparse
from datetime import datetime, timezone
from logging import getLogger
from typing import Optional
import dateutil.parser
from dateutil.relativedelta import relativedelta
@ -18,7 +17,7 @@ class Retriever(object):
def retrieve_revisions(
self,
limit_months: int = 2,
limit_count: Optional[int] = None,
limit_count: int | None = None,
) -> None:
"""Retrieve revisions from Phabricator.

View file

@ -9,7 +9,7 @@ import logging
import time
import traceback
from datetime import datetime
from typing import Any, Optional
from typing import Any
import matplotlib.pyplot as plt
import mozci.push
@ -254,7 +254,7 @@ def plot_graphs(granularity: str) -> None:
def print_uncaught(
granularity: str, scheduler1: str, scheduler2: Optional[str] = None
granularity: str, scheduler1: str, scheduler2: str | None = None
) -> None:
push_data_db = (
test_scheduling.PUSH_DATA_GROUP_DB