2023-11-29 03:12:23 +03:00
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
2024-01-12 18:56:31 +03:00
import gzip
2023-12-22 17:35:14 +03:00
import io
import json
2023-11-29 03:12:23 +03:00
import logging
import os
2023-12-22 17:35:14 +03:00
import os . path
2023-11-29 03:12:23 +03:00
import pprint
2024-01-12 18:56:31 +03:00
import re
2023-12-22 17:35:14 +03:00
import sys
2024-01-12 18:56:31 +03:00
import tempfile
2023-11-29 03:12:23 +03:00
import urllib . parse
2024-01-27 01:10:45 +03:00
from copy import deepcopy
2023-12-22 17:35:14 +03:00
from pathlib import Path
2024-01-27 01:10:45 +03:00
from statistics import median
2024-01-12 18:56:31 +03:00
from xmlrpc . client import Fault
2023-12-22 17:35:14 +03:00
from yaml import load
try :
from yaml import CLoader as Loader
except ImportError :
from yaml import Loader
2023-11-29 03:12:23 +03:00
import bugzilla
import mozci . push
2023-12-22 17:35:14 +03:00
import requests
from manifestparser import ManifestParser
from manifestparser . toml import add_skip_if , alphabetize_toml_str , sort_paths
from mozci . task import TestTask
from mozci . util . taskcluster import get_task
2024-01-27 01:10:45 +03:00
from taskcluster . exceptions import TaskclusterRestFailure
2024-01-12 18:56:31 +03:00
# --- Constants used throughout skip-fails ---

# Name of the per-task log artifact fetched from Taskcluster.
TASK_LOG = "live_backing.log"
TASK_ARTIFACT = "public/logs/" + TASK_LOG
# Description used when a compressed task log is attached to a Bugzilla bug.
ATTACHMENT_DESCRIPTION = "Compressed " + TASK_ARTIFACT + " for task "
# Matches a bug comment that created a log attachment:
# group(1) = attachment id, group(2) = task id.
ATTACHMENT_REGEX = (
    r".*Created attachment ([0-9]+)\n.*"
    + ATTACHMENT_DESCRIPTION
    + "([A-Za-z0-9_-]+)\n.*"
)

BUGZILLA_AUTHENTICATION_HELP = "Must create a Bugzilla API key per https://github.com/mozilla/mozci-tools/blob/main/citools/test_triage_bug_filer.py"

MS_PER_MINUTE = 60 * 1000  # ms per minute
# Median-duration thresholds beyond which an entire manifest is disabled.
DEBUG_THRESHOLD = 40 * MS_PER_MINUTE  # 40 minutes in ms
OPT_THRESHOLD = 20 * MS_PER_MINUTE  # 20 minutes in ms

# Dictionary keys for the failures structure built by Skipfails.get_failures().
ANYJS = "anyjs"
CC = "classification"
DEF = "DEFAULT"  # refers to the [DEFAULT] section of a TOML manifest
DIFFERENCE = "difference"
DURATIONS = "durations"
EQEQ = "=="
ERROR = "error"
FAIL = "FAIL"
FAILED_RUNS = "runs_failed"
FAILURE_RATIO = 0.4  # more than this fraction of failures will disable
INTERMITTENT_RATIO_REFTEST = 0.4  # reftest low frequency intermittent
FAILURE_RATIO_REFTEST = 0.8  # disable ratio for reftest (high freq intermittent)
GROUP = "group"
KIND = "kind"
LINENO = "lineno"
LL = "label"
MEDIAN_DURATION = "duration_median"
MINIMUM_RUNS = 3  # minimum number of runs to consider success/failure
# Defaults/init lists for the Mock wrapper used when replaying saved tasks.
MOCK_BUG_DEFAULTS = {"blocks": [], "comments": []}
MOCK_TASK_DEFAULTS = {"extra": {}, "failure_types": {}, "results": []}
MOCK_TASK_INITS = ["results"]
MODIFIERS = "modifiers"
NOTEQ = "!="
OPT = "opt"
PASS = "PASS"
PIXELS = "pixels"
PP = "path"
QUERY = "query"
RR = "result"
RUNS = "runs"
STATUS = "status"
SUBTEST = "subtest"
# Extracts (max difference, differing pixel count) from a reftest failure line.
SUBTEST_REGEX = (
    r"image comparison, max difference: ([0-9]+), number of differing pixels: ([0-9]+)"
)
SUM_BY_LABEL = "sum_by_label"
TEST = "test"
TEST_TYPES = [EQEQ, NOTEQ]  # supported reftest comparison operators
TOTAL_DURATION = "duration_total"
TOTAL_RUNS = "runs_total"
# web-platform-tests directory layout (tests vs. metadata, upstream vs. mozilla).
WP = "testing/web-platform/"
WPT0 = WP + "tests/infrastructure"
WPT_META0 = WP + "tests/infrastructure/metadata"
WPT_META0_CLASSIC = WP + "meta/infrastructure"
WPT1 = WP + "tests"
WPT_META1 = WPT1.replace("tests", "meta")
WPT2 = WP + "mozilla/tests"
WPT_META2 = WPT2.replace("tests", "meta")
WPT_MOZILLA = "/_mozilla"
2024-01-27 01:10:45 +03:00
2023-12-22 17:35:14 +03:00
2024-02-08 23:22:04 +03:00
class Mock(object):
    """Attribute-access wrapper over a JSON-like dict (used to replay saved
    bugs/tasks from disk).

    data     -- the underlying dictionary
    defaults -- fallback values returned when a key is absent from data
    inits    -- names of array-valued keys whose elements are recursively
                wrapped in Mock as well

    Unknown attributes (absent from both data and defaults) resolve to "".
    """

    # Fix: the original used mutable default arguments (defaults={}, inits=[]),
    # which are shared across all calls and can leak state between instances.
    def __init__(self, data, defaults=None, inits=None):
        self._data = data
        self._defaults = defaults if defaults is not None else {}
        inits = inits if inits is not None else []
        for name in inits:
            values = self._data.get(name, [])  # assume type is an array
            values = [Mock(value, self._defaults, inits) for value in values]
            self._data[name] = values

    def __getattr__(self, name):
        # Lookup order: data, then defaults, then empty string.
        if name in self._data:
            return self._data[name]
        if name in self._defaults:
            return self._defaults[name]
        return ""
2023-11-29 03:12:23 +03:00
class Classification(object):
    "Classification of the failure (not the task result)"

    DISABLE_INTERMITTENT = "disable_intermittent"  # reftest [40%, 80%)
    DISABLE_FAILURE = "disable_failure"  # reftest (80%,100%] failure
    DISABLE_MANIFEST = "disable_manifest"  # crash found
    DISABLE_RECOMMENDED = "disable_recommended"  # disable first failing path
    DISABLE_TOO_LONG = "disable_too_long"  # runtime threshold exceeded
    INTERMITTENT = "intermittent"  # failed, but below the disable ratio
    SECONDARY = "secondary"  # secondary failing path
    SUCCESS = "success"  # path always succeeds
    UNKNOWN = "unknown"  # default; fewer than MINIMUM_RUNS observed
2023-11-29 03:12:23 +03:00
2024-07-11 20:35:45 +03:00
class Kind(object):
    "Kind of manifest"

    LIST = "list"  # reftest .list manifest
    TOML = "toml"  # manifestparser .toml manifest
    UNKNOWN = "unknown"
    WPT = "wpt"  # web-platform-tests metadata
2023-11-29 03:12:23 +03:00
class Skipfails(object):
    "mach manifest skip-fails implementation: Update manifests to skip failing tests"

    # Treeherder try-URL query parameter names.
    REPO = "repo"
    REVISION = "revision"
    TREEHERDER = "treeherder.mozilla.org"
    BUGZILLA_SERVER_DEFAULT = "bugzilla.allizom.org"
    def __init__(
        self,
        command_context=None,
        try_url="",
        verbose=True,
        bugzilla=None,
        dry_run=False,
        turbo=False,
        implicit_vars=False,
    ):
        """Initialize skip-fails state.

        command_context -- mach command context (provides topsrcdir and log);
                           None when run standalone
        try_url         -- Treeherder URL of the try push to analyze
        verbose         -- emit vinfo() progress messages
        bugzilla        -- Bugzilla server, or "disable" to skip bug filing
        dry_run         -- do not write manifests / file bugs
        turbo           -- also skip SECONDARY (non-first) failures
        implicit_vars   -- assume implicit variables for reftest conditions
        """
        self.command_context = command_context
        # Derive the source root either from mach or from this file's location.
        if self.command_context is not None:
            self.topsrcdir = self.command_context.topsrcdir
        else:
            self.topsrcdir = Path(__file__).parent.parent
        self.topsrcdir = os.path.normpath(self.topsrcdir)
        # mach may hand us a single-element list for the URL argument.
        if isinstance(try_url, list) and len(try_url) == 1:
            self.try_url = try_url[0]
        else:
            self.try_url = try_url
        self.dry_run = dry_run
        self.implicit_vars = implicit_vars
        self.verbose = verbose
        self.turbo = turbo
        # Bugzilla server: explicit argument > BUGZILLA env var > default.
        if bugzilla is not None:
            self.bugzilla = bugzilla
        elif "BUGZILLA" in os.environ:
            self.bugzilla = os.environ["BUGZILLA"]
        else:
            self.bugzilla = Skipfails.BUGZILLA_SERVER_DEFAULT
        if self.bugzilla == "disable":
            self.bugzilla = None  # Bug filing disabled
        self.component = "skip-fails"
        self._bzapi = None  # lazy Bugzilla client (see _initialize_bzapi)
        self._attach_rx = None  # compiled ATTACHMENT_REGEX, set with _bzapi
        self.variants = {}
        self.tasks = {}
        self.pp = None  # lazy PrettyPrinter (see pprint)
        self.headers = {}  # for Treeherder requests
        self.headers["Accept"] = "application/json"
        self.headers["User-Agent"] = "treeherder-pyclient"
        self.jobs_url = "https://treeherder.mozilla.org/api/jobs/"
        self.push_ids = {}
        self.job_ids = {}
        self.extras = {}
        self.bugs = []  # preloaded bugs, currently not an updated cache
        self.error_summary = {}
        self._subtest_rx = None  # compiled SUBTEST_REGEX, lazy
        self.lmp = None
        self.failure_types = None  # optional per-task failure_types cache
2023-11-29 03:12:23 +03:00
def _initialize_bzapi ( self ) :
2024-03-18 21:13:55 +03:00
""" Lazily initializes the Bugzilla API (returns True on success) """
if self . _bzapi is None and self . bugzilla is not None :
2023-11-29 03:12:23 +03:00
self . _bzapi = bugzilla . Bugzilla ( self . bugzilla )
2024-01-12 18:56:31 +03:00
self . _attach_rx = re . compile ( ATTACHMENT_REGEX , flags = re . M )
2024-03-18 21:13:55 +03:00
return self . _bzapi is not None
2023-11-29 03:12:23 +03:00
2023-12-22 17:35:14 +03:00
def pprint ( self , obj ) :
if self . pp is None :
self . pp = pprint . PrettyPrinter ( indent = 4 , stream = sys . stderr )
self . pp . pprint ( obj )
sys . stderr . flush ( )
2023-11-29 03:12:23 +03:00
def error ( self , e ) :
if self . command_context is not None :
self . command_context . log (
2024-07-11 20:35:45 +03:00
logging . ERROR , self . component , { ERROR : str ( e ) } , " ERROR: {error} "
2023-11-29 03:12:23 +03:00
)
else :
2023-12-22 17:35:14 +03:00
print ( f " ERROR: { e } " , file = sys . stderr , flush = True )
2023-11-29 03:12:23 +03:00
def warning ( self , e ) :
if self . command_context is not None :
self . command_context . log (
2024-07-11 20:35:45 +03:00
logging . WARNING , self . component , { ERROR : str ( e ) } , " WARNING: {error} "
2023-11-29 03:12:23 +03:00
)
else :
2023-12-22 17:35:14 +03:00
print ( f " WARNING: { e } " , file = sys . stderr , flush = True )
2023-11-29 03:12:23 +03:00
def info ( self , e ) :
if self . command_context is not None :
self . command_context . log (
2024-07-11 20:35:45 +03:00
logging . INFO , self . component , { ERROR : str ( e ) } , " INFO: {error} "
2023-11-29 03:12:23 +03:00
)
else :
2023-12-22 17:35:14 +03:00
print ( f " INFO: { e } " , file = sys . stderr , flush = True )
2023-11-29 03:12:23 +03:00
2024-01-12 18:56:31 +03:00
def vinfo ( self , e ) :
if self . verbose :
self . info ( e )
2024-03-18 21:13:55 +03:00
def full_path ( self , filename ) :
""" Returns full path for the relative filename """
return os . path . join ( self . topsrcdir , os . path . normpath ( filename ) )
def isdir ( self , filename ) :
""" Returns True if filename is a directory """
return os . path . isdir ( self . full_path ( filename ) )
def exists ( self , filename ) :
""" Returns True if filename exists """
return os . path . exists ( self . full_path ( filename ) )
2023-12-22 17:35:14 +03:00
    def run(
        self,
        meta_bug_id=None,
        save_tasks=None,
        use_tasks=None,
        save_failures=None,
        use_failures=None,
        max_failures=-1,
    ):
        "Run skip-fails on try_url, return True on success"

        # meta_bug_id   -- optional meta bug every filed bug should block
        # save_tasks    -- file to write fetched tasks to (for later replay)
        # use_tasks     -- file to read tasks from instead of querying mozci
        # save_failures -- file to write the computed failures structure to
        # use_failures  -- file to read a failures structure from
        # max_failures  -- stop after this many skip edits (-1 = unlimited)
        try_url = self.try_url
        revision, repo = self.get_revision(try_url)
        if use_tasks is not None:
            tasks = self.read_tasks(use_tasks)
            self.vinfo(f"use tasks: {use_tasks}")
            self.failure_types = None  # do NOT cache failure_types
        else:
            tasks = self.get_tasks(revision, repo)
            self.failure_types = {}  # cache failure_types
        if use_failures is not None:
            failures = self.read_failures(use_failures)
            self.vinfo(f"use failures: {use_failures}")
        else:
            failures = self.get_failures(tasks)
            if save_failures is not None:
                self.write_json(save_failures, failures)
                self.vinfo(f"save failures: {save_failures}")
        if save_tasks is not None:
            self.write_tasks(save_tasks, tasks)
            self.vinfo(f"save tasks: {save_tasks}")
        num_failures = 0
        self.vinfo(
            f"skip-fails assumes implicit-vars for reftest: {self.implicit_vars}"
        )
        # Walk the failures structure: manifest -> label -> path, and skip
        # every path classified disable_* (and SECONDARY too, in turbo mode).
        for manifest in failures:
            kind = failures[manifest][KIND]
            for label in failures[manifest][LL]:
                for path in failures[manifest][LL][label][PP]:
                    classification = failures[manifest][LL][label][PP][path][CC]
                    if classification.startswith("disable_") or (
                        self.turbo and classification == Classification.SECONDARY
                    ):
                        anyjs = {}  # anyjs alternate basename = False
                        differences = []
                        pixels = []
                        status = FAIL
                        lineno = failures[manifest][LL][label][PP][path].get(LINENO, 0)
                        # Gather per-kind details from the recorded runs.
                        # NOTE(review): if RUNS is empty, task_id is unbound
                        # at the skip_failure() call below — TODO confirm
                        # RUNS is always non-empty for skipped paths.
                        for task_id in failures[manifest][LL][label][PP][path][RUNS]:
                            if kind == Kind.TOML:
                                break  # just use the first task_id
                            elif kind == Kind.LIST:
                                # Collect reftest image-comparison metrics.
                                difference = failures[manifest][LL][label][PP][path][
                                    RUNS
                                ][task_id].get(DIFFERENCE, 0)
                                if difference > 0:
                                    differences.append(difference)
                                pixel = failures[manifest][LL][label][PP][path][RUNS][
                                    task_id
                                ].get(PIXELS, 0)
                                if pixel > 0:
                                    pixels.append(pixel)
                                status = failures[manifest][LL][label][PP][path][RUNS][
                                    task_id
                                ].get(STATUS, FAIL)
                            elif kind == Kind.WPT:
                                # Track every basename (and query variant)
                                # that must be matched in the WPT metadata.
                                filename = os.path.basename(path)
                                anyjs[filename] = False
                                if (
                                    QUERY
                                    in failures[manifest][LL][label][PP][path][RUNS][
                                        task_id
                                    ]
                                ):
                                    query = failures[manifest][LL][label][PP][path][
                                        RUNS
                                    ][task_id][QUERY]
                                    anyjs[filename + query] = False
                                else:
                                    query = None
                                if (
                                    ANYJS
                                    in failures[manifest][LL][label][PP][path][RUNS][
                                        task_id
                                    ]
                                ):
                                    any_filename = os.path.basename(
                                        failures[manifest][LL][label][PP][path][RUNS][
                                            task_id
                                        ][ANYJS]
                                    )
                                    anyjs[any_filename] = False
                                    if query is not None:
                                        anyjs[any_filename + query] = False
                        self.skip_failure(
                            manifest,
                            kind,
                            path,
                            anyjs,
                            differences,
                            pixels,
                            lineno,
                            status,
                            label,
                            classification,
                            task_id,
                            try_url,
                            revision,
                            repo,
                            meta_bug_id,
                        )
                        num_failures += 1
                        if max_failures >= 0 and num_failures >= max_failures:
                            self.warning(
                                f"max_failures={max_failures} threshold reached: stopping."
                            )
                            return True
        return True
2023-11-29 03:12:23 +03:00
def get_revision ( self , url ) :
parsed = urllib . parse . urlparse ( url )
if parsed . scheme != " https " :
raise ValueError ( " try_url scheme not https " )
if parsed . netloc != Skipfails . TREEHERDER :
raise ValueError ( f " try_url server not { Skipfails . TREEHERDER } " )
if len ( parsed . query ) == 0 :
raise ValueError ( " try_url query missing " )
query = urllib . parse . parse_qs ( parsed . query )
if Skipfails . REVISION not in query :
raise ValueError ( " try_url query missing revision " )
revision = query [ Skipfails . REVISION ] [ 0 ]
if Skipfails . REPO in query :
repo = query [ Skipfails . REPO ] [ 0 ]
else :
repo = " try "
2024-01-12 18:56:31 +03:00
self . vinfo ( f " considering { repo } revision= { revision } " )
2023-11-29 03:12:23 +03:00
return revision , repo
def get_tasks ( self , revision , repo ) :
push = mozci . push . Push ( revision , repo )
return push . tasks
    def get_failures(self, tasks):
        """
        find failures and create structure comprised of runs by path:
           result:
                * False (failed)
                * True (passed)
           classification: Classification
                * unknown (default) < 3 runs
                * intermittent (not enough failures)
                * disable_recommended (enough repeated failures) > 3 runs >= 4
                * disable_manifest (disable DEFAULT if no other failures)
                * secondary (not first failure in group)
                * success
        """

        # ff maps manifest -> {KIND, LL: {label -> {DURATIONS, PP: {path ->
        # {CC, FAILED_RUNS, RUNS: {task_id -> run}, TOTAL_RUNS}}}}}.
        ff = {}
        # All paths seen per manifest (used by pass #2 to credit results).
        manifest_paths = {}
        # Templates deep-copied for each new manifest / label / path / run.
        manifest_ = {
            KIND: Kind.UNKNOWN,
            LL: {},
        }
        label_ = {
            DURATIONS: {},
            MEDIAN_DURATION: 0,
            OPT: False,
            PP: {},
            SUM_BY_LABEL: {},  # All sums implicitly zero
            TOTAL_DURATION: 0,
        }
        path_ = {
            CC: Classification.UNKNOWN,
            FAILED_RUNS: 0,
            RUNS: {},
            TOTAL_RUNS: 0,
        }
        run_ = {
            RR: False,
        }

        # Pass #1: record each task's explicit failures per manifest/path.
        for task in tasks:  # add explicit failures
            try:
                if len(task.results) == 0:
                    continue  # ignore aborted tasks
                failure_types = task.failure_types  # call magic property once
                if self.failure_types is not None:
                    self.failure_types[task.id] = failure_types
                self.vinfo(f"Getting failure_types from task: {task.id}")
                for manifest in failure_types:
                    mm = manifest
                    ll = task.label
                    # Infer manifest kind from its file extension.
                    kind = Kind.UNKNOWN
                    if mm.endswith(".ini"):
                        self.warning(
                            f"cannot analyze skip-fails on INI manifests: {mm}"
                        )
                        continue
                    elif mm.endswith(".list"):
                        kind = Kind.LIST
                    elif mm.endswith(".toml"):
                        kind = Kind.TOML
                    else:
                        kind = Kind.WPT
                        path, mm, _query, _anyjs = self.wpt_paths(mm)
                        if path is None:  # not WPT
                            self.warning(
                                f"cannot analyze skip-fails on unknown manifest type: {manifest}"
                            )
                            continue
                    if kind != Kind.WPT:
                        # For WPT the manifest is derived per-path below.
                        if mm not in manifest_paths:
                            manifest_paths[mm] = []
                        if mm not in ff:
                            ff[mm] = deepcopy(manifest_)
                        ff[mm][KIND] = kind
                        if ll not in ff[mm][LL]:
                            ff[mm][LL][ll] = deepcopy(label_)
                    for path_type in failure_types[manifest]:
                        path, _type = path_type
                        query = None
                        anyjs = None
                        allpaths = []
                        if kind == Kind.WPT:
                            path, mmpath, query, anyjs = self.wpt_paths(path)
                            if path is None:
                                self.warning(
                                    f"non existant failure path: {path_type[0]}"
                                )
                                break
                            allpaths = [path]
                            # The WPT metadata "manifest" is the directory.
                            mm = os.path.dirname(mmpath)
                            if mm not in manifest_paths:
                                manifest_paths[mm] = []
                            if mm not in ff:
                                ff[mm] = deepcopy(manifest_)
                            ff[mm][KIND] = kind
                            if ll not in ff[mm][LL]:
                                ff[mm][LL][ll] = deepcopy(label_)
                        elif kind == Kind.LIST:
                            # Reftest entries look like "<ref> ==|!= <test>".
                            words = path.split()
                            if len(words) != 3 or words[1] not in TEST_TYPES:
                                self.warning(f"reftest type not supported: {path}")
                                continue
                            allpaths = self.get_allpaths(task.id, mm, path)
                        elif kind == Kind.TOML:
                            if path == mm:
                                path = DEF  # refers to the manifest itself
                            allpaths = [path]
                        for path in allpaths:
                            if path in manifest_paths[mm]:
                                continue  # duplicate path for this task
                            manifest_paths[mm].append(path)
                            self.vinfo(
                                f"Getting failure info in manifest: {mm}, path: {path}"
                            )
                            if path not in ff[mm][LL][ll][PP]:
                                ff[mm][LL][ll][PP][path] = deepcopy(path_)
                            if task.id not in ff[mm][LL][ll][PP][path][RUNS]:
                                ff[mm][LL][ll][PP][path][RUNS][task.id] = deepcopy(run_)
                            # Explicit failure: mark the run as failed.
                            ff[mm][LL][ll][PP][path][RUNS][task.id][RR] = False
                            if query is not None:
                                ff[mm][LL][ll][PP][path][RUNS][task.id][QUERY] = query
                            if anyjs is not None:
                                ff[mm][LL][ll][PP][path][RUNS][task.id][ANYJS] = anyjs
                            ff[mm][LL][ll][PP][path][TOTAL_RUNS] += 1
                            ff[mm][LL][ll][PP][path][FAILED_RUNS] += 1
                            if kind == Kind.LIST:
                                # Pull reftest image-comparison details from
                                # the task log.
                                (
                                    lineno,
                                    difference,
                                    pixels,
                                    status,
                                ) = self.get_lineno_difference_pixels_status(
                                    task.id, mm, path
                                )
                                if lineno > 0:
                                    ff[mm][LL][ll][PP][path][LINENO] = lineno
                                else:
                                    self.vinfo(f"ERROR no lineno for {path}")
                                if status != FAIL:
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][
                                        STATUS
                                    ] = status
                                if status == FAIL and difference == 0 and pixels == 0:
                                    # intermittent, not error
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][RR] = True
                                    ff[mm][LL][ll][PP][path][FAILED_RUNS] -= 1
                                elif difference > 0:
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][
                                        DIFFERENCE
                                    ] = difference
                                if pixels > 0:
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][
                                        PIXELS
                                    ] = pixels
            except AttributeError:
                pass  # self.warning(f"unknown attribute in task (#1): {ae}")

        # Pass #2: credit every task result (pass or fail) against all known
        # paths of each manifest, skipping runs already recorded in pass #1.
        for task in tasks:  # add results
            try:
                if len(task.results) == 0:
                    continue  # ignore aborted tasks
                self.vinfo(f"Getting results from task: {task.id}")
                for result in task.results:
                    mm = result.group
                    ll = task.label
                    kind = Kind.UNKNOWN
                    if mm.endswith(".ini"):
                        self.warning(
                            f"cannot analyze skip-fails on INI manifests: {mm}"
                        )
                        continue
                    elif mm.endswith(".list"):
                        kind = Kind.LIST
                    elif mm.endswith(".toml"):
                        kind = Kind.TOML
                    else:
                        kind = Kind.WPT
                        path, mm, _query, _anyjs = self.wpt_paths(mm)
                        if path is None:  # not WPT
                            self.warning(
                                f"cannot analyze skip-fails on unknown manifest type: {result.group}"
                            )
                            continue
                    if mm not in manifest_paths:
                        continue  # no failures recorded for this manifest
                    if mm not in ff:
                        ff[mm] = deepcopy(manifest_)
                    if ll not in ff[mm][LL]:
                        ff[mm][LL][ll] = deepcopy(label_)
                    if task.id not in ff[mm][LL][ll][DURATIONS]:
                        # duration may be None !!!
                        ff[mm][LL][ll][DURATIONS][task.id] = result.duration or 0
                        if ff[mm][LL][ll][OPT] is None:
                            ff[mm][LL][ll][OPT] = self.get_opt_for_task(task.id)
                    for path in manifest_paths[mm]:  # all known paths
                        self.vinfo(f"Getting result for manifest: {mm}, path: {path}")
                        if path not in ff[mm][LL][ll][PP]:
                            ff[mm][LL][ll][PP][path] = deepcopy(path_)
                        if task.id not in ff[mm][LL][ll][PP][path][RUNS]:
                            # Not seen in pass #1: record this run's result.
                            ff[mm][LL][ll][PP][path][RUNS][task.id] = deepcopy(run_)
                            ff[mm][LL][ll][PP][path][RUNS][task.id][RR] = result.ok
                            ff[mm][LL][ll][PP][path][TOTAL_RUNS] += 1
                            if not result.ok:
                                ff[mm][LL][ll][PP][path][FAILED_RUNS] += 1
                            if kind == Kind.LIST:
                                (
                                    lineno,
                                    difference,
                                    pixels,
                                    status,
                                ) = self.get_lineno_difference_pixels_status(
                                    task.id, mm, path
                                )
                                if lineno > 0:
                                    ff[mm][LL][ll][PP][path][LINENO] = lineno
                                else:
                                    self.vinfo(f"ERROR no lineno for {path}")
                                if status != FAIL:
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][
                                        STATUS
                                    ] = status
                                if (
                                    status == FAIL
                                    and difference == 0
                                    and pixels == 0
                                    and not result.ok
                                ):
                                    # intermittent, not error
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][RR] = True
                                    ff[mm][LL][ll][PP][path][FAILED_RUNS] -= 1
                                if difference > 0:
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][
                                        DIFFERENCE
                                    ] = difference
                                if pixels > 0:
                                    ff[mm][LL][ll][PP][path][RUNS][task.id][
                                        PIXELS
                                    ] = pixels
            except AttributeError:
                pass  # self.warning(f"unknown attribute in task (#2): {ae}")

        # Pass #3: classify every path from its run statistics.
        for mm in ff:  # determine classifications
            kind = ff[mm][KIND]
            for label in ff[mm][LL]:
                ll = label
                opt = ff[mm][LL][ll][OPT]
                durations = []  # summarize durations
                for task_id in ff[mm][LL][ll][DURATIONS]:
                    duration = ff[mm][LL][ll][DURATIONS][task_id]
                    durations.append(duration)
                if len(durations) > 0:
                    total_duration = sum(durations)
                    median_duration = median(durations)
                    ff[mm][LL][ll][TOTAL_DURATION] = total_duration
                    ff[mm][LL][ll][MEDIAN_DURATION] = median_duration
                    # Excessive runtime disables the whole manifest.
                    if (opt and median_duration > OPT_THRESHOLD) or (
                        (not opt) and median_duration > DEBUG_THRESHOLD
                    ):
                        if kind == Kind.TOML:
                            paths = [DEF]
                        else:
                            paths = ff[mm][LL][ll][PP].keys()
                        for path in paths:
                            if path not in ff[mm][LL][ll][PP]:
                                ff[mm][LL][ll][PP][path] = deepcopy(path_)
                            # NOTE(review): the guard tests task_id (last value
                            # from the durations loop above) but indexes with
                            # task.id (stale loop variable from pass #2) —
                            # looks like an upstream inconsistency; confirm
                            # before relying on these counters.
                            if task_id not in ff[mm][LL][ll][PP][path][RUNS]:
                                ff[mm][LL][ll][PP][path][RUNS][task.id] = deepcopy(run_)
                            ff[mm][LL][ll][PP][path][RUNS][task.id][RR] = False
                            ff[mm][LL][ll][PP][path][TOTAL_RUNS] += 1
                            ff[mm][LL][ll][PP][path][FAILED_RUNS] += 1
                            ff[mm][LL][ll][PP][path][
                                CC
                            ] = Classification.DISABLE_TOO_LONG
                primary = True  # we have not seen the first failure
                for path in sort_paths(ff[mm][LL][ll][PP]):
                    classification = ff[mm][LL][ll][PP][path][CC]
                    if classification == Classification.UNKNOWN:
                        failed_runs = ff[mm][LL][ll][PP][path][FAILED_RUNS]
                        total_runs = ff[mm][LL][ll][PP][path][TOTAL_RUNS]
                        status = FAIL  # default status, only one run could be PASS
                        for task_id in ff[mm][LL][ll][PP][path][RUNS]:
                            status = ff[mm][LL][ll][PP][path][RUNS][task_id].get(
                                STATUS, status
                            )
                        # Reftests use a lower intermittent threshold.
                        if kind == Kind.LIST:
                            failure_ratio = INTERMITTENT_RATIO_REFTEST
                        else:
                            failure_ratio = FAILURE_RATIO
                        if total_runs >= MINIMUM_RUNS:
                            if failed_runs / total_runs < failure_ratio:
                                if failed_runs == 0:
                                    classification = Classification.SUCCESS
                                else:
                                    classification = Classification.INTERMITTENT
                            elif kind == Kind.LIST:
                                if failed_runs / total_runs < FAILURE_RATIO_REFTEST:
                                    classification = Classification.DISABLE_INTERMITTENT
                                else:
                                    classification = Classification.DISABLE_FAILURE
                            elif primary:
                                if path == DEF:
                                    classification = Classification.DISABLE_MANIFEST
                                else:
                                    classification = Classification.DISABLE_RECOMMENDED
                                primary = False
                            else:
                                classification = Classification.SECONDARY
                        ff[mm][LL][ll][PP][path][CC] = classification
                    # Tally classifications per label.
                    if classification not in ff[mm][LL][ll][SUM_BY_LABEL]:
                        ff[mm][LL][ll][SUM_BY_LABEL][classification] = 0
                    ff[mm][LL][ll][SUM_BY_LABEL][classification] += 1
        return ff
2023-11-29 03:12:23 +03:00
2023-12-22 17:35:14 +03:00
def _get_os_version ( self , os , platform ) :
""" Return the os_version given the label platform string """
i = platform . find ( os )
j = i + len ( os )
yy = platform [ j : j + 2 ]
mm = platform [ j + 2 : j + 4 ]
return yy + " . " + mm
def get_bug_by_id ( self , id ) :
""" Get bug by bug id """
2024-02-08 23:22:04 +03:00
bug = None
for b in self . bugs :
if b . id == id :
bug = b
break
2024-03-18 21:13:55 +03:00
if bug is None and self . _initialize_bzapi ( ) :
2024-02-08 23:22:04 +03:00
bug = self . _bzapi . getbug ( id )
2023-12-22 17:35:14 +03:00
return bug
def get_bugs_by_summary ( self , summary ) :
""" Get bug by bug summary """
2023-11-29 03:12:23 +03:00
2024-02-08 23:22:04 +03:00
bugs = [ ]
for b in self . bugs :
if b . summary == summary :
bugs . append ( b )
if len ( bugs ) > 0 :
return bugs
2024-03-18 21:13:55 +03:00
if self . _initialize_bzapi ( ) :
query = self . _bzapi . build_query ( short_desc = summary )
query [ " include_fields " ] = [
" id " ,
" product " ,
" component " ,
" status " ,
" resolution " ,
" summary " ,
" blocks " ,
]
bugs = self . _bzapi . query ( query )
2023-12-22 17:35:14 +03:00
return bugs
    def create_bug(
        self,
        summary="Bug short description",
        description="Bug description",
        product="Testing",
        component="General",
        version="unspecified",
        bugtype="task",
    ):
        """Create a bug"""

        # Returns the created bug object, or None when bug filing is
        # disabled (Bugzilla API not initialized).
        # Raises PermissionError when no API key is configured.
        bug = None
        if self._initialize_bzapi():
            if not self._bzapi.logged_in:
                # NOTE(review): this repeats BUGZILLA_AUTHENTICATION_HELP
                # verbatim instead of using the constant.
                self.error(
                    "Must create a Bugzilla API key per https://github.com/mozilla/mozci-tools/blob/main/citools/test_triage_bug_filer.py"
                )
                raise PermissionError(f"Not authenticated for Bugzilla {self.bugzilla}")
            createinfo = self._bzapi.build_createbug(
                product=product,
                component=component,
                summary=summary,
                version=version,
                description=description,
            )
            createinfo["type"] = bugtype
            bug = self._bzapi.createbug(createinfo)
        return bug
2023-12-22 17:35:14 +03:00
    def add_bug_comment(self, id, comment, meta_bug_id=None):
        """Add a comment to an existing bug"""

        # When meta_bug_id is given, the bug is also made to block it.
        # No-op when bug filing is disabled; raises PermissionError when
        # the API client is not authenticated.
        if self._initialize_bzapi():
            if not self._bzapi.logged_in:
                self.error(BUGZILLA_AUTHENTICATION_HELP)
                raise PermissionError("Not authenticated for Bugzilla")
            if meta_bug_id is not None:
                blocks_add = [meta_bug_id]
            else:
                blocks_add = None
            updateinfo = self._bzapi.build_update(
                comment=comment, blocks_add=blocks_add
            )
            self._bzapi.update_bugs([id], updateinfo)
2023-12-22 17:35:14 +03:00
def skip_failure (
self ,
manifest ,
2024-07-11 20:35:45 +03:00
kind ,
2023-12-22 17:35:14 +03:00
path ,
2024-03-18 21:13:55 +03:00
anyjs ,
2024-07-11 20:35:45 +03:00
differences ,
pixels ,
lineno ,
status ,
2023-12-22 17:35:14 +03:00
label ,
classification ,
task_id ,
try_url ,
revision ,
repo ,
meta_bug_id = None ,
) :
2024-03-18 21:13:55 +03:00
"""
2024-07-11 20:35:45 +03:00
Skip a failure ( for TOML , WPT and REFTEST manifests )
2024-03-18 21:13:55 +03:00
For wpt anyjs is a dictionary mapping from alternate basename to
a boolean ( indicating if the basename has been handled in the manifest )
"""
2023-12-22 17:35:14 +03:00
2024-07-11 20:35:45 +03:00
self . vinfo ( f " \n \n ===== Skip failure in manifest: { manifest } ===== " )
self . vinfo ( f " path: { path } " )
2024-01-27 01:10:45 +03:00
if task_id is None :
skip_if = " true "
else :
2024-07-11 20:35:45 +03:00
skip_if = self . task_to_skip_if ( task_id , kind )
2023-12-22 17:35:14 +03:00
if skip_if is None :
self . warning (
f " Unable to calculate skip-if condition from manifest= { manifest } from failure label= { label } "
)
return
2024-07-11 20:35:45 +03:00
if kind == Kind . TOML :
filename = DEF
elif kind == Kind . WPT :
_path , manifest , _query , _anyjs = self . wpt_paths ( path )
filename = os . path . basename ( path )
elif kind == Kind . LIST :
filename = path
if status == PASS :
self . info ( f " Unexpected status: { status } " )
if status == PASS or classification == Classification . DISABLE_INTERMITTENT :
zero = True # refest lower ranges should include zero
else :
zero = False
2023-12-22 17:35:14 +03:00
bug_reference = " "
if classification == Classification . DISABLE_MANIFEST :
comment = " Disabled entire manifest due to crash result "
2024-01-27 01:10:45 +03:00
elif classification == Classification . DISABLE_TOO_LONG :
comment = " Disabled entire manifest due to excessive run time "
2023-12-22 17:35:14 +03:00
else :
2024-07-11 20:35:45 +03:00
if kind == Kind . TOML :
2024-03-18 21:13:55 +03:00
filename = self . get_filename_in_manifest ( manifest , path )
comment = f ' Disabled test due to failures in test file: " { filename } " '
2023-12-22 17:35:14 +03:00
if classification == Classification . SECONDARY :
comment + = " (secondary) "
2024-07-11 20:35:45 +03:00
if kind != Kind . WPT :
2024-03-18 21:13:55 +03:00
bug_reference = " (secondary) "
2024-07-11 20:35:45 +03:00
if kind != Kind . LIST :
self . vinfo ( f " filename: { filename } " )
if kind == Kind . WPT and len ( anyjs ) > 1 :
2024-03-18 21:13:55 +03:00
comment + = " \n Additional WPT wildcard paths: "
for p in sorted ( anyjs . keys ( ) ) :
if p != filename :
comment + = f ' \n " { p } " '
platform , testname = self . label_to_platform_testname ( label )
if platform is not None :
comment + = " \n Command line to reproduce (experimental): \n "
2024-07-11 20:35:45 +03:00
comment + = f " \" mach try fuzzy -q ' { platform } ' { testname } \" "
2023-12-22 17:35:14 +03:00
comment + = f " \n Try URL = { try_url } "
comment + = f " \n revision = { revision } "
comment + = f " \n repo = { repo } "
comment + = f " \n label = { label } "
2024-01-27 01:10:45 +03:00
if task_id is not None :
comment + = f " \n task_id = { task_id } "
2024-07-11 20:35:45 +03:00
if kind != Kind . LIST :
push_id = self . get_push_id ( revision , repo )
if push_id is not None :
comment + = f " \n push_id = { push_id } "
job_id = self . get_job_id ( push_id , task_id )
if job_id is not None :
comment + = f " \n job_id = { job_id } "
(
suggestions_url ,
line_number ,
line ,
log_url ,
) = self . get_bug_suggestions ( repo , job_id , path , anyjs )
if log_url is not None :
comment + = f " \n Bug suggestions: { suggestions_url } "
comment + = f " \n Specifically see at line { line_number } in the attached log: { log_url } "
comment + = f ' \n \n " { line } " \n '
2023-12-22 17:35:14 +03:00
bug_summary = f " MANIFEST { manifest } "
2024-01-12 18:56:31 +03:00
attachments = { }
2024-03-18 21:13:55 +03:00
bugid = " TBD "
if self . bugzilla is None :
2024-07-11 20:35:45 +03:00
self . vinfo ( " Bugzilla has been disabled: no bugs created or updated " )
2024-03-18 21:13:55 +03:00
else :
bugs = self . get_bugs_by_summary ( bug_summary )
if len ( bugs ) == 0 :
description = (
f " This bug covers excluded failing tests in the MANIFEST { manifest } "
2023-12-22 17:35:14 +03:00
)
2024-03-18 21:13:55 +03:00
description + = " \n (generated by `mach manifest skip-fails`) "
product , component = self . get_file_info ( path )
if self . dry_run :
self . warning (
f ' Dry-run NOT creating bug: { product } :: { component } " { bug_summary } " '
)
else :
bug = self . create_bug ( bug_summary , description , product , component )
bugid = bug . id
self . vinfo (
f ' Created Bug { bugid } { product } :: { component } : " { bug_summary } " '
)
elif len ( bugs ) == 1 :
bugid = bugs [ 0 ] . id
product = bugs [ 0 ] . product
component = bugs [ 0 ] . component
self . vinfo ( f ' Found Bug { bugid } { product } :: { component } " { bug_summary } " ' )
if meta_bug_id is not None :
if meta_bug_id in bugs [ 0 ] . blocks :
2024-01-12 18:56:31 +03:00
self . vinfo (
2024-03-18 21:13:55 +03:00
f " Bug { bugid } already blocks meta bug { meta_bug_id } "
2024-01-12 18:56:31 +03:00
)
2024-03-18 21:13:55 +03:00
meta_bug_id = None # no need to add again
comments = bugs [ 0 ] . getcomments ( )
for i in range ( len ( comments ) ) :
text = comments [ i ] [ " text " ]
m = self . _attach_rx . findall ( text )
if len ( m ) == 1 :
a_task_id = m [ 0 ] [ 1 ]
attachments [ a_task_id ] = m [ 0 ] [ 0 ]
if a_task_id == task_id :
self . vinfo (
f " Bug { bugid } already has the compressed log attached for this task "
)
else :
self . error ( f ' More than one bug found for summary: " { bug_summary } " ' )
return
bug_reference = f " Bug { bugid } " + bug_reference
2024-07-11 20:35:45 +03:00
extra = self . get_extra ( task_id )
json . dumps ( extra )
if kind == Kind . LIST :
comment + = (
f " \n fuzzy-if condition on line { lineno } : { skip_if } # { bug_reference } "
)
2024-03-18 21:13:55 +03:00
else :
2024-07-11 20:35:45 +03:00
comment + = f " \n skip-if condition: { skip_if } # { bug_reference } "
2024-03-18 21:13:55 +03:00
manifest_path = self . full_path ( manifest )
2024-07-11 20:35:45 +03:00
manifest_str = " "
additional_comment = " "
if kind == Kind . WPT :
2024-03-18 21:13:55 +03:00
if os . path . exists ( manifest_path ) :
manifest_str = io . open ( manifest_path , " r " , encoding = " utf-8 " ) . read ( )
else :
# ensure parent directories exist
os . makedirs ( os . path . dirname ( manifest_path ) , exist_ok = True )
2024-07-11 20:35:45 +03:00
manifest_str , additional_comment = self . wpt_add_skip_if (
2024-03-18 21:13:55 +03:00
manifest_str , anyjs , skip_if , bug_reference
)
2024-07-11 20:35:45 +03:00
elif kind == Kind . TOML :
2024-03-18 21:13:55 +03:00
mp = ManifestParser ( use_toml = True , document = True )
mp . read ( manifest_path )
document = mp . source_documents [ manifest_path ]
2024-07-11 20:35:45 +03:00
additional_comment = add_skip_if ( document , filename , skip_if , bug_reference )
2024-03-18 21:13:55 +03:00
manifest_str = alphabetize_toml_str ( document )
2024-07-11 20:35:45 +03:00
elif kind == Kind . LIST :
if lineno == 0 :
self . error (
f " cannot determine line to edit in manifest: { manifest_path } "
)
# elif skip_if.find("winWidget") >= 0 and skip_if.find("!is64Bit") >= 0:
# self.error(
# "Skipping failures for Windows 32-bit are temporarily disabled"
# )
elif not os . path . exists ( manifest_path ) :
self . error ( f " manifest does not exist: { manifest_path } " )
else :
manifest_str = io . open ( manifest_path , " r " , encoding = " utf-8 " ) . read ( )
manifest_str , additional_comment = self . reftest_add_fuzzy_if (
manifest_str ,
filename ,
skip_if ,
differences ,
pixels ,
lineno ,
zero ,
bug_reference ,
)
if not manifest_str and additional_comment :
self . warning ( additional_comment )
if additional_comment :
comment + = " \n " + additional_comment
if len ( manifest_str ) > 0 :
fp = io . open ( manifest_path , " w " , encoding = " utf-8 " , newline = " \n " )
fp . write ( manifest_str )
fp . close ( )
self . info ( f ' Edited [ " { filename } " ] in manifest: " { manifest } " ' )
if kind != Kind . LIST :
self . info ( f ' added skip-if condition: " { skip_if } " # { bug_reference } ' )
if self . dry_run :
self . info ( f " Dry-run NOT adding comment to Bug { bugid } : \n { comment } " )
self . info (
f ' Dry-run NOT editing [ " { filename } " ] in manifest: " { manifest } " '
)
self . info ( f ' would add skip-if condition: " { skip_if } " # { bug_reference } ' )
if task_id is not None and task_id not in attachments :
self . info ( " would add compressed log for this task " )
return
elif self . bugzilla is None :
self . warning ( f " NOT adding comment to Bug { bugid } : \n { comment } " )
else :
self . add_bug_comment ( bugid , comment , meta_bug_id )
self . info ( f " Added comment to Bug { bugid } : \n { comment } " )
if meta_bug_id is not None :
self . info ( f " Bug { bugid } blocks meta Bug: { meta_bug_id } " )
if task_id is not None and task_id not in attachments :
self . add_attachment_log_for_task ( bugid , task_id )
self . info ( " Added compressed log for this task " )
else :
self . error ( f ' Error editing [ " { filename } " ] in manifest: " { manifest } " ' )
2023-12-22 17:35:14 +03:00
def get_variants ( self ) :
""" Get mozinfo for each test variants """
if len ( self . variants ) == 0 :
Bug 1868440 - [ci] Upgrade taskcluster-taskgraph to v7.4.0, r=perftest-reviewers,geckoview-reviewers,ci-and-tooling,devtools-reviewers,taskgraph-reviewers,releng-reviewers,mach-reviewers,omc-reviewers,jmaher,hneiva,aminomancer,jari,dom-storage-reviewers,afinder,asuth,bhearsum,owlish
Differential Revision: https://phabricator.services.mozilla.com/D206260
2024-04-18 23:39:09 +03:00
variants_file = " taskcluster/kinds/test/variants.yml "
2024-03-18 21:13:55 +03:00
variants_path = self . full_path ( variants_file )
2023-12-22 17:35:14 +03:00
fp = io . open ( variants_path , " r " , encoding = " utf-8 " )
raw_variants = load ( fp , Loader = Loader )
fp . close ( )
for k , v in raw_variants . items ( ) :
mozinfo = k
if " mozinfo " in v :
mozinfo = v [ " mozinfo " ]
self . variants [ k ] = mozinfo
return self . variants
2024-01-27 01:10:45 +03:00
def get_task_details ( self , task_id ) :
2023-12-22 17:35:14 +03:00
""" Download details for task task_id """
if task_id in self . tasks : # if cached
task = self . tasks [ task_id ]
else :
2024-07-11 20:35:45 +03:00
self . vinfo ( f " get_task_details for task: { task_id } " )
2024-01-27 01:10:45 +03:00
try :
task = get_task ( task_id )
except TaskclusterRestFailure :
self . warning ( f " Task { task_id } no longer exists. " )
return None
2023-12-22 17:35:14 +03:00
self . tasks [ task_id ] = task
return task
2024-01-27 01:10:45 +03:00
def get_extra ( self , task_id ) :
""" Calculate extra for task task_id """
2023-12-22 17:35:14 +03:00
2024-01-27 01:10:45 +03:00
if task_id in self . extras : # if cached
extra = self . extras [ task_id ]
else :
self . get_variants ( )
task = self . get_task_details ( task_id ) or { }
arch = None
bits = None
2024-03-18 21:13:55 +03:00
build = None
build_types = [ ]
2024-01-27 01:10:45 +03:00
display = None
2024-03-18 21:13:55 +03:00
os = None
os_version = None
2024-01-27 01:10:45 +03:00
runtimes = [ ]
test_setting = task . get ( " extra " , { } ) . get ( " test-setting " , { } )
platform = test_setting . get ( " platform " , { } )
platform_os = platform . get ( " os " , { } )
2023-12-22 17:35:14 +03:00
opt = False
debug = False
2024-01-27 01:10:45 +03:00
if " name " in platform_os :
os = platform_os [ " name " ]
if os == " windows " :
os = " win "
if os == " macosx " :
os = " mac "
if " version " in platform_os :
os_version = platform_os [ " version " ]
if len ( os_version ) == 4 :
os_version = os_version [ 0 : 2 ] + " . " + os_version [ 2 : 4 ]
2024-03-18 21:13:55 +03:00
if " build " in platform_os :
build = platform_os [ " build " ]
2024-01-27 01:10:45 +03:00
if " arch " in platform :
arch = platform [ " arch " ]
if arch == " x86 " or arch . find ( " 32 " ) > = 0 :
bits = " 32 "
2024-03-18 21:13:55 +03:00
arch = " x86 "
else :
2024-01-27 01:10:45 +03:00
bits = " 64 "
2024-03-18 21:13:55 +03:00
if arch != " aarch64 " and arch != " ppc " :
arch = " x86_64 "
2024-01-27 01:10:45 +03:00
if " display " in platform :
display = platform [ " display " ]
if " runtime " in test_setting :
for k in test_setting [ " runtime " ] :
2024-07-11 20:35:45 +03:00
if k == " no-fission " and test_setting [ " runtime " ] [ k ] :
runtimes . append ( " no-fission " )
elif k in self . variants : # draw-snapshot -> snapshot
2024-01-27 01:10:45 +03:00
runtimes . append ( self . variants [ k ] ) # adds mozinfo
if " build " in test_setting :
tbuild = test_setting [ " build " ]
for k in tbuild :
if k == " type " :
if tbuild [ k ] == " opt " :
opt = True
elif tbuild [ k ] == " debug " :
debug = True
build_types . append ( tbuild [ k ] )
else :
build_types . append ( k )
unknown = None
extra = {
" arch " : arch or unknown ,
" bits " : bits or unknown ,
2024-03-18 21:13:55 +03:00
" build " : build or unknown ,
" build_types " : build_types ,
" debug " : debug ,
2024-01-27 01:10:45 +03:00
" display " : display or unknown ,
" opt " : opt ,
2024-03-18 21:13:55 +03:00
" os " : os or unknown ,
" os_version " : os_version or unknown ,
" runtimes " : runtimes ,
2024-01-27 01:10:45 +03:00
}
self . extras [ task_id ] = extra
return extra
def get_opt_for_task ( self , task_id ) :
extra = self . get_extra ( task_id )
return extra [ " opt " ]
2024-07-11 20:35:45 +03:00
def task_to_skip_if ( self , task_id , kind ) :
2024-01-27 01:10:45 +03:00
""" Calculate the skip-if condition for failing task task_id """
2024-07-11 20:35:45 +03:00
if kind == Kind . WPT :
2024-03-18 21:13:55 +03:00
qq = ' " '
aa = " and "
nn = " not "
2024-07-11 20:35:45 +03:00
elif kind == Kind . LIST :
qq = " ' "
aa = " && "
nn = " ! "
2024-03-18 21:13:55 +03:00
else :
qq = " ' "
aa = " && "
nn = " ! "
eq = " == "
arch = " processor "
version = " os_version "
2024-01-27 01:10:45 +03:00
extra = self . get_extra ( task_id )
2023-12-22 17:35:14 +03:00
skip_if = None
2024-07-11 20:35:45 +03:00
os = extra . get ( " os " )
if os is not None :
if kind == Kind . LIST :
if os == " linux " :
skip_if = " gtkWidget "
elif os == " win " :
skip_if = " winWidget "
elif os == " mac " :
skip_if = " cocoaWidget "
elif os == " android " :
skip_if = " Android "
else :
self . error ( f " cannot calculate skip-if for unknown OS: ' { os } ' " )
return None
elif extra . get ( " os_version " ) is not None :
2024-03-18 21:13:55 +03:00
if (
2024-07-11 20:35:45 +03:00
extra . get ( " build " ) is not None
and os == " win "
2024-03-18 21:13:55 +03:00
and extra [ " os_version " ] == " 11 "
and extra [ " build " ] == " 2009 "
) :
skip_if = " win11_2009 " # mozinfo.py:137
else :
2024-07-11 20:35:45 +03:00
skip_if = " os " + eq + qq + os + qq
2024-03-18 21:13:55 +03:00
skip_if + = aa + version + eq + qq + extra [ " os_version " ] + qq
2024-07-11 20:35:45 +03:00
if kind != Kind . LIST and extra . get ( " arch " ) is not None :
2024-03-18 21:13:55 +03:00
skip_if + = aa + arch + eq + qq + extra [ " arch " ] + qq
# since we always give arch/processor, bits are not required
# if extra["bits"] is not None:
# skip_if += aa + "bits" + eq + extra["bits"]
2024-07-11 20:35:45 +03:00
debug = extra . get ( " debug " , False )
runtimes = extra . get ( " runtimes " , [ ] )
fission = " no-fission " not in runtimes
snapshot = " snapshot " in runtimes
swgl = " swgl " in runtimes
build_types = extra . get ( " build_types " , [ ] )
asan = " asan " in build_types
ccov = " ccov " in build_types
tsan = " tsan " in build_types
optimized = ( not debug ) and ( not ccov ) and ( not asan ) and ( not tsan )
skip_if + = aa
if kind == Kind . LIST :
if optimized :
skip_if + = " optimized "
elif debug :
skip_if + = " isDebugBuild "
elif ccov :
skip_if + = " isCoverageBuild "
elif asan :
skip_if + = " AddressSanitizer "
elif tsan :
skip_if + = " ThreadSanitizer "
# See implicit VARIANT_DEFAULTS in
# https://searchfox.org/mozilla-central/source/layout/tools/reftest/manifest.sys.mjs#30
if not self . implicit_vars and fission :
skip_if + = aa + " fission "
elif not fission : # implicit default: fission
skip_if + = aa + nn + " fission "
if extra . get ( " bits " ) is not None :
if extra [ " bits " ] == " 32 " :
skip_if + = aa + nn + " is64Bit " # override implicit is64Bit
elif not self . implicit_vars and os == " winWidget " :
skip_if + = aa + " is64Bit "
if not self . implicit_vars and not swgl :
skip_if + = aa + nn + " swgl "
elif swgl : # implicit default: !swgl
skip_if + = aa + " swgl "
if os == " gtkWidget " :
if not self . implicit_vars and not snapshot :
skip_if + = aa + nn + " useDrawSnapshot "
elif snapshot : # implicit default: !useDrawSnapshot
skip_if + = aa + " useDrawSnapshot "
2024-03-18 21:13:55 +03:00
else :
2024-07-11 20:35:45 +03:00
if not debug :
skip_if + = nn
skip_if + = " debug "
if extra . get ( " display " ) is not None :
skip_if + = aa + " display " + eq + qq + extra [ " display " ] + qq
for runtime in extra . get ( " runtimes " , [ ] ) :
skip_if + = aa + runtime
for build_type in extra . get ( " build_types " , [ ] ) :
# note: lite will not evaluate on non-android platforms
if build_type not in [ " debug " , " lite " , " opt " , " shippable " ] :
skip_if + = aa + build_type
2023-12-22 17:35:14 +03:00
return skip_if
def get_file_info ( self , path , product = " Testing " , component = " General " ) :
"""
Get bugzilla product and component for the path .
Provide defaults ( in case command_context is not defined
or there isn ' t file info available).
"""
2024-01-27 01:10:45 +03:00
if path != DEF and self . command_context is not None :
2023-12-22 17:35:14 +03:00
reader = self . command_context . mozbuild_reader ( config_mode = " empty " )
info = reader . files_info ( [ path ] )
cp = info [ path ] [ " BUG_COMPONENT " ]
product = cp . product
component = cp . component
return product , component
def get_filename_in_manifest ( self , manifest , path ) :
""" return relative filename for path in manifest """
filename = os . path . basename ( path )
2024-01-27 01:10:45 +03:00
if filename == DEF :
2023-12-22 17:35:14 +03:00
return filename
manifest_dir = os . path . dirname ( manifest )
i = 0
j = min ( len ( manifest_dir ) , len ( path ) )
while i < j and manifest_dir [ i ] == path [ i ] :
i + = 1
if i < len ( manifest_dir ) :
for _ in range ( manifest_dir . count ( " / " , i ) + 1 ) :
filename = " ../ " + filename
elif i < len ( path ) :
filename = path [ i + 1 : ]
return filename
def get_push_id ( self , revision , repo ) :
""" Return the push_id for revision and repo (or None) """
2024-01-12 18:56:31 +03:00
self . vinfo ( f " Retrieving push_id for { repo } revision: { revision } ... " )
2023-12-22 17:35:14 +03:00
if revision in self . push_ids : # if cached
push_id = self . push_ids [ revision ]
else :
push_id = None
push_url = f " https://treeherder.mozilla.org/api/project/ { repo } /push/ "
params = { }
params [ " full " ] = " true "
params [ " count " ] = 10
params [ " revision " ] = revision
r = requests . get ( push_url , headers = self . headers , params = params )
if r . status_code != 200 :
self . warning ( f " FAILED to query Treeherder = { r } for { r . url } " )
else :
response = r . json ( )
if " results " in response :
results = response [ " results " ]
if len ( results ) > 0 :
r0 = results [ 0 ]
if " id " in r0 :
push_id = r0 [ " id " ]
self . push_ids [ revision ] = push_id
return push_id
def get_job_id ( self , push_id , task_id ) :
""" Return the job_id for push_id, task_id (or None) """
2024-01-12 18:56:31 +03:00
self . vinfo ( f " Retrieving job_id for push_id: { push_id } , task_id: { task_id } ... " )
2024-03-18 21:13:55 +03:00
k = f " { push_id } : { task_id } "
if k in self . job_ids : # if cached
job_id = self . job_ids [ k ]
2023-12-22 17:35:14 +03:00
else :
job_id = None
params = { }
params [ " push_id " ] = push_id
r = requests . get ( self . jobs_url , headers = self . headers , params = params )
if r . status_code != 200 :
self . warning ( f " FAILED to query Treeherder = { r } for { r . url } " )
else :
response = r . json ( )
if " results " in response :
results = response [ " results " ]
if len ( results ) > 0 :
for result in results :
if len ( result ) > 14 :
if result [ 14 ] == task_id :
job_id = result [ 1 ]
break
2024-03-18 21:13:55 +03:00
self . job_ids [ k ] = job_id
2023-12-22 17:35:14 +03:00
return job_id
2024-03-18 21:13:55 +03:00
def get_bug_suggestions ( self , repo , job_id , path , anyjs = None ) :
2023-12-22 17:35:14 +03:00
"""
Return the ( suggestions_url , line_number , line , log_url )
for the given repo and job_id
"""
2024-01-12 18:56:31 +03:00
self . vinfo (
2023-12-22 17:35:14 +03:00
f " Retrieving bug_suggestions for { repo } job_id: { job_id } , path: { path } ... "
)
suggestions_url = f " https://treeherder.mozilla.org/api/project/ { repo } /jobs/ { job_id } /bug_suggestions/ "
line_number = None
line = None
log_url = None
r = requests . get ( suggestions_url , headers = self . headers )
if r . status_code != 200 :
self . warning ( f " FAILED to query Treeherder = { r } for { r . url } " )
else :
2024-03-18 21:13:55 +03:00
if anyjs is not None :
pathdir = os . path . dirname ( path ) + " / "
paths = [ pathdir + f for f in anyjs . keys ( ) ]
else :
paths = [ path ]
2023-12-22 17:35:14 +03:00
response = r . json ( )
if len ( response ) > 0 :
for sugg in response :
2024-03-18 21:13:55 +03:00
for p in paths :
path_end = sugg . get ( " path_end " , None )
# handles WPT short paths
if path_end is not None and p . endswith ( path_end ) :
line_number = sugg [ " line_number " ] + 1
line = sugg [ " search " ]
log_url = f " https://treeherder.mozilla.org/logviewer?repo= { repo } &job_id= { job_id } &lineNumber= { line_number } "
break
2023-12-22 17:35:14 +03:00
rv = ( suggestions_url , line_number , line , log_url )
return rv
def read_json ( self , filename ) :
""" read data as JSON from filename """
2024-03-18 21:13:55 +03:00
2023-12-22 17:35:14 +03:00
fp = io . open ( filename , " r " , encoding = " utf-8 " )
data = json . load ( fp )
fp . close ( )
return data
2024-03-18 21:13:55 +03:00
def read_tasks ( self , filename ) :
""" read tasks as JSON from filename """
if not os . path . exists ( filename ) :
msg = f " use-tasks JSON file does not exist: { filename } "
raise OSError ( 2 , msg , filename )
tasks = self . read_json ( filename )
tasks = [ Mock ( task , MOCK_TASK_DEFAULTS , MOCK_TASK_INITS ) for task in tasks ]
for task in tasks :
if len ( task . extra ) > 0 : # pre-warm cache for extra information
self . extras [ task . id ] = task . extra
return tasks
def read_failures ( self , filename ) :
""" read failures as JSON from filename """
if not os . path . exists ( filename ) :
msg = f " use-failures JSON file does not exist: { filename } "
raise OSError ( 2 , msg , filename )
failures = self . read_json ( filename )
return failures
def read_bugs ( self , filename ) :
""" read bugs as JSON from filename """
if not os . path . exists ( filename ) :
msg = f " bugs JSON file does not exist: { filename } "
raise OSError ( 2 , msg , filename )
bugs = self . read_json ( filename )
bugs = [ Mock ( bug , MOCK_BUG_DEFAULTS ) for bug in bugs ]
return bugs
2023-12-22 17:35:14 +03:00
def write_json ( self , filename , data ) :
""" saves data as JSON to filename """
fp = io . open ( filename , " w " , encoding = " utf-8 " )
json . dump ( data , fp , indent = 2 , sort_keys = True )
fp . close ( )
def write_tasks ( self , save_tasks , tasks ) :
""" saves tasks as JSON to save_tasks """
jtasks = [ ]
for task in tasks :
if not isinstance ( task , TestTask ) :
continue
jtask = { }
jtask [ " id " ] = task . id
jtask [ " label " ] = task . label
jtask [ " duration " ] = task . duration
jtask [ " result " ] = task . result
jtask [ " state " ] = task . state
2024-01-27 01:10:45 +03:00
jtask [ " extra " ] = self . get_extra ( task . id )
2023-12-22 17:35:14 +03:00
jtags = { }
for k , v in task . tags . items ( ) :
if k == " createdForUser " :
jtags [ k ] = " ci@mozilla.com "
else :
jtags [ k ] = v
jtask [ " tags " ] = jtags
jtask [ " tier " ] = task . tier
jtask [ " results " ] = [
{ " group " : r . group , " ok " : r . ok , " duration " : r . duration }
for r in task . results
]
jtask [ " errors " ] = None # Bug with task.errors property??
jft = { }
2024-07-11 20:35:45 +03:00
if self . failure_types is not None and task . id in self . failure_types :
failure_types = self . failure_types [ task . id ] # use cache
else :
failure_types = task . failure_types
for k in failure_types :
2023-12-22 17:35:14 +03:00
jft [ k ] = [ [ f [ 0 ] , f [ 1 ] . value ] for f in task . failure_types [ k ] ]
jtask [ " failure_types " ] = jft
jtasks . append ( jtask )
self . write_json ( save_tasks , jtasks )
2024-01-12 18:56:31 +03:00
def label_to_platform_testname ( self , label ) :
""" convert from label to platform, testname for mach command line """
platform = None
testname = None
platform_details = label . split ( " / " )
if len ( platform_details ) == 2 :
platform , details = platform_details
words = details . split ( " - " )
if len ( words ) > 2 :
platform + = " / " + words . pop ( 0 ) # opt or debug
try :
_chunk = int ( words [ - 1 ] )
words . pop ( )
except ValueError :
pass
words . pop ( ) # remove test suffix
testname = " - " . join ( words )
else :
platform = None
return platform , testname
def add_attachment_log_for_task ( self , bugid , task_id ) :
""" Adds compressed log for this task to bugid """
log_url = f " https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/ { task_id } /artifacts/public/logs/live_backing.log "
r = requests . get ( log_url , headers = self . headers )
if r . status_code != 200 :
2024-07-11 20:35:45 +03:00
self . error ( f " Unable to get log for task: { task_id } " )
2024-01-12 18:56:31 +03:00
return
attach_fp = tempfile . NamedTemporaryFile ( )
fp = gzip . open ( attach_fp , " wb " )
fp . write ( r . text . encode ( " utf-8 " ) )
fp . close ( )
2024-03-18 21:13:55 +03:00
if self . _initialize_bzapi ( ) :
description = ATTACHMENT_DESCRIPTION + task_id
file_name = TASK_LOG + " .gz "
comment = " Added compressed log "
content_type = " application/gzip "
try :
self . _bzapi . attachfile (
[ bugid ] ,
attach_fp . name ,
description ,
file_name = file_name ,
comment = comment ,
content_type = content_type ,
is_private = False ,
)
except Fault :
pass # Fault expected: Failed to fetch key 9372091 from network storage: The specified key does not exist.
def get_wpt_path_meta ( self , shortpath ) :
if shortpath . startswith ( WPT0 ) :
path = shortpath
meta = shortpath . replace ( WPT0 , WPT_META0 , 1 )
elif shortpath . startswith ( WPT1 ) :
path = shortpath
meta = shortpath . replace ( WPT1 , WPT_META1 , 1 )
elif shortpath . startswith ( WPT2 ) :
path = shortpath
meta = shortpath . replace ( WPT2 , WPT_META2 , 1 )
elif shortpath . startswith ( WPT_MOZILLA ) :
shortpath = shortpath [ len ( WPT_MOZILLA ) : ]
path = WPT2 + shortpath
meta = WPT_META2 + shortpath
else :
path = WPT1 + shortpath
meta = WPT_META1 + shortpath
return ( path , meta )
def wpt_paths ( self , shortpath ) :
"""
Analyzes the WPT short path for a test and returns
( path , manifest , query , anyjs ) where
path is the relative path to the test file
manifest is the relative path to the file metadata
query is the test file query paramters ( or None )
anyjs is the html test file as reported by mozci ( or None )
"""
query = None
anyjs = None
i = shortpath . find ( " ? " )
if i > 0 :
query = shortpath [ i : ]
shortpath = shortpath [ 0 : i ]
path , manifest = self . get_wpt_path_meta ( shortpath )
failure_type = not self . isdir ( path )
if failure_type :
i = path . find ( " .any. " )
if i > 0 :
anyjs = path # orig path
manifest = manifest . replace ( path [ i : ] , " .any.js " )
path = path [ 0 : i ] + " .any.js "
else :
i = path . find ( " .window. " )
if i > 0 :
anyjs = path # orig path
manifest = manifest . replace ( path [ i : ] , " .window.js " )
path = path [ 0 : i ] + " .window.js "
else :
i = path . find ( " .worker. " )
if i > 0 :
anyjs = path # orig path
manifest = manifest . replace ( path [ i : ] , " .worker.js " )
path = path [ 0 : i ] + " .worker.js "
manifest + = " .ini "
manifest_classic = " "
if manifest . startswith ( WPT_META0 ) :
manifest_classic = manifest . replace ( WPT_META0 , WPT_META0_CLASSIC , 1 )
if self . exists ( manifest_classic ) :
if self . exists ( manifest ) :
self . warning (
f " Both classic { manifest_classic } and metadata { manifest } manifests exist "
)
else :
self . warning (
f " Using the classic { manifest_classic } manifest as the metadata manifest { manifest } does not exist "
)
manifest = manifest_classic
if not self . exists ( path ) :
return ( None , None , None , None )
return ( path , manifest , query , anyjs )
def wpt_add_skip_if ( self , manifest_str , anyjs , skip_if , bug_reference ) :
"""
Edits a WPT manifest string to add disabled condition
anyjs is a dictionary mapping from filename and any alternate basenames to
2024-07-11 20:35:45 +03:00
a boolean ( indicating if the file has been handled in the manifest ) .
Returns additional_comment ( if any )
2024-03-18 21:13:55 +03:00
"""
2024-07-11 20:35:45 +03:00
additional_comment = " "
2024-03-18 21:13:55 +03:00
disabled_key = False
disabled = " disabled: "
condition_start = " if "
condition = condition_start + skip_if + " : " + bug_reference
lines = manifest_str . splitlines ( )
section = None # name of the section
i = 0
n = len ( lines )
while i < n :
line = lines [ i ]
if line . startswith ( " [ " ) :
if section is not None and not anyjs [ section ] : # not yet handled
if not disabled_key :
lines . insert ( i , disabled )
i + = 1
lines . insert ( i , condition )
lines . insert ( i + 1 , " " ) # blank line after condition
i + = 2
n + = 2
anyjs [ section ] = True
section = line [ 1 : - 1 ]
if section in anyjs and not anyjs [ section ] :
disabled_key = False
else :
section = None # ignore section we are not interested in
elif section is not None :
if line == disabled :
disabled_key = True
elif line . startswith ( " [ " ) :
if i > 0 and i - 1 < n and lines [ i - 1 ] == " " :
del lines [ i - 1 ]
i - = 1
n - = 1
if not disabled_key :
lines . insert ( i , disabled )
i + = 1
n + = 1
lines . insert ( i , condition )
lines . insert ( i + 1 , " " ) # blank line after condition
i + = 2
n + = 2
anyjs [ section ] = True
section = None
elif line . startswith ( " " ) and not line . startswith ( " " ) :
if disabled_key : # insert condition above new key
lines . insert ( i , condition )
i + = 1
n + = 1
anyjs [ section ] = True
section = None
disabled_key = False
elif line . startswith ( " " ) :
if disabled_key and line == condition :
anyjs [ section ] = True # condition already present
section = None
i + = 1
if section is not None and not anyjs [ section ] : # not yet handled
if i > 0 and i - 1 < n and lines [ i - 1 ] == " " :
del lines [ i - 1 ]
if not disabled_key :
lines . append ( disabled )
i + = 1
n + = 1
lines . append ( condition )
lines . append ( " " ) # blank line after condition
i + = 2
n + = 2
anyjs [ section ] = True
for section in anyjs :
if not anyjs [ section ] :
if i > 0 and i - 1 < n and lines [ i - 1 ] != " " :
lines . append ( " " ) # blank line before condition
i + = 1
n + = 1
lines . append ( " [ " + section + " ] " )
lines . append ( disabled )
lines . append ( condition )
lines . append ( " " ) # blank line after condition
i + = 4
n + = 4
manifest_str = " \n " . join ( lines ) + " \n "
2024-07-11 20:35:45 +03:00
return manifest_str , additional_comment
def reftest_add_fuzzy_if(
    self,
    manifest_str,
    filename,
    fuzzy_if,
    differences,
    pixels,
    lineno,
    zero,
    bug_reference,
):
    """
    Edits a reftest list manifest string to add a fuzzy-if condition
    (not a disabled/skip condition) for the given test.

    manifest_str -- full text of the reftest list manifest
    filename -- test file the fuzzy-if annotation applies to
    fuzzy_if -- the fuzzy-if() condition to insert
    differences -- observed max image-comparison differences
    pixels -- observed counts of differing pixels
    lineno -- line in the manifest to annotate
    zero -- presumably indicates a zero-difference run was also seen -- TODO confirm
    bug_reference -- bug reference recorded alongside the condition

    Returns a tuple (updated manifest_str, additional_comment).
    """
    if self.lmp is None:
        # Lazily construct the ListManifestParser on first use; the import
        # is deferred so parse_reftest is only loaded when actually needed.
        from parse_reftest import ListManifestParser

        self.lmp = ListManifestParser(
            self.implicit_vars, self.verbose, self.error, self.warning, self.info
        )
    manifest_str, additional_comment = self.lmp.reftest_add_fuzzy_if(
        manifest_str,
        filename,
        fuzzy_if,
        differences,
        pixels,
        lineno,
        zero,
        bug_reference,
    )
    return manifest_str, additional_comment
def get_lineno_difference_pixels_status(self, task_id, manifest, allmods):
    """
    Report the reftest_errorsummary.log details cached for a task.

    Returns a tuple of:
    - lineno in manifest
    - image comparison, max *difference*
    - number of differing *pixels*
    - status (PASS or FAIL)
    Missing cache entries yield 0 (or FAIL for the status).
    """
    # Cache layout: error_summary[manifest][allmods][RUNS][task_id]
    entry = self.error_summary.get(manifest, {}).get(allmods, {})
    run = entry.get(RUNS, {}).get(task_id, {})
    return (
        entry.get(LINENO, 0),
        run.get(DIFFERENCE, 0),
        run.get(PIXELS, 0),
        run.get(STATUS, FAIL),
    )
def reftest_find_lineno(self, manifest, modifiers, allmods):
    """
    Return the 1-based line number in `manifest` whose modifiers match
    `modifiers`, else 0.

    Exact matches require the non-pref modifiers to equal the line's words
    and (when prefs/skip-ifs are present) the line's accumulated defaults
    to equal them.  When no exact match exists, a fallback match on the
    last three words (or the basenames of the last two) is accepted with
    a warning.  `allmods` is only used for diagnostic messages.
    """
    lineno = 0
    # Separate pref()/skip-if() conditions from the structural modifiers.
    mods = []
    prefs = []
    for modifier in modifiers:
        if modifier.find("pref(") >= 0 or modifier.find("skip-if(") >= 0:
            prefs.append(modifier)
        else:
            mods.append(modifier)
    m = len(mods)
    # Use a context manager so the handle is closed deterministically
    # (the original io.open(...).read() leaked the handle to GC).
    with open(manifest, "r", encoding="utf-8") as manifest_file:
        manifest_str = manifest_file.read()
    lines = manifest_str.splitlines()
    defaults = []
    found = False
    alt_lineno = 0  # best fuzzy match, used only if no exact match
    for linenum, line in enumerate(lines):
        if len(line) > 0 and line[0] == "#":  # full-line comment
            continue
        comment_start = line.find(" # ")  # MUST NOT match anchors!
        if comment_start > 0:
            line = line[0:comment_start].strip()
        words = line.split()
        n = len(words)
        if n > 1 and words[0] == "defaults":
            # "defaults" lines set the prefs inherited by following lines
            defaults = words[1:].copy()
            continue
        # Collect this line's effective prefs: inherited defaults plus any
        # inline pref()/skip-if() words (removed from the match candidates).
        line_defaults = defaults.copy()
        i = 0
        while i < n:
            if words[i].find("pref(") >= 0 or words[i].find("skip-if(") >= 0:
                line_defaults.append(words[i])
                del words[i]
                n -= 1
            else:
                i += 1
        if (len(prefs) == 0 or prefs == line_defaults) and words == mods:
            found = True
            lineno = linenum + 1
            break
        elif m > 2 and n > 2:
            # Fuzzy fallbacks: same trailing three words, or same basenames
            # of the two test-file paths.
            if words[-3:] == mods[-3:]:
                alt_lineno = linenum + 1
            else:
                bwords = [os.path.basename(p) for p in words[-2:]]
                bmods = [os.path.basename(p) for p in mods[-2:]]
                if bwords == bmods:
                    alt_lineno = linenum + 1
    if not found:
        if alt_lineno > 0:
            lineno = alt_lineno
            self.warning(
                f"manifest '{manifest}' found lineno: {lineno}, but it does not contain all the prefs from modifiers,\nSEARCH: {allmods}\nFOUND : {lines[alt_lineno - 1]}"
            )
        else:
            lineno = 0
            self.error(
                f"manifest '{manifest}' does not contain line with modifiers: {allmods}"
            )
    return lineno
def get_allpaths(self, task_id, manifest, path):
    """
    Looks up the reftest_errorsummary.log for a task
    and caches the details in self.error_summary by
    group (manifest), allmods, RUNS, task_id
    where allmods is the space-joined concatenation of all modifiers
    and the details include
    - image comparison, max *difference*
    - number of differing *pixels*
    - status: unexpected PASS or FAIL
    The list of unique modifiers (allmods) for the given path is returned
    """
    allpaths = []
    words = path.split()
    # path must look like "<file> <type> <file>" with a known reftest type
    if len(words) != 3 or words[1] not in TEST_TYPES:
        self.warning(
            f"reftest_errorsummary.log for task: {task_id} has unsupported test type '{path}'"
        )
        return allpaths
    # Serve from the cache when this task's results were already ingested
    if manifest in self.error_summary:
        for allmods in self.error_summary[manifest]:
            if self.error_summary[manifest][allmods][
                TEST
            ] == path and task_id in self.error_summary[manifest][allmods].get(
                RUNS, {}
            ):
                allpaths.append(allmods)
        if len(allpaths) > 0:
            return allpaths  # cached (including self tests)
    error_url = f"https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/{task_id}/artifacts/public/test_info/reftest_errorsummary.log"
    self.vinfo(f"Requesting reftest_errorsummary.log for task: {task_id}")
    r = requests.get(error_url, headers=self.headers)
    if r.status_code != 200:
        self.error(f"Unable to get reftest_errorsummary.log for task: {task_id}")
        return allpaths
    # Each line of the errorsummary log is an independent JSON object
    for line in r.text.encode("utf-8").splitlines():
        summary = json.loads(line)
        group = summary.get(GROUP, "")
        # group is expected to be a manifest path in the local tree;
        # lines without an existing group are not error lines
        if not group or not os.path.exists(group):  # not error line
            continue
        test = summary.get(TEST, None)
        if test is None:
            continue
        if not MODIFIERS in summary:
            self.warning(
                f"reftest_errorsummary.log for task: {task_id} does not have modifiers for '{test}'"
            )
            continue
        words = test.split()
        if len(words) != 3 or words[1] not in TEST_TYPES:
            self.warning(
                f"reftest_errorsummary.log for task: {task_id} has unsupported test '{test}'"
            )
            continue
        status = summary.get(STATUS, "")
        if status not in [FAIL, PASS]:
            self.warning(
                f"reftest_errorsummary.log for task: {task_id} has unknown status: {status} for '{test}'"
            )
            continue
        error = summary.get(SUBTEST, "")
        mods = summary[MODIFIERS]
        allmods = " ".join(mods)
        # Create nested cache entries on demand: group -> allmods -> RUNS -> task_id
        if group not in self.error_summary:
            self.error_summary[group] = {}
        if allmods not in self.error_summary[group]:
            self.error_summary[group][allmods] = {}
        self.error_summary[group][allmods][TEST] = test
        lineno = self.error_summary[group][allmods].get(LINENO, 0)
        if lineno == 0:
            # Resolve (and cache) the manifest line these modifiers refer to
            lineno = self.reftest_find_lineno(group, mods, allmods)
        if lineno > 0:
            self.error_summary[group][allmods][LINENO] = lineno
        if RUNS not in self.error_summary[group][allmods]:
            self.error_summary[group][allmods][RUNS] = {}
        if task_id not in self.error_summary[group][allmods][RUNS]:
            self.error_summary[group][allmods][RUNS][task_id] = {}
        self.error_summary[group][allmods][RUNS][task_id][ERROR] = error
        # Lazily compile the regex that extracts difference/pixels from the
        # subtest error message
        if self._subtest_rx is None:
            self._subtest_rx = re.compile(SUBTEST_REGEX)
        m = self._subtest_rx.findall(error)
        if len(m) == 1:
            difference = int(m[0][0])
            pixels = int(m[0][1])
        else:
            difference = 0
            pixels = 0
        # Only record non-default values to keep the cache compact
        if difference > 0:
            self.error_summary[group][allmods][RUNS][task_id][
                DIFFERENCE
            ] = difference
        if pixels > 0:
            self.error_summary[group][allmods][RUNS][task_id][PIXELS] = pixels
        if status != FAIL:
            self.error_summary[group][allmods][RUNS][task_id][STATUS] = status
        if test == path:
            allpaths.append(allmods)
    return allpaths