Mirror of https://github.com/mozilla/gecko-dev.git
Backed out 2 changesets (bug 1427877) for causing Thunderbird xpcshell-bustages. a=backout

MANUAL PUSH: Approval from sheriffs via IRC. Immediate fix for massive xpcshell failures.

Backed out changeset 6b90caa175cb (bug 1427877)
Backed out changeset bf126cb4b08f (bug 1427877)

--HG--
extra : amend_source : 0ff7d8fd28b27380cb21ae41ff1df5339759863b
Parent: 807bd8d142
Commit: 605749ff64
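
The functional core of what is being backed out is visible in the hunks below: the reverted changesets had replaced the FILENAME_PARSERS dict in parse_histograms.py (exact-basename lookup) with a list of predicate lambdas accepting any *.json file, added multi-file loading loops to the gen_*_data.py scripts, and wired MOZ_TELEMETRY_EXTRA_{HISTOGRAM,SCALAR,EVENT}_FILES into moz.build; this push restores the single-file versions. A minimal, self-contained sketch of the two dispatch styles (the stand-in parser bodies, the *_DICT/*_LIST names, and the pick_parser helper are illustrative, not the module's real code):

import os

# Illustrative stand-ins; the real parsers live in parse_histograms.py.
def from_Histograms_json(filename):
    return 'histograms from %s' % filename

def from_nsDeprecatedOperationList(filename):
    return 'deprecated-operation counters from %s' % filename

# Restored by this backout: exact-basename dispatch. An unknown
# basename fails fast with a KeyError.
FILENAME_PARSERS_DICT = {
    'Histograms.json': from_Histograms_json,
    'nsDeprecatedOperationList.h': from_nsDeprecatedOperationList,
}

# Backed out: predicate dispatch, where the first matching lambda wins,
# so any *.json basename is accepted, not just Histograms.json.
FILENAME_PARSERS_LIST = [
    (lambda x: from_Histograms_json if x.endswith('.json') else None),
    (lambda x: from_nsDeprecatedOperationList
     if x == 'nsDeprecatedOperationList.h' else None),
]

def pick_parser(filename):
    name = os.path.basename(filename)
    for check in FILENAME_PARSERS_LIST:
        parser = check(name)
        if parser is not None:
            return parser
    raise ValueError("Don't know how to parse %s." % filename)

# Accepted by the list form; the dict form would raise KeyError here.
parser = pick_parser('extra/OtherHistograms.json')
print(parser('extra/OtherHistograms.json'))

The dict form trades extensibility for strictness; the lambda-list form was what let additional (e.g. Thunderbird-supplied) definition files through.
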
@@ -6806,6 +6806,33 @@
     "bug_numbers": [1254099],
     "description": "Generic histogram to track uptake of remote content like blocklists, settings or updates."
   },
+  "THUNDERBIRD_GLODA_SIZE_MB": {
+    "record_in_processes": ["main", "content"],
+    "products": ["firefox", "fennec", "geckoview"],
+    "expires_in_version": "never",
+    "kind": "linear",
+    "high": 1000,
+    "n_buckets": 40,
+    "description": "Gloda: size of global-messages-db.sqlite (MB)"
+  },
+  "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS": {
+    "record_in_processes": ["main", "content"],
+    "products": ["firefox", "fennec", "geckoview"],
+    "expires_in_version": "never",
+    "kind": "exponential",
+    "high": 10000,
+    "n_buckets": 30,
+    "description": "Conversations: time between the moment we click and the second gloda query returns (ms)"
+  },
+  "THUNDERBIRD_INDEXING_RATE_MSG_PER_S": {
+    "record_in_processes": ["main", "content"],
+    "products": ["firefox", "fennec", "geckoview"],
+    "expires_in_version": "never",
+    "kind": "linear",
+    "high": 100,
+    "n_buckets": 20,
+    "description": "Gloda: indexing rate (message/s)"
+  },
   "FX_GESTURE_INSTALL_SNAPSHOT_OF_PAGE": {
     "record_in_processes": ["main", "content"],
     "products": ["firefox", "fennec", "geckoview"],

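As context for the restored entries above: kind, high, and n_buckets declaratively describe each histogram's bucket layout — "linear" spaces bucket boundaries evenly up to high, "exponential" spaces them roughly evenly on a log scale. A rough, self-contained approximation (approx_buckets is a made-up helper for illustration; Telemetry's actual bucketing code differs in rounding details):

import math

def approx_buckets(kind, high, n_buckets, low=1):
    """Rough illustration of bucket lower bounds; not Telemetry's API."""
    if kind == 'linear':
        # Evenly spaced boundaries from low to high, plus the 0 bucket.
        step = (high - low) / float(n_buckets - 1)
        return [0] + [int(round(low + i * step)) for i in range(n_buckets - 1)]
    if kind == 'exponential':
        # Roughly even spacing in log scale, forced strictly increasing.
        bounds = [0, low]
        current = low
        for i in range(2, n_buckets):
            log_step = (math.log(high) - math.log(current)) / (n_buckets - i)
            nxt = int(round(current * math.exp(log_step)))
            current = nxt if nxt > current else current + 1
            bounds.append(current)
        return bounds
    raise ValueError('unknown kind: %s' % kind)

# THUNDERBIRD_GLODA_SIZE_MB: kind=linear, high=1000, n_buckets=40
print(approx_buckets('linear', 1000, 40)[:5])     # e.g. [0, 1, 27, 52, 78]
# ..._TIME_TO_2ND_GLODA_QUERY_MS: kind=exponential, high=10000, n_buckets=30
print(approx_buckets('exponential', 10000, 30)[:5])

So the linear gloda-size probe resolves database sizes in roughly 26 MB steps, while the exponential query-latency probe keeps fine resolution at small values.
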
@@ -138,14 +138,13 @@ def generate_JSON_definitions(output, *filenames):
     We only support a single file.
     """
     # Load the event data.
-    events = []
-    for filename in filenames:
-        try:
-            batch = parse_events.load_events(filename, True)
-            events.extend(batch)
-        except ParserError as ex:
-            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
-            sys.exit(1)
+    if len(filenames) > 1:
+        raise Exception('We don\'t support loading from more than one file.')
+    try:
+        events = parse_events.load_events(filenames[0], True)
+    except ParserError as ex:
+        print("\nError processing events:\n" + str(ex) + "\n")
+        sys.exit(1)
 
     event_definitions = OrderedDict()
     for event in events:

@@ -171,14 +170,13 @@ def generate_JSON_definitions(output, *filenames):
 
 def main(output, *filenames):
     # Load the event data.
-    events = []
-    for filename in filenames:
-        try:
-            batch = parse_events.load_events(filename, True)
-            events.extend(batch)
-        except ParserError as ex:
-            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
-            sys.exit(1)
+    if len(filenames) > 1:
+        raise Exception('We don\'t support loading from more than one file.')
+    try:
+        events = parse_events.load_events(filenames[0], True)
+    except ParserError as ex:
+        print("\nError processing events:\n" + str(ex) + "\n")
+        sys.exit(1)
 
     # Write the scalar data file.
     print(banner, file=output)

@@ -34,14 +34,14 @@ file_footer = """\
 
 def main(output, *filenames):
     # Load the events first.
-    events = []
-    for filename in filenames:
-        try:
-            batch = parse_events.load_events(filename, True)
-            events.extend(batch)
-        except ParserError as ex:
-            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
-            sys.exit(1)
+    if len(filenames) > 1:
+        raise Exception('We don\'t support loading from more than one file.')
+
+    try:
+        events = parse_events.load_events(filenames[0], True)
+    except ParserError as ex:
+        print("\nError processing events:\n" + str(ex) + "\n")
+        sys.exit(1)
 
     grouped = dict()
     index = 0

@@ -109,15 +109,14 @@ def write_scalar_tables(scalars, output):
 
 
 def parse_scalar_definitions(filenames):
-    scalars = []
-    for filename in filenames:
-        try:
-            batch = parse_scalars.load_scalars(filename)
-            scalars.extend(batch)
-        except ParserError as ex:
-            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
-            sys.exit(1)
-    return scalars
+    if len(filenames) > 1:
+        raise Exception('We don\'t support loading from more than one file.')
+
+    try:
+        return parse_scalars.load_scalars(filenames[0])
+    except ParserError as ex:
+        print("\nError processing scalars:\n" + str(ex) + "\n")
+        sys.exit(1)
 
 
 def generate_JSON_definitions(output, *filenames):

@@ -35,14 +35,14 @@ file_footer = """\
 
 def main(output, *filenames):
     # Load the scalars first.
-    scalars = []
-    for filename in filenames:
-        try:
-            batch = parse_scalars.load_scalars(filename)
-            scalars.extend(batch)
-        except ParserError as ex:
-            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
-            sys.exit(1)
+    if len(filenames) > 1:
+        raise Exception('We don\'t support loading from more than one file.')
+
+    try:
+        scalars = parse_scalars.load_scalars(filenames[0])
+    except ParserError as ex:
+        print("\nError processing scalars:\n" + str(ex) + "\n")
+        sys.exit(1)
 
     # Write the enum file.
     print(banner, file=output)

@@ -691,7 +691,7 @@ def load_histograms_into_dict(ordered_pairs, strict_type_checks):
 # just Histograms.json. For each file's basename, we have a specific
 # routine to parse that file, and return a dictionary mapping histogram
 # names to histogram parameters.
-def from_json(filename, strict_type_checks):
+def from_Histograms_json(filename, strict_type_checks):
     with open(filename, 'r') as f:
         try:
             def hook(ps):

@@ -731,17 +731,17 @@ def from_nsDeprecatedOperationList(filename, strict_type_checks):
     return histograms
 
 
-FILENAME_PARSERS = [
-    (lambda x: from_json if x.endswith('.json') else None),
-    (lambda x: from_nsDeprecatedOperationList if x == 'nsDeprecatedOperationList.h' else None),
-]
+FILENAME_PARSERS = {
+    'Histograms.json': from_Histograms_json,
+    'nsDeprecatedOperationList.h': from_nsDeprecatedOperationList,
+}
 
 # Similarly to the dance above with buildconfig, usecounters may not be
 # available, so handle that gracefully.
 try:
     import usecounters
 
-    FILENAME_PARSERS.append(lambda x: from_UseCounters_conf if x == 'UseCounters.conf' else None)
+    FILENAME_PARSERS['UseCounters.conf'] = from_UseCounters_conf
 except ImportError:
     pass
 

@@ -755,15 +755,7 @@ the histograms defined in filenames.
 
     all_histograms = OrderedDict()
     for filename in filenames:
-        parser = None
-        for checkFn in FILENAME_PARSERS:
-            parser = checkFn(os.path.basename(filename))
-            if parser is not None:
-                break
-
-        if parser is None:
-            ParserError("Don't know how to parse %s." % filename).handle_now()
-
+        parser = FILENAME_PARSERS[os.path.basename(filename)]
         histograms = parser(filename, strict_type_checks)
 
         # OrderedDicts are important, because then the iteration order over

@@ -29,7 +29,6 @@ SUPPORTED_PRODUCTS = {
     'fennec': 'Fennec',
     'geckoview': 'Geckoview',
     'geckoview_streaming': 'GeckoviewStreaming',
-    'thunderbird': 'Thunderbird',
 }
 
 SUPPORTED_OPERATING_SYSTEMS = [

@@ -188,8 +188,6 @@ SupportedProduct GetCurrentProduct() {
   } else {
     return SupportedProduct::Fennec;
   }
-#elif defined(MOZ_THUNDERBIRD)
-  return SupportedProduct::Thunderbird;
 #else
   return SupportedProduct::Firefox;
 #endif

@@ -39,7 +39,6 @@ enum class SupportedProduct : uint8_t {
   Fennec = (1 << 1),
   Geckoview = (1 << 2),
   GeckoviewStreaming = (1 << 3),
-  Thunderbird = (1 << 4),
 };
 MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(SupportedProduct);
 

@@ -402,6 +402,9 @@
     "SYSTEM_FONT_FALLBACK_SCRIPT",
     "TAP_TO_LOAD_ENABLED",
     "TAP_TO_LOAD_IMAGE_SIZE",
+    "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS",
+    "THUNDERBIRD_GLODA_SIZE_MB",
+    "THUNDERBIRD_INDEXING_RATE_MSG_PER_S",
     "TLS_ERROR_REPORT_UI",
     "TRANSLATED_CHARACTERS",
     "TRANSLATED_PAGES",

@@ -1036,6 +1039,9 @@
     "TELEMETRY_TEST_KEYED_RELEASE_OPTOUT",
     "TELEMETRY_TEST_RELEASE_OPTIN",
     "TELEMETRY_TEST_RELEASE_OPTOUT",
+    "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS",
+    "THUNDERBIRD_GLODA_SIZE_MB",
+    "THUNDERBIRD_INDEXING_RATE_MSG_PER_S",
     "TLS_ERROR_REPORT_UI",
     "TOTAL_CONTENT_PAGE_LOAD_TIME",
     "TOTAL_COUNT_HIGH_ERRORS",

@@ -158,8 +158,6 @@ histogram_files = [
     '/dom/base/UseCounters.conf',
     '/dom/base/nsDeprecatedOperationList.h',
 ]
-if CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES']:
-    histogram_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES'])
 
 data = GENERATED_FILES['TelemetryHistogramData.inc']
 data.script = 'build_scripts/gen_histogram_data.py'

@@ -177,8 +175,6 @@ data.inputs = histogram_files
 scalar_files = [
     'Scalars.yaml',
 ]
-if CONFIG['MOZ_TELEMETRY_EXTRA_SCALAR_FILES']:
-    scalar_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_SCALAR_FILES'])
 
 scalar_data = GENERATED_FILES['TelemetryScalarData.h']
 scalar_data.script = 'build_scripts/gen_scalar_data.py'

@@ -201,8 +197,6 @@ FINAL_TARGET_FILES += ['!ScalarArtifactDefinitions.json']
 event_files = [
     'Events.yaml',
 ]
-if CONFIG['MOZ_TELEMETRY_EXTRA_EVENT_FILES']:
-    event_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_EVENT_FILES'])
 
 event_data = GENERATED_FILES['TelemetryEventData.h']
 event_data.script = 'build_scripts/gen_event_data.py'