Mirror of https://github.com/mozilla/gecko-dev.git
Merge inbound to mozilla-central. a=merge
This commit is contained in:
Commit 34cb8d0a2a
@@ -4,3 +4,4 @@
  if (os == "win") and debug and webrender: FAIL
  if (os == "win") and debug and not webrender and (processor == "x86_64"): [PASS, FAIL]
  if (os == "win") and (processor == "x86_64"): FAIL
  if (os == "win") and not debug: [PASS, FAIL]
@@ -6801,33 +6801,6 @@
     "bug_numbers": [1254099],
     "description": "Generic histogram to track uptake of remote content like blocklists, settings or updates."
   },
-  "THUNDERBIRD_GLODA_SIZE_MB": {
-    "record_in_processes": ["main", "content"],
-    "products": ["firefox", "fennec", "geckoview"],
-    "expires_in_version": "never",
-    "kind": "linear",
-    "high": 1000,
-    "n_buckets": 40,
-    "description": "Gloda: size of global-messages-db.sqlite (MB)"
-  },
-  "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS": {
-    "record_in_processes": ["main", "content"],
-    "products": ["firefox", "fennec", "geckoview"],
-    "expires_in_version": "never",
-    "kind": "exponential",
-    "high": 10000,
-    "n_buckets": 30,
-    "description": "Conversations: time between the moment we click and the second gloda query returns (ms)"
-  },
-  "THUNDERBIRD_INDEXING_RATE_MSG_PER_S": {
-    "record_in_processes": ["main", "content"],
-    "products": ["firefox", "fennec", "geckoview"],
-    "expires_in_version": "never",
-    "kind": "linear",
-    "high": 100,
-    "n_buckets": 20,
-    "description": "Gloda: indexing rate (message/s)"
-  },
   "FX_GESTURE_INSTALL_SNAPSHOT_OF_PAGE": {
     "record_in_processes": ["main", "content"],
     "products": ["firefox", "fennec", "geckoview"],
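Each removed probe's kind, high and n_buckets fields determine its bucket layout. A rough Python sketch of linear versus exponential bucketing, as an approximation of Telemetry's actual bucket algorithm rather than a verbatim port of it:

    import math

    def linear_buckets(high, n_buckets):
        # Bucket 0 catches zero samples; the rest are evenly spaced up to high.
        step = high / float(n_buckets - 1)
        return [0] + [int(round(step * i)) for i in range(1, n_buckets)]

    def exponential_buckets(high, n_buckets):
        # Bucket 0 catches zero samples; the rest are log-spaced from 1 to high.
        ratio = math.exp(math.log(high) / (n_buckets - 2))
        edges, current = [0, 1], 1.0
        for _ in range(n_buckets - 2):
            current *= ratio
            edges.append(int(round(current)))
        return edges

    # THUNDERBIRD_GLODA_SIZE_MB: 40 linear buckets up to 1000 MB.
    print(linear_buckets(1000, 40)[:5])        # [0, 26, 51, 77, 103]
    # THUNDERBIRD_CONVERSATIONS_...: 30 exponential buckets up to 10000 ms.
    print(exponential_buckets(10000, 30)[:5])  # [0, 1, 1, 2, 3]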
@@ -138,13 +138,14 @@ def generate_JSON_definitions(output, *filenames):
     We only support a single file.
     """
     # Load the event data.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-    try:
-        events = parse_events.load_events(filenames[0], True)
-    except ParserError as ex:
-        print("\nError processing events:\n" + str(ex) + "\n")
-        sys.exit(1)
+    events = []
+    for filename in filenames:
+        try:
+            batch = parse_events.load_events(filename, True)
+            events.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     event_definitions = OrderedDict()
     for event in events:
@@ -170,13 +171,14 @@ def generate_JSON_definitions(output, *filenames):
 
 def main(output, *filenames):
     # Load the event data.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-    try:
-        events = parse_events.load_events(filenames[0], True)
-    except ParserError as ex:
-        print("\nError processing events:\n" + str(ex) + "\n")
-        sys.exit(1)
+    events = []
+    for filename in filenames:
+        try:
+            batch = parse_events.load_events(filename, True)
+            events.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     # Write the scalar data file.
     print(banner, file=output)
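This hunk and the generator hunks that follow all make the same change: the single-file guard gives way to a per-file loop that aggregates results and names the offending file on error. A minimal standalone sketch of the new pattern, with a stub loader standing in for parse_events.load_events:

    from __future__ import print_function
    import sys

    class ParserError(Exception):
        pass

    def load_events(filename, strict):
        # Stub for parse_events.load_events; a real loader returns event objects.
        if not filename.endswith('.yaml'):
            raise ParserError('%s is not an event definition file' % filename)
        return []

    def load_all(*filenames):
        events = []
        for filename in filenames:
            try:
                events.extend(load_events(filename, True))
            except ParserError as ex:
                # Name the failing file; with one input the old message sufficed.
                print("\nError processing %s:\n%s\n" % (filename, str(ex)),
                      file=sys.stderr)
                sys.exit(1)
        return events

    load_all('Events.yaml', 'extra/Events.yaml')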
@@ -34,14 +34,14 @@ file_footer = """\
 
 def main(output, *filenames):
     # Load the events first.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-
-    try:
-        events = parse_events.load_events(filenames[0], True)
-    except ParserError as ex:
-        print("\nError processing events:\n" + str(ex) + "\n")
-        sys.exit(1)
+    events = []
+    for filename in filenames:
+        try:
+            batch = parse_events.load_events(filename, True)
+            events.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     grouped = dict()
     index = 0
@@ -109,14 +109,15 @@ def write_scalar_tables(scalars, output):
 
 
 def parse_scalar_definitions(filenames):
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-
-    try:
-        return parse_scalars.load_scalars(filenames[0])
-    except ParserError as ex:
-        print("\nError processing scalars:\n" + str(ex) + "\n")
-        sys.exit(1)
+    scalars = []
+    for filename in filenames:
+        try:
+            batch = parse_scalars.load_scalars(filename)
+            scalars.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
+    return scalars
 
 
 def generate_JSON_definitions(output, *filenames):
@@ -35,14 +35,14 @@ file_footer = """\
 
 def main(output, *filenames):
     # Load the scalars first.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-
-    try:
-        scalars = parse_scalars.load_scalars(filenames[0])
-    except ParserError as ex:
-        print("\nError processing scalars:\n" + str(ex) + "\n")
-        sys.exit(1)
+    scalars = []
+    for filename in filenames:
+        try:
+            batch = parse_scalars.load_scalars(filename)
+            scalars.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     # Write the enum file.
     print(banner, file=output)
@@ -691,7 +691,7 @@ def load_histograms_into_dict(ordered_pairs, strict_type_checks):
 # just Histograms.json. For each file's basename, we have a specific
 # routine to parse that file, and return a dictionary mapping histogram
 # names to histogram parameters.
-def from_Histograms_json(filename, strict_type_checks):
+def from_json(filename, strict_type_checks):
     with open(filename, 'r') as f:
         try:
             def hook(ps):
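The rename from from_Histograms_json to from_json lets the same routine parse any .json histogram file, not just one fixed basename. The truncated body above appears to install an object_pairs_hook; a self-contained sketch of that duplicate-key-detection idiom, as an assumption about the elided code rather than a quotation of it:

    import json

    def from_json(filename):
        def hook(pairs):
            # json.load keeps only the last value for a repeated key; catch
            # duplicate histogram names explicitly instead.
            result = {}
            for key, value in pairs:
                if key in result:
                    raise ValueError('duplicate key %r in %s' % (key, filename))
                result[key] = value
            return result

        with open(filename, 'r') as f:
            return json.load(f, object_pairs_hook=hook)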
@@ -731,17 +731,17 @@ def from_nsDeprecatedOperationList(filename, strict_type_checks):
     return histograms
 
 
-FILENAME_PARSERS = {
-    'Histograms.json': from_Histograms_json,
-    'nsDeprecatedOperationList.h': from_nsDeprecatedOperationList,
-}
+FILENAME_PARSERS = [
+    (lambda x: from_json if x.endswith('.json') else None),
+    (lambda x: from_nsDeprecatedOperationList if x == 'nsDeprecatedOperationList.h' else None),
+]
 
 # Similarly to the dance above with buildconfig, usecounters may not be
 # available, so handle that gracefully.
 try:
     import usecounters
 
-    FILENAME_PARSERS['UseCounters.conf'] = from_UseCounters_conf
+    FILENAME_PARSERS.append(lambda x: from_UseCounters_conf if x == 'UseCounters.conf' else None)
 except ImportError:
     pass
 
@@ -755,7 +755,15 @@ the histograms defined in filenames.
 
     all_histograms = OrderedDict()
     for filename in filenames:
-        parser = FILENAME_PARSERS[os.path.basename(filename)]
+        parser = None
+        for checkFn in FILENAME_PARSERS:
+            parser = checkFn(os.path.basename(filename))
+            if parser is not None:
+                break
+
+        if parser is None:
+            ParserError("Don't know how to parse %s." % filename).handle_now()
+
         histograms = parser(filename, strict_type_checks)
 
         # OrderedDicts are important, because then the iteration order over
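FILENAME_PARSERS changes from an exact-basename dictionary to an ordered list of predicates, so any *.json input (for example one supplied via the MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES hook further down) resolves to from_json without being registered by name. A standalone sketch of the dispatch, with stub parsers and an illustrative path:

    import os

    def from_json(filename):                        # stub
        return {}

    def from_nsDeprecatedOperationList(filename):   # stub
        return {}

    FILENAME_PARSERS = [
        (lambda x: from_json if x.endswith('.json') else None),
        (lambda x: from_nsDeprecatedOperationList
         if x == 'nsDeprecatedOperationList.h' else None),
    ]

    def find_parser(path):
        # The first predicate that recognizes the basename wins.
        for check in FILENAME_PARSERS:
            parser = check(os.path.basename(path))
            if parser is not None:
                return parser
        raise ValueError("Don't know how to parse %s." % path)

    assert find_parser('/comm/Histograms.json') is from_json  # illustrative path
    assert find_parser('nsDeprecatedOperationList.h') is from_nsDeprecatedOperationList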
@@ -29,6 +29,7 @@ SUPPORTED_PRODUCTS = {
     'fennec': 'Fennec',
     'geckoview': 'Geckoview',
     'geckoview_streaming': 'GeckoviewStreaming',
+    'thunderbird': 'Thunderbird',
 }
 
 SUPPORTED_OPERATING_SYSTEMS = [
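SUPPORTED_PRODUCTS maps the lowercase names allowed in a probe's products list to the enum variant names emitted into the generated headers. A hypothetical validation helper in the spirit of this table; the helper itself is illustrative, not from the tree, and the 'firefox' entry is assumed since it sits above the quoted hunk:

    SUPPORTED_PRODUCTS = {
        'firefox': 'Firefox',   # assumed; outside the quoted hunk
        'fennec': 'Fennec',
        'geckoview': 'Geckoview',
        'geckoview_streaming': 'GeckoviewStreaming',
        'thunderbird': 'Thunderbird',
    }

    def check_products(probe_name, products):
        # Illustrative helper: reject product names outside the supported set.
        unknown = [p for p in products if p not in SUPPORTED_PRODUCTS]
        if unknown:
            raise ValueError('%s lists unsupported products: %s'
                             % (probe_name, ', '.join(unknown)))

    check_products('THUNDERBIRD_GLODA_SIZE_MB', ['firefox', 'thunderbird'])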
@@ -188,6 +188,8 @@ SupportedProduct GetCurrentProduct() {
   } else {
     return SupportedProduct::Fennec;
   }
+#elif defined(MOZ_THUNDERBIRD)
+  return SupportedProduct::Thunderbird;
 #else
   return SupportedProduct::Firefox;
 #endif
@@ -39,6 +39,7 @@ enum class SupportedProduct : uint8_t {
   Fennec = (1 << 1),
   Geckoview = (1 << 2),
   GeckoviewStreaming = (1 << 3),
+  Thunderbird = (1 << 4),
 };
 MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(SupportedProduct);
 
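Because each product occupies its own bit, a probe's products list can be folded into a single mask and tested with the bitwise operators that MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS generates. A Python mirror of the idea; the Firefox = 1 << 0 variant sits above the quoted hunk and is assumed:

    from enum import IntFlag

    class SupportedProduct(IntFlag):
        FIREFOX = 1 << 0   # assumed; outside the quoted hunk
        FENNEC = 1 << 1
        GECKOVIEW = 1 << 2
        GECKOVIEW_STREAMING = 1 << 3
        THUNDERBIRD = 1 << 4

    # Fold a probe's products list into one mask, then test the running product.
    mask = SupportedProduct.FIREFOX | SupportedProduct.THUNDERBIRD
    assert mask & SupportedProduct.THUNDERBIRD     # records on Thunderbird
    assert not (mask & SupportedProduct.FENNEC)    # skipped on Fennec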
@@ -402,9 +402,6 @@
     "SYSTEM_FONT_FALLBACK_SCRIPT",
     "TAP_TO_LOAD_ENABLED",
     "TAP_TO_LOAD_IMAGE_SIZE",
-    "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS",
-    "THUNDERBIRD_GLODA_SIZE_MB",
-    "THUNDERBIRD_INDEXING_RATE_MSG_PER_S",
     "TLS_ERROR_REPORT_UI",
     "TRANSLATED_CHARACTERS",
     "TRANSLATED_PAGES",
@@ -1039,9 +1036,6 @@
     "TELEMETRY_TEST_KEYED_RELEASE_OPTOUT",
     "TELEMETRY_TEST_RELEASE_OPTIN",
     "TELEMETRY_TEST_RELEASE_OPTOUT",
-    "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS",
-    "THUNDERBIRD_GLODA_SIZE_MB",
-    "THUNDERBIRD_INDEXING_RATE_MSG_PER_S",
     "TLS_ERROR_REPORT_UI",
     "TOTAL_CONTENT_PAGE_LOAD_TIME",
     "TOTAL_COUNT_HIGH_ERRORS",
@@ -158,6 +158,8 @@ histogram_files = [
     '/dom/base/UseCounters.conf',
     '/dom/base/nsDeprecatedOperationList.h',
 ]
+if CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES']:
+    histogram_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES'])
 
 data = GENERATED_FILES['TelemetryHistogramData.inc']
 data.script = 'build_scripts/gen_histogram_data.py'
@@ -175,6 +177,8 @@ data.inputs = histogram_files
 scalar_files = [
     'Scalars.yaml',
 ]
+if CONFIG['MOZ_TELEMETRY_EXTRA_SCALAR_FILES']:
+    scalar_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_SCALAR_FILES'])
 
 scalar_data = GENERATED_FILES['TelemetryScalarData.h']
 scalar_data.script = 'build_scripts/gen_scalar_data.py'
@@ -197,6 +201,8 @@ FINAL_TARGET_FILES += ['!ScalarArtifactDefinitions.json']
 event_files = [
     'Events.yaml',
 ]
+if CONFIG['MOZ_TELEMETRY_EXTRA_EVENT_FILES']:
+    event_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_EVENT_FILES'])
 
 event_data = GENERATED_FILES['TelemetryEventData.h']
 event_data.script = 'build_scripts/gen_event_data.py'
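All three moz.build hunks share one pattern: a MOZ_TELEMETRY_EXTRA_*_FILES config switch appends out-of-tree definition files to the inputs of the generated headers, which is what the multi-file loaders above consume. A reduced sketch of the pattern with stand-in values; CONFIG here is a plain dict standing in for the build's configuration, and the extra path is illustrative only:

    # Stand-in for the build's CONFIG; the extra path is illustrative.
    CONFIG = {
        'MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES':
            ['/comm/mail/telemetry/Histograms.json'],
    }

    histogram_files = [
        'Histograms.json',
        '/dom/base/UseCounters.conf',
        '/dom/base/nsDeprecatedOperationList.h',
    ]
    if CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES']:
        histogram_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES'])

    # histogram_files then feeds GENERATED_FILES['TelemetryHistogramData.inc'],
    # so the extra probes are compiled in exactly like in-tree ones.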