Decorate MainPings and CrashPings with a parse() method
This commit is contained in:
Родитель
46548dcc92
Коммит
bf8ee528c7
|
@ -141,37 +141,44 @@ object ErrorAggregator {
|
||||||
dimensions.build
|
dimensions.build
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Enriches a [[CrashPing]] with a `parse()` method that turns it into a
  * single aggregate row.
  */
class ParsableCrashPing(ping: CrashPing) {
  /** Builds the aggregate row for this crash ping.
    *
    * @return a `Tuple1` holding the merged dimensions/stats row, or
    *         `Tuple1(null)` when the ping must be discarded. A `null` row is
    *         used instead of `Option[Row]` because entire rows cannot be null
    *         in Spark SQL (see the Tuple1[Row] note elsewhere in this file).
    */
  def parse(): Tuple1[Row] = {
    // Non-main crashes are already retrieved from main pings, so they are
    // dropped here to avoid double counting.
    if (!ping.isMain()) {
      Tuple1(null)
    } else {
      val dimensions = buildDimensions(ping.meta)
      val stats = new RowBuilder(statsSchema)
      stats("count") = Some(1)
      stats("main_crashes") = Some(1)
      Tuple1(RowBuilder.merge(dimensions, stats.build))
    }
  }
}

// Allows calling `crashPing.parse()` directly via an implicit conversion.
implicit def ParsableCrashPingToCrashPing(ping: CrashPing) = new ParsableCrashPing(ping)
|
||||||
|
|
||||||
/** Enriches a [[MainPing]] with a `parse()` method that turns it into a
  * single aggregate row.
  */
class ParsableMainPing(ping: MainPing) {
  /** Builds the aggregate row for this main ping.
    *
    * @return a `Tuple1` holding the merged dimensions/stats row, or
    *         `Tuple1(null)` when the ping must be discarded. A `null` row is
    *         used instead of `Option[Row]` because entire rows cannot be null
    *         in Spark SQL (see the Tuple1[Row] note elsewhere in this file).
    */
  def parse(): Tuple1[Row] = {
    // If a main ping has no usage hours discard it.
    val usageHours = ping.usageHours()
    if (usageHours == 0) {
      Tuple1(null)
    } else {
      val dimensions = buildDimensions(ping.meta)
      val stats = new RowBuilder(statsSchema)
      stats("count") = Some(1)
      stats("usage_hours") = Some(usageHours)
      // Copy every error-count histogram straight into its same-named column.
      countHistogramErrorsSchema.fieldNames.foreach { statsName =>
        stats(statsName) = Some(ping.getCountHistogramValue(statsName))
      }
      // Per-process crash counts come from keyed histograms.
      stats("content_crashes") = Some(ping.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "content"))
      stats("gpu_crashes") = Some(ping.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "gpu"))
      stats("plugin_crashes") = Some(ping.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "plugin"))
      stats("gmplugin_crashes") = Some(ping.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "gmplugin"))
      stats("content_shutdown_crashes") = Some(ping.getCountKeyedHistogramValue("SUBPROCESS_KILL_HARD", "ShutDownKill"))
      Tuple1(RowBuilder.merge(dimensions, stats.build))
    }
  }
}

// Allows calling `mainPing.parse()` directly via an implicit conversion.
// NOTE(review): the name looks like a copy-paste from the CrashPing
// conversion above — it actually converts MainPing => ParsableMainPing.
// Kept unchanged here to avoid breaking any explicit callers; consider
// renaming to MainPingToParsableMainPing in a follow-up.
implicit def ParsableCrashPingToMainPing(ping: MainPing) = new ParsableMainPing(ping)
|
||||||
|
|
||||||
/*
|
/*
|
||||||
We can't use an Option[Row] because entire rows cannot be null in Spark SQL. The best we can do is to resort to Tuple1[Row].
|
We can't use an Option[Row] because entire rows cannot be null in Spark SQL. The best we can do is to resort to Tuple1[Row].
|
||||||
See https://github.com/apache/spark/blob/38b9e69623c14a675b14639e8291f5d29d2a0bc3/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala#L53
|
See https://github.com/apache/spark/blob/38b9e69623c14a675b14639e8291f5d29d2a0bc3/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala#L53
|
||||||
|
@ -185,9 +192,9 @@ object ErrorAggregator {
|
||||||
return Tuple1(null)
|
return Tuple1(null)
|
||||||
}
|
}
|
||||||
if(docType == "crash") {
|
if(docType == "crash") {
|
||||||
parseCrashPing(messageToCrashPing(message))
|
messageToCrashPing(message).parse()
|
||||||
} else {
|
} else {
|
||||||
parseMainPing(messageToMainPing(message))
|
messageToMainPing(message).parse()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Загрузка…
Ссылка в новой задаче