Fix failing tests and factor out scalar values
Parent: ce4d8f874b
Commit: 68d9d191ba
TestAggregator:

@@ -10,6 +10,8 @@ import org.scalatest._
 class TestAggregator extends FlatSpec with Matchers{

   implicit val formats = DefaultFormats
+  val k = TestUtils.scalarValue
+  val app = TestUtils.application

   "The aggregator" should "sum metrics over a set of dimensions" in {
     val spark = SparkSession.builder()
@@ -17,27 +19,29 @@ class TestAggregator extends FlatSpec with Matchers{
       .master("local[1]")
       .getOrCreate()
     import spark.implicits._
-    val messages = (TestUtils.generateCrashMessages(42) ++ TestUtils.generateMainMessages(42)).map(_.toByteArray).seq
+    val messages =
+      (TestUtils.generateCrashMessages(k)
+        ++ TestUtils.generateMainMessages(k)).map(_.toByteArray).seq
     val df = ErrorAggregator.aggregate(spark.sqlContext.createDataset(messages).toDF, raiseOnError = true, online = false)
     df.count() should be (1)
     df.select("submission_date").first()(0).toString should be ("2016-04-07")
-    df.select("channel").first()(0) should be (TestUtils.application.channel)
-    df.select("version").first()(0) should be (TestUtils.application.version)
-    df.select("build_id").first()(0) should be (TestUtils.application.buildId)
-    df.select("application").first()(0) should be (TestUtils.application.name)
+    df.select("channel").first()(0) should be (app.channel)
+    df.select("version").first()(0) should be (app.version)
+    df.select("build_id").first()(0) should be (app.buildId)
+    df.select("application").first()(0) should be (app.name)
     df.select("os_name").first()(0) should be ("Linux")
-    df.select("os_version").first()(0) should be ("42")
-    df.select("architecture").first()(0) should be (TestUtils.application.architecture)
+    df.select("os_version").first()(0) should be (s"${k}")
+    df.select("architecture").first()(0) should be (app.architecture)
     df.select("country").first()(0) should be ("IT")
-    df.select("main_crashes").first()(0) should be (42)
-    df.select("content_crashes").first()(0) should be (42)
-    df.select("gpu_crashes").first()(0) should be (42)
-    df.select("plugin_crashes").first()(0) should be (42)
-    df.select("gmplugin_crashes").first()(0) should be (42)
-    df.select("content_shutdown_crashes").first()(0) should be (42)
-    df.select("count").first()(0) should be (84)
-    df.select("usage_hours").first()(0) should be (42.0)
-    df.select("browser_shim_usage_blocked").first()(0) should be (42)
+    df.select("main_crashes").first()(0) should be (k)
+    df.select("content_crashes").first()(0) should be (k)
+    df.select("gpu_crashes").first()(0) should be (k)
+    df.select("plugin_crashes").first()(0) should be (k)
+    df.select("gmplugin_crashes").first()(0) should be (k)
+    df.select("content_shutdown_crashes").first()(0) should be (k)
+    df.select("count").first()(0) should be (k * 2)
+    df.select("usage_hours").first()(0) should be (k.toFloat)
+    df.select("browser_shim_usage_blocked").first()(0) should be (k)
     df.select("experiment_id").first()(0) should be ("experiment1")
     df.select("experiment_branch").first()(0) should be ("control")
     df.select("e10s_enabled").first()(0) should equal (true)
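All of the expected values above now derive from the single factored-out scalar k. A minimal sketch of the arithmetic, assuming each generator call produces k pings for the same set of dimensions so that they collapse into one aggregate row:

    // Hypothetical walk-through of the expected totals; not part of the diff.
    val k = TestUtils.scalarValue        // 42, per the TestUtils hunk below
    val expectedCount = k * 2            // k crash pings + k main pings in the single row
    val expectedUsageHours = k.toFloat   // assumes each main ping contributes 1.0 usage hour
    val expectedOsVersion = s"${k}"      // the generated pings reuse the scalar as os_version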
TestPings:

@@ -13,23 +13,24 @@ class TestPings extends FlatSpec with Matchers{

   val message = TestUtils.generateMainMessages(1).head
   val mainPing = messageToMainPing(message)
+  val ts = TestUtils.testTimestampMillis

   "MainPing" should "return the value of a count histogram" in {
-    mainPing.getCountHistogramValue("foo") should be (0)
-    mainPing.getCountHistogramValue("BROWSER_SHIM_USAGE_BLOCKED") should be (1)
+    mainPing.getCountHistogramValue("foo").isEmpty should be (true)
+    mainPing.getCountHistogramValue("BROWSER_SHIM_USAGE_BLOCKED").get should be (1)
   }
   it should "return the value of a keyed count histogram" in {
-    mainPing.getCountKeyedHistogramValue("foo", "bar") should be (0)
-    mainPing.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "foo") should be (0)
-    mainPing.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "content") should be (1)
+    mainPing.getCountKeyedHistogramValue("foo", "bar").isEmpty should be (true)
+    mainPing.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "foo").isEmpty should be (true)
+    mainPing.getCountKeyedHistogramValue("SUBPROCESS_CRASHES_WITH_DUMP", "content").get should be (1)
   }
   it should "return the value of its usage hours" in {
-    mainPing.usageHours should be (1.0)
+    mainPing.usageHours.get should be (1.0)
     val messageNoUsageHours = TestUtils.generateMainMessages(1, Some(Map("payload.info" -> "{}"))).head
     val pingNoUsageHours = messageToMainPing(messageNoUsageHours)
-    pingNoUsageHours.usageHours should be (0.0)
+    pingNoUsageHours.usageHours.isEmpty should be (true)
   }
   it should "return its timestamp" in {
-    mainPing.meta.normalizedTimestamp() should be (new Timestamp(1460036116829L))
+    mainPing.meta.normalizedTimestamp() should be (new Timestamp(ts))
   }
 }
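The TestPings assertions now expect missing histograms to come back as empty values rather than zeros, which implies the MainPing accessors return Option. A minimal sketch of what such Option-based lookups could look like, assuming a json4s payload; the real MainPing implementation is not part of this diff:

    import org.json4s._

    implicit val formats: Formats = DefaultFormats

    // Hypothetical helpers: return None when the histogram (or key) is missing.
    def getCountHistogramValue(histograms: JValue, name: String): Option[Int] =
      (histograms \ name \ "sum").extractOpt[Int]

    def getCountKeyedHistogramValue(keyed: JValue, name: String, key: String): Option[Int] =
      (keyed \ name \ key \ "sum").extractOpt[Int]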
TestUtils:

@@ -15,6 +15,9 @@ object TestUtils {
     "x86", "20170101000000", "release", "Firefox", "42.0", "Mozilla", "42.0", "x86-msvc"
   )
   private val applicationJson = compact(render(Extraction.decompose(application)))
+  val scalarValue = 42
+  val testTimestampNano = 1460036116829920000L
+  val testTimestampMillis = testTimestampNano / 1000000

   def generateCrashMessages(size: Int, fieldsOverride: Option[Map[String, Any]]=None): Seq[Message] = {
     val defaultMap = Map(
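The two timestamp constants added here encode the same instant at different resolutions, so the millisecond value used by TestPings is plain integer arithmetic over the nanosecond message timestamp:

    // Worked example of the conversion (values taken from the diff).
    val testTimestampNano = 1460036116829920000L           // nanoseconds, as passed to RichMessage
    val testTimestampMillis = testTimestampNano / 1000000  // 1460036116829L
    new java.sql.Timestamp(testTimestampMillis)            // 2016-04-07, matching the submission_date assertion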
@@ -57,7 +60,7 @@ object TestUtils {
           | },
           | "application": ${applicationJson}
           |}""".stripMargin),
-        timestamp=1460036116829920000L
+        timestamp=testTimestampNano
       )
     }
   }
@@ -110,7 +113,7 @@ object TestUtils {
       RichMessage(s"main-${index}",
         outputMap,
         Some(s"""{"application": ${applicationJson}}""".stripMargin),
-        timestamp=1460036116829920000L
+        timestamp=testTimestampNano
       )
     }
   }
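TestPings also exercises the fieldsOverride parameter of generateMainMessages (passing Some(Map("payload.info" -> "{}")) to blank out the usage-hours source). A minimal sketch of how such an override could be merged over the default field map, assuming last-write-wins semantics; the actual helper in TestUtils may differ:

    // Hypothetical merge of caller-supplied overrides onto the defaults.
    def mergeFields(defaultMap: Map[String, Any],
                    fieldsOverride: Option[Map[String, Any]]): Map[String, Any] =
      fieldsOverride match {
        case Some(overrides) => defaultMap ++ overrides  // overrides win on key conflicts
        case None => defaultMap
      }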