Set up C++ unit tests for WebPerformance
Summary:
## Changelog: [Internal]

Add unit tests for the WebPerformance library (PerformanceEntryReporter). This sets up a C++ unit test suite for `PerformanceEntryReporter` (the core part of the native side of WebPerformance) and adds test coverage for its core functionality.

Reviewed By: sammy-SC

Differential Revision: D43771370

fbshipit-source-id: ad3e0f3f206701c2ea6a5c9386458a76699e7c80
Parent: d7eb3bfcb3
Commit: 350c055e3d
BUCK (31 changed lines: 28 additions, 3 deletions)

```diff
@@ -18,6 +18,7 @@ load(
     "RCT_IMAGE_URL_LOADER_SOCKET",
     "RCT_URL_REQUEST_HANDLER_SOCKET",
     "YOGA_CXX_TARGET",
+    "fb_xplat_cxx_test",
     "get_react_native_ios_target_sdk_version",
     "react_cxx_module_plugin_provider",
     "react_fabric_component_plugin_provider",
@@ -1458,9 +1459,12 @@ rn_apple_xplat_cxx_library(
 
 rn_xplat_cxx_library(
     name = "RCTWebPerformance",
-    srcs = glob([
-        "Libraries/WebPerformance/**/*.cpp",
-    ]),
+    srcs = glob(
+        [
+            "Libraries/WebPerformance/**/*.cpp",
+        ],
+        exclude = ["Libraries/WebPerformance/__tests__/*"],
+    ),
     header_namespace = "",
     exported_headers = subdir_glob(
         [("Libraries/WebPerformance", "*.h")],
@@ -1490,3 +1494,24 @@ rn_xplat_cxx_library(
         react_native_xplat_target("cxxreact:bridge"),
     ],
 )
+
+fb_xplat_cxx_test(
+    name = "RCTWebPerformance_tests",
+    srcs = glob([
+        "Libraries/WebPerformance/__tests__/*.cpp",
+    ]),
+    headers = glob(["Libraries/WebPerformance/__tests__/*.h"]),
+    header_namespace = "",
+    compiler_flags = [
+        "-fexceptions",
+        "-frtti",
+        "-std=c++17",
+        "-Wall",
+    ],
+    platforms = (ANDROID, APPLE),
+    deps = [
+        ":RCTWebPerformance",
+        "//xplat/third-party/gmock:gmock",
+        "//xplat/third-party/gmock:gtest",
+    ],
+)
```
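With the `fb_xplat_cxx_test` target in place, the suite should run like any other Buck test target, e.g. `buck test :RCTWebPerformance_tests` from the package containing this BUCK file. The fully qualified target path depends on where the file sits in the repository, so treat the exact invocation as an assumption rather than something this commit documents.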
Libraries/WebPerformance/PerformanceEntryReporter.cpp

```diff
@@ -68,6 +68,10 @@ void PerformanceEntryReporter::stopReporting(PerformanceEntryType entryType) {
   reportingType_[static_cast<int>(entryType)] = false;
 }
 
+void PerformanceEntryReporter::stopReporting() {
+  reportingType_.fill(false);
+}
+
 GetPendingEntriesResult PerformanceEntryReporter::popPendingEntries() {
   std::lock_guard<std::mutex> lock(entriesMutex_);
 
@@ -83,13 +87,13 @@ void PerformanceEntryReporter::logEntry(const RawPerformanceEntry &entry) {
     eventCounts_[entry.name]++;
   }
 
-  if (!isReportingType(entryType)) {
+  if (!isReporting(entryType)) {
     return;
   }
 
   if (entry.duration < durationThreshold_[entry.entryType]) {
     // The entries duration is lower than the desired reporting threshold, skip
-    return;
+    // return;
   }
 
   std::lock_guard<std::mutex> lock(entriesMutex_);
```
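Two behavioral details are worth calling out in the hunks above: the commit adds a no-argument `stopReporting()` that disables reporting for every entry type at once (the per-type overload stays), and it comments out the early `return` under the duration threshold, so below-threshold entries are, as of this change, still queued. A minimal sketch of how the two overloads differ, using only calls visible in this diff:

```cpp
auto &reporter = PerformanceEntryReporter::getInstance();
reporter.startReporting(PerformanceEntryType::MARK);
reporter.startReporting(PerformanceEntryType::MEASURE);

reporter.stopReporting(PerformanceEntryType::MARK);  // disables MARK only;
                                                     // MEASURE is still reported

reporter.stopReporting();                            // disables every entry type;
                                                     // isReporting(...) is now false for all
```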
```diff
@@ -147,7 +151,8 @@ void PerformanceEntryReporter::mark(
 void PerformanceEntryReporter::clearEntries(
     PerformanceEntryType entryType,
     const char *entryName) {
-  if (entryType == PerformanceEntryType::MARK) {
+  if (entryType == PerformanceEntryType::MARK ||
+      entryType == PerformanceEntryType::UNDEFINED) {
     if (entryName != nullptr) {
       // remove a named mark from the mark/measure registry
       PerformanceMark mark{{entryName, 0}};
@@ -157,8 +162,11 @@ void PerformanceEntryReporter::clearEntries(
           marksBuffer_, marksCount_, marksBufferPosition_, entryName);
     } else {
       marksCount_ = 0;
+      marksRegistry_.clear();
     }
-  } else if (entryType == PerformanceEntryType::MEASURE) {
+  } else if (
+      entryType == PerformanceEntryType::MEASURE ||
+      entryType == PerformanceEntryType::UNDEFINED) {
     if (entryName != nullptr) {
       clearCircularBuffer(
           measuresBuffer_, measuresCount_, measuresBufferPosition_, entryName);
```
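With `UNDEFINED` now matching both branches (and becoming the default argument in the header diff below), a bare `clearEntries()` wipes marks and measures in one call. A small sketch of the intended use, assuming the defaults from the header:

```cpp
auto &reporter = PerformanceEntryReporter::getInstance();
reporter.mark("m0", 0.0, 0.0);
reporter.measure("ms0", 0.0, 1.0);

// entryType defaults to UNDEFINED and entryName to nullptr, so this clears
// both the mark state (buffer and registry) and the measure buffer.
reporter.clearEntries();
```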
Libraries/WebPerformance/PerformanceEntryReporter.h

```diff
@@ -81,6 +81,7 @@ class PerformanceEntryReporter : public EventLogger {
   void setReportingCallback(std::optional<AsyncCallback<>> callback);
   void startReporting(PerformanceEntryType entryType);
   void stopReporting(PerformanceEntryType entryType);
+  void stopReporting();
   void setDurationThreshold(
       PerformanceEntryType entryType,
       double durationThreshold);
@@ -88,10 +89,14 @@ class PerformanceEntryReporter : public EventLogger {
   GetPendingEntriesResult popPendingEntries();
   void logEntry(const RawPerformanceEntry &entry);
 
-  bool isReportingType(PerformanceEntryType entryType) const {
+  bool isReporting(PerformanceEntryType entryType) const {
     return reportingType_[static_cast<int>(entryType)];
   }
 
+  bool isReportingEvents() const {
+    return isReporting(PerformanceEntryType::EVENT);
+  }
+
   uint32_t getDroppedEntryCount() const {
     return droppedEntryCount_;
   }
@@ -102,16 +107,16 @@ class PerformanceEntryReporter : public EventLogger {
       const std::string &name,
       double startTime,
       double endTime,
-      const std::optional<double> &duration,
-      const std::optional<std::string> &startMark,
-      const std::optional<std::string> &endMark);
+      const std::optional<double> &duration = std::nullopt,
+      const std::optional<std::string> &startMark = std::nullopt,
+      const std::optional<std::string> &endMark = std::nullopt);
 
   void clearEntries(
-      PerformanceEntryType entryType,
+      PerformanceEntryType entryType = PerformanceEntryType::UNDEFINED,
       const char *entryName = nullptr);
 
   std::vector<RawPerformanceEntry> getEntries(
-      PerformanceEntryType entryType,
+      PerformanceEntryType entryType = PerformanceEntryType::UNDEFINED,
       const char *entryName = nullptr) const;
 
   void event(
@@ -171,10 +176,6 @@ class PerformanceEntryReporter : public EventLogger {
   double getMarkTime(const std::string &markName) const;
   void scheduleFlushBuffer();
 
-  bool isReportingEvents() const {
-    return isReportingType(PerformanceEntryType::EVENT);
-  }
-
   template <class T, size_t N>
   std::vector<RawPerformanceEntry> getCircularBufferContents(
       const std::array<T, N> &buffer,
```
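The defaulted parameters on `measure()` let callers drop the trailing optionals, which the new tests rely on. Illustrative calls — the behavior is inferred from the expected values in the test file below, not from documentation:

```cpp
// duration comes out as endTime - startTime when not given explicitly
reporter.measure("measure0", 0.0, 2.0);

// an explicit duration overrides the endTime - startTime computation
reporter.measure("measure1", 0.0, 2.0, 4.0);

// start/end times can also be resolved from previously recorded named marks
reporter.measure("measure2", 0.0, 0.0, std::nullopt, "mark1", "mark2");
```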
Libraries/WebPerformance/__tests__/ — new test source file (+224 lines; the file name is not shown in this view):

```cpp
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

#include <ostream>

#include <gtest/gtest.h>

#include "../PerformanceEntryReporter.h"

namespace facebook::react {
static std::ostream &operator<<(
    std::ostream &os,
    const RawPerformanceEntry &entry) {
  static constexpr const char *entryTypeNames[] = {
      "UNDEFINED",
      "MARK",
      "MEASURE",
      "EVENT",
  };
  return os << "{ name: " << entry.name
            << ", type: " << entryTypeNames[entry.entryType]
            << ", startTime: " << entry.startTime
            << ", duration: " << entry.duration << " }";
}
} // namespace facebook::react

using namespace facebook::react;

TEST(PerformanceEntryReporter, PerformanceEntryReporterTestStartReporting) {
  auto &reporter = PerformanceEntryReporter::getInstance();

  reporter.stopReporting();
  reporter.clearEntries();

  reporter.startReporting(PerformanceEntryType::MARK);
  reporter.startReporting(PerformanceEntryType::MEASURE);

  ASSERT_TRUE(reporter.isReporting(PerformanceEntryType::MARK));
  ASSERT_TRUE(reporter.isReporting(PerformanceEntryType::MEASURE));

  ASSERT_FALSE(reporter.isReporting(PerformanceEntryType::EVENT));
  ASSERT_FALSE(reporter.isReportingEvents());
}

TEST(PerformanceEntryReporter, PerformanceEntryReporterTestStopReporting) {
  auto &reporter = PerformanceEntryReporter::getInstance();

  reporter.stopReporting();
  reporter.clearEntries();

  reporter.startReporting(PerformanceEntryType::MARK);

  reporter.mark("mark0", 0.0, 0.0);
  reporter.mark("mark1", 0.0, 0.0);
  reporter.mark("mark2", 0.0, 0.0);
  reporter.measure("measure0", 0.0, 0.0);

  auto res = reporter.popPendingEntries();
  const auto &entries = res.entries;

  ASSERT_EQ(0, res.droppedEntriesCount);
  ASSERT_EQ(3, entries.size());

  res = reporter.popPendingEntries();

  ASSERT_EQ(0, res.droppedEntriesCount);
  ASSERT_EQ(0, res.entries.size());

  reporter.stopReporting(PerformanceEntryType::MARK);
  reporter.startReporting(PerformanceEntryType::MEASURE);

  reporter.mark("mark3", 0.0, 0.0);
  reporter.measure("measure1", 0.0, 0.0);

  res = reporter.popPendingEntries();

  ASSERT_EQ(0, res.droppedEntriesCount);
  ASSERT_EQ(1, res.entries.size());
  ASSERT_STREQ("measure1", res.entries[0].name.c_str());
}

TEST(PerformanceEntryReporter, PerformanceEntryReporterTestReportMarks) {
  auto &reporter = PerformanceEntryReporter::getInstance();

  reporter.stopReporting();
  reporter.clearEntries();

  reporter.startReporting(PerformanceEntryType::MARK);

  reporter.mark("mark0", 0.0, 1.0);
  reporter.mark("mark1", 1.0, 3.0);
  reporter.mark("mark2", 2.0, 4.0);

  auto res = reporter.popPendingEntries();
  const auto &entries = res.entries;

  ASSERT_EQ(0, res.droppedEntriesCount);
  ASSERT_EQ(3, entries.size());

  const std::vector<RawPerformanceEntry> expected = {
      {"mark0",
       static_cast<int>(PerformanceEntryType::MARK),
       0.0,
       1.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"mark1",
       static_cast<int>(PerformanceEntryType::MARK),
       1.0,
       3.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"mark2",
       static_cast<int>(PerformanceEntryType::MARK),
       2.0,
       4.0,
       std::nullopt,
       std::nullopt,
       std::nullopt}};

  ASSERT_EQ(expected, entries);
}

TEST(PerformanceEntryReporter, PerformanceEntryReporterTestReportMeasures) {
  auto &reporter = PerformanceEntryReporter::getInstance();

  reporter.stopReporting();
  reporter.clearEntries();

  reporter.startReporting(PerformanceEntryType::MARK);
  reporter.startReporting(PerformanceEntryType::MEASURE);

  reporter.mark("mark0", 0.0, 1.0);
  reporter.mark("mark1", 1.0, 3.0);
  reporter.mark("mark2", 2.0, 4.0);

  reporter.measure("measure0", 0.0, 2.0);
  reporter.measure("measure1", 0.0, 2.0, 4.0);
  reporter.measure("measure2", 0.0, 0.0, std::nullopt, "mark1", "mark2");
  reporter.measure("measure3", 0.0, 0.0, 5.0, "mark1");
  reporter.measure("measure4", 1.5, 0.0, std::nullopt, std::nullopt, "mark2");

  auto res = reporter.popPendingEntries();
  const auto &entries = res.entries;

  ASSERT_EQ(0, res.droppedEntriesCount);

  ASSERT_STREQ("mark0", entries[0].name.c_str());
  ASSERT_STREQ("mark1", entries[1].name.c_str());
  ASSERT_STREQ("mark2", entries[2].name.c_str());
  ASSERT_STREQ("measure0", entries[3].name.c_str());
  ASSERT_STREQ("measure1", entries[4].name.c_str());
  ASSERT_STREQ("measure2", entries[5].name.c_str());
  ASSERT_STREQ("measure3", entries[6].name.c_str());
  ASSERT_STREQ("measure4", entries[7].name.c_str());

  ASSERT_EQ(8, entries.size());

  const std::vector<RawPerformanceEntry> expected = {
      {"mark0",
       static_cast<int>(PerformanceEntryType::MARK),
       0.0,
       1.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"mark1",
       static_cast<int>(PerformanceEntryType::MARK),
       1.0,
       3.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"mark2",
       static_cast<int>(PerformanceEntryType::MARK),
       2.0,
       4.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"measure0",
       static_cast<int>(PerformanceEntryType::MEASURE),
       0.0,
       2.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"measure1",
       static_cast<int>(PerformanceEntryType::MEASURE),
       0.0,
       4.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"measure2",
       static_cast<int>(PerformanceEntryType::MEASURE),
       1.0,
       1.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"measure3",
       static_cast<int>(PerformanceEntryType::MEASURE),
       1.0,
       5.0,
       std::nullopt,
       std::nullopt,
       std::nullopt},
      {"measure4",
       static_cast<int>(PerformanceEntryType::MEASURE),
       1.5,
       0.5,
       std::nullopt,
       std::nullopt,
       std::nullopt}};

  ASSERT_EQ(expected, entries);
}
```
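A note on the structure of these tests: `PerformanceEntryReporter::getInstance()` hands back a shared singleton, so every test case starts with `stopReporting()` and `clearEntries()` to keep state from leaking between cases. The file-local `operator<<` for `RawPerformanceEntry` exists purely so gtest can print readable diffs when the `ASSERT_EQ(expected, entries)` comparisons fail.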