Add a function to report perf results in JSON format.
Add support to report perf results in the JSON format specified in [1]. [1] https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md Bug: webrtc:8566 Change-Id: I25f829a4b012b3e2a3d56d61582a674f780148d0 Reviewed-on: https://webrtc-review.googlesource.com/26031 Reviewed-by: Patrik Höglund <phoglund@webrtc.org> Reviewed-by: Karl Wiberg <kwiberg@webrtc.org> Commit-Queue: Edward Lemur <ehmaldonado@webrtc.org> Cr-Commit-Position: refs/heads/master@{#20950}
This commit is contained in:
parent
30e5b26026
commit
936dfb1cb2
@ -148,6 +148,7 @@ rtc_source_set("test_support") {
|
||||
|
||||
deps = [
|
||||
"..:webrtc_common",
|
||||
"../api:array_view",
|
||||
"../common_video",
|
||||
"../rtc_base:gtest_prod",
|
||||
"../rtc_base:rtc_base_approved",
|
||||
|
||||
@ -8,14 +8,12 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
// A stripped-down version of Chromium's chrome/test/perf/perf_test.cc.
|
||||
// ResultsToString(), PrintResult(size_t value) and AppendResult(size_t value)
|
||||
// have been modified. The remainder are identical to the Chromium version.
|
||||
|
||||
#include "test/testsupport/perf_test.h"
|
||||
#include "rtc_base/criticalsection.h"
|
||||
|
||||
#include <sstream>
|
||||
#include <stdio.h>
|
||||
#include <map>
|
||||
#include <sstream>
|
||||
#include <vector>
|
||||
|
||||
namespace {
|
||||
@ -36,21 +34,131 @@ void PrintResultsImpl(const std::string& graph_name,
|
||||
values.c_str(), units.c_str());
|
||||
}
|
||||
|
||||
// Streams the elements of |values| into |ostream| as a comma-separated list:
// no surrounding brackets, no trailing separator, empty output for an empty
// container. Elements are formatted with the stream's current settings.
template <typename Container>
void OutputListToStream(std::ostream* ostream, const Container& values) {
  bool first = true;
  for (const auto& value : values) {
    if (!first)
      (*ostream) << ',';
    (*ostream) << value;
    first = false;
  }
}
|
||||
|
||||
// Accumulates perf results for the lifetime of the process and can serialize
// them in the Chart JSON format described in
// https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md
// Each Log* method also prints the human-readable "RESULT ..." line via
// PrintResultsImpl(). Access to graphs_ is serialized with crit_.
// NOTE(review): graph/trace names and units are inserted into the JSON
// verbatim, without escaping — embedded quotes or backslashes would produce
// invalid JSON. Confirm callers only pass plain identifier-like strings.
class PerfResultsLogger {
 public:
  // Drops every result recorded so far.
  void ClearResults() {
    rtc::CritScope lock(&crit_);
    graphs_.clear();
  }
  // Records a single scalar |value| for |trace_name| on |graph_name| and
  // prints its RESULT line.
  void LogResult(const std::string& graph_name,
                 const std::string& trace_name,
                 const double value,
                 const std::string& units,
                 const bool important) {
    std::ostringstream value_stream;
    value_stream << value;
    PrintResultsImpl(graph_name, trace_name, value_stream.str(), units,
                     important);

    // JSON fragment: "<trace>":{"type":"scalar","value":<v>,"units":"<u>"}
    std::ostringstream json_stream;
    json_stream << '"' << trace_name << R"(":{)";
    json_stream << R"("type":"scalar",)";
    json_stream << R"("value":)" << value << ',';
    json_stream << R"("units":")" << units << R"("})";
    rtc::CritScope lock(&crit_);
    graphs_[graph_name].push_back(json_stream.str());
  }
  // Records a mean/error pair. In the JSON output the mean becomes a
  // one-element "values" list and |error| is emitted under the "std" key,
  // matching the list_of_scalars type of the Chart JSON spec.
  void LogResultMeanAndError(const std::string& graph_name,
                             const std::string& trace_name,
                             const double mean,
                             const double error,
                             const std::string& units,
                             const bool important) {
    std::ostringstream value_stream;
    value_stream << '{' << mean << ',' << error << '}';
    PrintResultsImpl(graph_name, trace_name, value_stream.str(), units,
                     important);

    std::ostringstream json_stream;
    json_stream << '"' << trace_name << R"(":{)";
    json_stream << R"("type":"list_of_scalars",)";
    json_stream << R"("values":[)" << mean << "],";
    json_stream << R"("std":)" << error << ',';
    json_stream << R"("units":")" << units << R"("})";
    rtc::CritScope lock(&crit_);
    graphs_[graph_name].push_back(json_stream.str());
  }
  // Records a whole list of scalar values for one trace.
  void LogResultList(const std::string& graph_name,
                     const std::string& trace_name,
                     const rtc::ArrayView<const double> values,
                     const std::string& units,
                     const bool important) {
    std::ostringstream value_stream;
    value_stream << '[';
    OutputListToStream(&value_stream, values);
    value_stream << ']';
    PrintResultsImpl(graph_name, trace_name, value_stream.str(), units,
                     important);

    // The bracketed list built above doubles as the JSON "values" array.
    std::ostringstream json_stream;
    json_stream << '"' << trace_name << R"(":{)";
    json_stream << R"("type":"list_of_scalars",)";
    json_stream << R"("values":)" << value_stream.str() << ',';
    json_stream << R"("units":")" << units << R"("})";
    rtc::CritScope lock(&crit_);
    graphs_[graph_name].push_back(json_stream.str());
  }
  // Serializes everything recorded so far into one Chart JSON document.
  std::string ToJSON() const;

 private:
  rtc::CriticalSection crit_;
  // graph name -> pre-serialized JSON fragments, one per logged trace.
  std::map<std::string, std::vector<std::string>> graphs_
      RTC_GUARDED_BY(&crit_);
};
|
||||
|
||||
std::string PerfResultsLogger::ToJSON() const {
|
||||
std::ostringstream json_stream;
|
||||
json_stream << R"({"format_version":"1.0",)";
|
||||
json_stream << R"("charts":{)";
|
||||
rtc::CritScope lock(&crit_);
|
||||
for (auto graphs_it = graphs_.begin(); graphs_it != graphs_.end();
|
||||
++graphs_it) {
|
||||
if (graphs_it != graphs_.begin())
|
||||
json_stream << ',';
|
||||
json_stream << '"' << graphs_it->first << "\":";
|
||||
json_stream << '{';
|
||||
OutputListToStream(&json_stream, graphs_it->second);
|
||||
json_stream << '}';
|
||||
}
|
||||
json_stream << "}}";
|
||||
return json_stream.str();
|
||||
}
|
||||
|
||||
// Returns the process-wide results logger. The instance is created on first
// use (thread-safe static initialization) and intentionally leaked so it is
// still usable during static destruction at shutdown.
PerfResultsLogger& GetPerfResultsLogger() {
  static PerfResultsLogger& instance = *new PerfResultsLogger();
  return instance;
}
|
||||
|
||||
} // namespace
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Discards all perf results recorded so far in this process. Only intended
// for testing the perf-reporting functions themselves.
void ClearPerfResults() {
  GetPerfResultsLogger().ClearResults();
}
|
||||
|
||||
// Returns every result recorded so far, serialized in the Chart JSON format
// (see the link on PerfResultsLogger above).
std::string GetPerfResultsJSON() {
  return GetPerfResultsLogger().ToJSON();
}
|
||||
|
||||
// Prints one scalar perf RESULT line to stdout and records it for later JSON
// export via GetPerfResultsJSON(). The graph name is |measurement|+|modifier|.
void PrintResult(const std::string& measurement,
                 const std::string& modifier,
                 const std::string& trace,
                 const double value,
                 const std::string& units,
                 bool important) {
  // LogResult() already prints the human-readable RESULT line (through
  // PrintResultsImpl) in addition to storing the JSON fragment, so calling
  // PrintResultsImpl here as well would emit the line twice.
  GetPerfResultsLogger().LogResult(measurement + modifier, trace, value, units,
                                   important);
}
|
||||
|
||||
void PrintResultMeanAndError(const std::string& measurement,
|
||||
@ -60,32 +168,18 @@ void PrintResultMeanAndError(const std::string& measurement,
|
||||
const double error,
|
||||
const std::string& units,
|
||||
bool important) {
|
||||
std::ostringstream value_stream;
|
||||
value_stream << '{' << mean << ',' << error << '}';
|
||||
PrintResultsImpl(measurement + modifier, trace, value_stream.str(), units,
|
||||
important);
|
||||
GetPerfResultsLogger().LogResultMeanAndError(measurement + modifier, trace,
|
||||
mean, error, units, important);
|
||||
}
|
||||
|
||||
void PrintResultList(const std::string& measurement,
|
||||
const std::string& modifier,
|
||||
const std::string& trace,
|
||||
const std::vector<double>& values,
|
||||
const rtc::ArrayView<const double> values,
|
||||
const std::string& units,
|
||||
bool important) {
|
||||
std::ostringstream value_stream;
|
||||
value_stream << '[';
|
||||
if (!values.empty()) {
|
||||
auto it = values.begin();
|
||||
while (true) {
|
||||
value_stream << *it;
|
||||
if (++it == values.end())
|
||||
break;
|
||||
value_stream << ',';
|
||||
}
|
||||
}
|
||||
value_stream << ']';
|
||||
PrintResultsImpl(measurement + modifier, trace, value_stream.str(), units,
|
||||
important);
|
||||
GetPerfResultsLogger().LogResultList(measurement + modifier, trace, values,
|
||||
units, important);
|
||||
}
|
||||
|
||||
} // namespace test
|
||||
|
||||
@ -8,16 +8,13 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
// A stripped-down version of Chromium's chrome/test/perf/perf_test.h.
|
||||
// Several functions have been removed; the prototypes of the remainder have
|
||||
// not been changed.
|
||||
|
||||
#ifndef TEST_TESTSUPPORT_PERF_TEST_H_
|
||||
#define TEST_TESTSUPPORT_PERF_TEST_H_
|
||||
|
||||
#include "api/array_view.h"
|
||||
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
@ -53,7 +50,6 @@ void PrintResultMeanAndError(const std::string& measurement,
|
||||
const std::string& units,
|
||||
bool important);
|
||||
|
||||
|
||||
// Like PrintResult(), but prints an entire list of results. The |values|
|
||||
// will generally be a list of comma-separated numbers. A typical
|
||||
// post-processing step might produce plots of their mean and standard
|
||||
@ -61,10 +57,17 @@ void PrintResultMeanAndError(const std::string& measurement,
|
||||
void PrintResultList(const std::string& measurement,
|
||||
const std::string& modifier,
|
||||
const std::string& trace,
|
||||
const std::vector<double>& values,
|
||||
rtc::ArrayView<const double> values,
|
||||
const std::string& units,
|
||||
bool important);
|
||||
|
||||
// Returns all perf results recorded so far in this process, serialized in the
// Chart JSON format described at
// https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md
std::string GetPerfResultsJSON();

// You shouldn't use this function. It's only used to test the functions above.
void ClearPerfResults();
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
|
||||
|
||||
@ -14,30 +14,97 @@
|
||||
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace {
|
||||
|
||||
const char* kJsonExpected = R"({
|
||||
"format_version":"1.0",
|
||||
"charts":{
|
||||
"foobar":{
|
||||
"baz_v":{
|
||||
"type":"scalar",
|
||||
"value":7,
|
||||
"units":"widgets"
|
||||
},
|
||||
"baz_me":{
|
||||
"type":"list_of_scalars",
|
||||
"values":[1],
|
||||
"std":2,
|
||||
"units":"lemurs"
|
||||
},
|
||||
"baz_vl":{
|
||||
"type":"list_of_scalars",
|
||||
"values":[1,2,3],
|
||||
"units":"units"
|
||||
}
|
||||
},
|
||||
"measurementmodifier":{
|
||||
"trace":{
|
||||
"type":"scalar",
|
||||
"value":42,
|
||||
"units":"units"
|
||||
}
|
||||
}
|
||||
}
|
||||
})";
|
||||
|
||||
// Strips every space and newline from |s| so the pretty-printed JSON
// expectation above can be compared against the compact JSON produced by
// GetPerfResultsJSON(). Takes |s| by value and edits it in place.
std::string RemoveSpaces(std::string s) {
  // Single erase/remove_if pass instead of two separate remove passes.
  s.erase(std::remove_if(s.begin(), s.end(),
                         [](char c) { return c == ' ' || c == '\n'; }),
          s.end());
  return s;
}
|
||||
|
||||
} // namespace
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Test fixture that resets the process-global perf-results store after each
// test, so results logged by one test never leak into the next.
class PerfTest : public ::testing::Test {
 protected:
  void TearDown() override { ClearPerfResults(); }
};
||||
|
||||
|
||||
#if defined(WEBRTC_IOS)
|
||||
#define MAYBE_AppendResult DISABLED_AppendResult
|
||||
#define MAYBE_TestPrintResult DISABLED_TestPrintResult
|
||||
#else
|
||||
#define MAYBE_AppendResult AppendResult
|
||||
#define MAYBE_TestPrintResult TestPrintResult
|
||||
#endif
|
||||
// Checks the human-readable stdout output of each Print* entry point. The
// source view contained merged pre/post-change lines (two TEST macros,
// duplicate expected strings, two capture-compare endings); this is the
// post-change version consistent with the JSON test below.
TEST_F(PerfTest, MAYBE_TestPrintResult) {
  testing::internal::CaptureStdout();
  std::string expected;

  expected += "RESULT measurementmodifier: trace= 42 units\n";
  PrintResult("measurement", "modifier", "trace", 42, "units", false);

  // important=true results are prefixed with '*'.
  expected += "*RESULT foobar: baz_v= 7 widgets\n";
  PrintResult("foo", "bar", "baz_v", 7, "widgets", true);

  expected += "RESULT foobar: baz_me= {1,2} lemurs\n";
  PrintResultMeanAndError("foo", "bar", "baz_me", 1, 2, "lemurs", false);

  const double kListOfScalars[] = {1, 2, 3};
  expected += "RESULT foobar: baz_vl= [1,2,3] units\n";
  PrintResultList("foo", "bar", "baz_vl", kListOfScalars, "units", false);

  EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
|
||||
|
||||
// End-to-end check: results logged through the Print* entry points come back
// from GetPerfResultsJSON() as the expected Chart JSON document. Call order
// matters within a graph (fragments are appended to a vector), so keep these
// calls in sync with kJsonExpected.
TEST_F(PerfTest, TestGetPerfResultsJSON) {
  PrintResult("measurement", "modifier", "trace", 42, "units", false);
  PrintResult("foo", "bar", "baz_v", 7, "widgets", true);
  PrintResultMeanAndError("foo", "bar", "baz_me", 1, 2, "lemurs", false);
  const double kListOfScalars[] = {1, 2, 3};
  PrintResultList("foo", "bar", "baz_vl", kListOfScalars, "units", false);

  EXPECT_EQ(RemoveSpaces(kJsonExpected), GetPerfResultsJSON());
}
|
||||
|
||||
// ClearPerfResults() must drop recorded results, leaving an empty "charts"
// object in the JSON output.
TEST_F(PerfTest, TestClearPerfResults) {
  PrintResult("measurement", "modifier", "trace", 42, "units", false);
  ClearPerfResults();
  EXPECT_EQ(R"({"format_version":"1.0","charts":{}})", GetPerfResultsJSON());
}
|
||||
|
||||
} // namespace test
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user