Introduce MetricsExporter for Chrome Perf Dashboard
Bug: b/246095034
Change-Id: I12ac5898909fcdcefc8238464bc74c5166c0177e
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/274900
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#38127}
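
The new exporter plugs into the existing MetricsExporter interface (the ":metrics_exporter" dependency below). As a rough sketch, inferred only from the `override` in the new header rather than from the actual metrics_exporter.h, the interface looks along these lines:

namespace webrtc {
namespace test {

// Hypothetical sketch of the MetricsExporter interface; the real
// api/test/metrics/metrics_exporter.h may differ.
class MetricsExporter {
 public:
  virtual ~MetricsExporter() = default;
  // Returns true if the metrics were exported successfully.
  virtual bool Export(rtc::ArrayView<const Metric> metrics) = 0;
};

}  // namespace test
}  // namespace webrtc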
api/test/metrics/BUILD.gn
@@ -30,7 +30,10 @@ if (rtc_include_tests) {
     ]
 
     if (rtc_enable_protobuf) {
-      deps += [ ":metrics_set_proto_file_exporter_test" ]
+      deps += [
+        ":chrome_perf_dashboard_metrics_exporter_test",
+        ":metrics_set_proto_file_exporter_test",
+      ]
     }
   }
 }
@ -91,6 +94,26 @@ rtc_library("metrics_logger_and_exporter") {
|
||||
]
|
||||
}
|
||||
|
||||
rtc_library("chrome_perf_dashboard_metrics_exporter") {
|
||||
visibility = [ "*" ]
|
||||
testonly = true
|
||||
sources = [
|
||||
"chrome_perf_dashboard_metrics_exporter.cc",
|
||||
"chrome_perf_dashboard_metrics_exporter.h",
|
||||
]
|
||||
deps = [
|
||||
":metric",
|
||||
":metrics_exporter",
|
||||
"../../../api:array_view",
|
||||
"../../../test:fileutils",
|
||||
"../../../test:perf_test",
|
||||
]
|
||||
absl_deps = [
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
"//third_party/abseil-cpp/absl/strings",
|
||||
]
|
||||
}
|
||||
|
||||
if (rtc_enable_protobuf) {
|
||||
proto_library("metric_proto") {
|
||||
visibility = [ "*" ]
|
||||
@@ -160,5 +183,18 @@ if (rtc_include_tests) {
         "../../units:timestamp",
       ]
     }
+
+    rtc_library("chrome_perf_dashboard_metrics_exporter_test") {
+      testonly = true
+      sources = [ "chrome_perf_dashboard_metrics_exporter_test.cc" ]
+      deps = [
+        ":chrome_perf_dashboard_metrics_exporter",
+        ":metric",
+        "../../../api/units:timestamp",
+        "../../../test:fileutils",
+        "../../../test:test_support",
+        "//third_party/catapult/tracing/tracing:histogram",
+      ]
+    }
   }
 }
api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc (new file, 138 lines)
@@ -0,0 +1,138 @@
/*
 *  Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"

#include <stdio.h>

#include <memory>
#include <string>
#include <vector>

#include "absl/memory/memory.h"
#include "absl/strings/string_view.h"
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "test/testsupport/file_utils.h"
#include "test/testsupport/perf_test_histogram_writer.h"
#include "test/testsupport/perf_test_result_writer.h"

namespace webrtc {
namespace test {
namespace {

std::string ToChromePerfDashboardUnit(Unit unit) {
  switch (unit) {
    case Unit::kTimeMs:
      return "msBestFitFormat";
    case Unit::kPercent:
      return "n%";
    case Unit::kSizeInBytes:
      return "sizeInBytes";
    case Unit::kKilobitsPerSecond:
      // Chrome Perf Dashboard doesn't have kbps units, so we change the unit
      // and value accordingly.
      return "bytesPerSecond";
    case Unit::kHertz:
      return "Hz";
    case Unit::kUnitless:
      return "unitless";
    case Unit::kCount:
      return "count";
  }
}

double ToChromePerfDashboardValue(double value, Unit unit) {
  switch (unit) {
    case Unit::kKilobitsPerSecond:
      // Chrome Perf Dashboard doesn't have kbps units, so we change the unit
      // and value accordingly.
      return value * 1000 / 8;
    default:
      return value;
  }
}
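// For example, a 20 kbps sample is exported as 20 * 1000 / 8 = 2500 bytes
// per second (see the `metric2` expectations in the test below).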

ImproveDirection ToChromePerfDashboardImproveDirection(
    ImprovementDirection direction) {
  switch (direction) {
    case ImprovementDirection::kBiggerIsBetter:
      return ImproveDirection::kBiggerIsBetter;
    case ImprovementDirection::kNeitherIsBetter:
      return ImproveDirection::kNone;
    case ImprovementDirection::kSmallerIsBetter:
      return ImproveDirection::kSmallerIsBetter;
  }
}

bool WriteMetricsToFile(const std::string& path, const std::string& data) {
  CreateDir(DirName(path));
  FILE* output = fopen(path.c_str(), "wb");
  if (output == NULL) {
    printf("Failed to write to %s.\n", path.c_str());
    return false;
  }
  size_t written = fwrite(data.c_str(), sizeof(char), data.size(), output);
  fclose(output);

  if (written != data.size()) {
    size_t expected = data.size();
    printf("Wrote %zu, tried to write %zu\n", written, expected);
    return false;
  }
  return true;
}

bool IsEmpty(const Metric::Stats& stats) {
  return !stats.mean.has_value() && !stats.stddev.has_value() &&
         !stats.min.has_value() && !stats.max.has_value();
}

}  // namespace

ChromePerfDashboardMetricsExporter::ChromePerfDashboardMetricsExporter(
    absl::string_view export_file_path)
    : export_file_path_(export_file_path) {}

bool ChromePerfDashboardMetricsExporter::Export(
    rtc::ArrayView<const Metric> metrics) {
  std::unique_ptr<PerfTestResultWriter> writer =
      absl::WrapUnique<PerfTestResultWriter>(CreateHistogramWriter());
  for (const Metric& metric : metrics) {
    std::vector<double> samples(metric.time_series.samples.size());
    for (size_t i = 0; i < metric.time_series.samples.size(); ++i) {
      samples[i] = ToChromePerfDashboardValue(
          metric.time_series.samples[i].value, metric.unit);
    }

    if (samples.empty() && IsEmpty(metric.stats)) {
      // If no data was collected for the metric, 0 is expected to be
      // exported, so add 0 to the samples.
      samples.push_back(ToChromePerfDashboardValue(0, metric.unit));
    }

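    // Prefer raw samples when present; otherwise fall back to the
    // precomputed mean and standard deviation from `metric.stats`.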
    if (!samples.empty()) {
      writer->LogResultList(
          metric.name, metric.test_case, samples,
          ToChromePerfDashboardUnit(metric.unit),
          /*important=*/false,
          ToChromePerfDashboardImproveDirection(metric.improvement_direction));
    } else {
      writer->LogResultMeanAndError(
          metric.name, metric.test_case, *metric.stats.mean,
          *metric.stats.stddev, ToChromePerfDashboardUnit(metric.unit),
          /*important=*/false,
          ToChromePerfDashboardImproveDirection(metric.improvement_direction));
    }
  }
  return WriteMetricsToFile(export_file_path_, writer->Serialize());
}

}  // namespace test
}  // namespace webrtc
api/test/metrics/chrome_perf_dashboard_metrics_exporter.h (new file, 41 lines)
@@ -0,0 +1,41 @@
/*
 *  Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_
#define API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_

#include <string>

#include "absl/strings/string_view.h"
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"

namespace webrtc {
namespace test {

// Exports all collected metrics in the Chrome Perf Dashboard proto format.
class ChromePerfDashboardMetricsExporter : public MetricsExporter {
 public:
  // `export_file_path` - path where the proto file will be written.
  explicit ChromePerfDashboardMetricsExporter(
      absl::string_view export_file_path);
  ~ChromePerfDashboardMetricsExporter() override = default;

  bool Export(rtc::ArrayView<const Metric> metrics) override;

 private:
  const std::string export_file_path_;
};

}  // namespace test
}  // namespace webrtc

#endif  // API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_
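
A minimal usage sketch of the new exporter (illustrative only: the function name, output path, and metric values are made up, and only mean/stddev stats are set, so Export() takes the mean-and-error branch shown in the .cc above):

#include <string>
#include <vector>

#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"
#include "api/test/metrics/metric.h"

// Writes one metric to `path` as a Chrome Perf Dashboard HistogramSet proto.
bool ExportExampleMetric(const std::string& path) {
  webrtc::test::Metric metric{
      .name = "decode_time",
      .unit = webrtc::test::Unit::kTimeMs,
      .improvement_direction =
          webrtc::test::ImprovementDirection::kSmallerIsBetter,
      .test_case = "example_test",
      .time_series = webrtc::test::Metric::TimeSeries{.samples = {}},
      // With no samples, both mean and stddev must be set: the exporter
      // dereferences them when logging mean and error.
      .stats = webrtc::test::Metric::Stats{.mean = 12.0, .stddev = 2.0}};
  webrtc::test::ChromePerfDashboardMetricsExporter exporter(path);
  return exporter.Export(std::vector<webrtc::test::Metric>{metric});
}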
api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc (new file, 217 lines)
@@ -0,0 +1,217 @@
/*
 *  Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"

#include <fstream>
#include <map>
#include <vector>

#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/testsupport/file_utils.h"
#include "third_party/catapult/tracing/tracing/value/histogram.h"

namespace webrtc {
namespace test {
namespace {

using ::testing::DoubleNear;
using ::testing::Eq;
using ::testing::Test;

namespace proto = ::catapult::tracing::tracing::proto;

std::map<std::string, std::string> DefaultMetadata() {
  return std::map<std::string, std::string>{{"key", "value"}};
}

Metric::TimeSeries::Sample Sample(double value) {
  return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1),
                                    .value = value,
                                    .sample_metadata = DefaultMetadata()};
}

std::string ReadFileAsString(const std::string& filename) {
  std::ifstream infile(filename, std::ios_base::binary);
  auto buffer = std::vector<char>(std::istreambuf_iterator<char>(infile),
                                  std::istreambuf_iterator<char>());
  return std::string(buffer.begin(), buffer.end());
}

class ChromePerfDashboardMetricsExporterTest : public Test {
 protected:
  ~ChromePerfDashboardMetricsExporterTest() override = default;

  void SetUp() override {
    temp_filename_ = webrtc::test::TempFilename(
        webrtc::test::OutputPath(),
        "chrome_perf_dashboard_metrics_exporter_test");
  }

  void TearDown() override {
    ASSERT_TRUE(webrtc::test::RemoveFile(temp_filename_));
  }

  std::string temp_filename_;
};

TEST_F(ChromePerfDashboardMetricsExporterTest, ExportMetricFormatCorrect) {
  Metric metric1{
      .name = "test_metric1",
      .unit = Unit::kTimeMs,
      .improvement_direction = ImprovementDirection::kBiggerIsBetter,
      .test_case = "test_case_name1",
      .metric_metadata = DefaultMetadata(),
      .time_series =
          Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}},
      .stats =
          Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
  Metric metric2{
      .name = "test_metric2",
      .unit = Unit::kKilobitsPerSecond,
      .improvement_direction = ImprovementDirection::kSmallerIsBetter,
      .test_case = "test_case_name2",
      .metric_metadata = DefaultMetadata(),
      .time_series =
          Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}},
      .stats = Metric::Stats{
          .mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}};

  ChromePerfDashboardMetricsExporter exporter(temp_filename_);

  ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric1, metric2}));
  proto::HistogramSet actual_histogram_set;
  actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_));
  EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(2));

  // Validate output for `metric1`
  EXPECT_THAT(actual_histogram_set.histograms(0).name(), Eq("test_metric1"));
  EXPECT_THAT(actual_histogram_set.histograms(0).unit().unit(),
              Eq(proto::Unit::MS_BEST_FIT_FORMAT));
  EXPECT_THAT(actual_histogram_set.histograms(0).unit().improvement_direction(),
              Eq(proto::ImprovementDirection::BIGGER_IS_BETTER));
  EXPECT_THAT(
      actual_histogram_set.histograms(0).diagnostics().diagnostic_map().size(),
      Eq(1lu));
  EXPECT_THAT(actual_histogram_set.histograms(0)
                  .diagnostics()
                  .diagnostic_map()
                  .at("stories")
                  .generic_set()
                  .values(0),
              Eq("\"test_case_name1\""));
  EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(2));
  EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(10.0));
  EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(1), Eq(20.0));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(2));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(20));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(),
              DoubleNear(2.64916, 0.1));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(15));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(10));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(30));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(50));
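  // Note: running().variance() is the sample variance of {10, 20}:
  // ((10 - 15)^2 + (20 - 15)^2) / (2 - 1) = 50.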

  // Validate output for `metric2`
  EXPECT_THAT(actual_histogram_set.histograms(1).name(), Eq("test_metric2"));
  EXPECT_THAT(actual_histogram_set.histograms(1).unit().unit(),
              Eq(proto::Unit::BYTES_PER_SECOND));
  EXPECT_THAT(actual_histogram_set.histograms(1).unit().improvement_direction(),
              Eq(proto::ImprovementDirection::SMALLER_IS_BETTER));
  EXPECT_THAT(
      actual_histogram_set.histograms(1).diagnostics().diagnostic_map().size(),
      Eq(1lu));
  EXPECT_THAT(actual_histogram_set.histograms(1)
                  .diagnostics()
                  .diagnostic_map()
                  .at("stories")
                  .generic_set()
                  .values(0),
              Eq("\"test_case_name2\""));
  EXPECT_THAT(actual_histogram_set.histograms(1).sample_values().size(), Eq(2));
  EXPECT_THAT(actual_histogram_set.histograms(1).sample_values(0), Eq(2500.0));
  EXPECT_THAT(actual_histogram_set.histograms(1).sample_values(1), Eq(5000.0));
  EXPECT_THAT(actual_histogram_set.histograms(1).running().count(), Eq(2));
  EXPECT_THAT(actual_histogram_set.histograms(1).running().max(), Eq(5000));
  EXPECT_THAT(actual_histogram_set.histograms(1).running().meanlogs(),
              DoubleNear(8.17062, 0.1));
  EXPECT_THAT(actual_histogram_set.histograms(1).running().mean(), Eq(3750));
  EXPECT_THAT(actual_histogram_set.histograms(1).running().min(), Eq(2500));
  EXPECT_THAT(actual_histogram_set.histograms(1).running().sum(), Eq(7500));
  EXPECT_THAT(actual_histogram_set.histograms(1).running().variance(),
              Eq(3125000));
}

TEST_F(ChromePerfDashboardMetricsExporterTest,
       ExportEmptyMetricExportsZeroValue) {
  Metric metric{.name = "test_metric",
                .unit = Unit::kTimeMs,
                .improvement_direction = ImprovementDirection::kBiggerIsBetter,
                .test_case = "test_case_name",
                .metric_metadata = DefaultMetadata(),
                .time_series = Metric::TimeSeries{.samples = {}},
                .stats = Metric::Stats{}};

  ChromePerfDashboardMetricsExporter exporter(temp_filename_);

  ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric}));
  proto::HistogramSet actual_histogram_set;
  actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_));
  EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1));

  // Validate values for `metric`
  EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1));
  EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(0.0));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().max(),
              DoubleNear(0, 1e-6));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(), Eq(0));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(0));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(0));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(0));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0));
}

TEST_F(ChromePerfDashboardMetricsExporterTest,
       ExportMetricWithOnlyStatsExportsMeanValues) {
  Metric metric{.name = "test_metric",
                .unit = Unit::kTimeMs,
                .improvement_direction = ImprovementDirection::kBiggerIsBetter,
                .test_case = "test_case_name",
                .metric_metadata = DefaultMetadata(),
                .time_series = Metric::TimeSeries{.samples = {}},
                .stats = Metric::Stats{
                    .mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};

  ChromePerfDashboardMetricsExporter exporter(temp_filename_);

  ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric}));
  proto::HistogramSet actual_histogram_set;
  actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_));
  EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1));

  // Validate values for `metric`
  EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1));
  EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(15.0));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(15));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(),
              DoubleNear(2.70805, 0.1));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(15));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(15));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(15));
  EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0));
}

}  // namespace
}  // namespace test
}  // namespace webrtc