Propagate already collected stats for frames in flight in DVQA to comparisons

Bug: b/196229820
Change-Id: Ic1417bfe8de4fb83ce8527be28f99616148677c6
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/229186
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34805}
This commit is contained in:
Artem Titov 2021-08-19 15:41:00 +02:00 committed by WebRTC LUCI CQ
parent 57d13105e5
commit 8d8bc62265
8 changed files with 582 additions and 59 deletions

View File

@ -527,6 +527,7 @@ if (!build_with_chromium) {
"../..:test_support",
"../../../api:create_frame_generator",
"../../../api/units:timestamp",
"../../../rtc_base:stringutils",
"../../../system_wrappers",
]
}

View File

@ -194,8 +194,10 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
frames_comparator_.AddComparison(
InternalStatsKey(stream_index, peer_index, i), it->second.frame(),
absl::nullopt, true, it->second.GetStatsForPeer(i));
InternalStatsKey(stream_index, peer_index, i),
/*captured=*/absl::nullopt,
/*rendered=*/absl::nullopt, FrameComparisonType::kDroppedFrame,
it->second.GetStatsForPeer(i));
}
captured_frames_in_flight_.erase(it);
@ -420,7 +422,8 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
frames_comparator_.AddComparison(
stats_key, dropped_frame, absl::nullopt, true,
stats_key, /*captured=*/absl::nullopt, /*rendered=*/absl::nullopt,
FrameComparisonType::kDroppedFrame,
dropped_frame_it->second.GetStatsForPeer(peer_index));
if (dropped_frame_it->second.HaveAllPeersReceived()) {
@ -439,7 +442,8 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
frames_comparator_.AddComparison(
stats_key, dropped_count, captured_frame, frame, /*dropped=*/false,
stats_key, dropped_count, captured_frame, /*rendered=*/frame,
FrameComparisonType::kRegular,
frame_in_flight->GetStatsForPeer(peer_index));
if (frame_it->second.HaveAllPeersReceived()) {
@ -526,9 +530,14 @@ void DefaultVideoQualityAnalyzer::Stop() {
}
state_ = State::kStopped;
// Add the amount of frames in flight to the analyzer stats before all left
// frames in flight will be sent to the `frames_compartor_`.
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
for (auto& state_entry : stream_states_) {
const size_t stream_index = state_entry.first;
const StreamState& stream_state = state_entry.second;
StreamState& stream_state = state_entry.second;
for (size_t i = 0; i < peers_->size(); ++i) {
if (i == stream_state.owner() && !options_.enable_receive_own_stream) {
continue;
@ -545,6 +554,25 @@ void DefaultVideoQualityAnalyzer::Stop() {
last_rendered_frame_times.emplace(
stats_key, stream_state.last_rendered_frame_time(i).value());
}
// Add frames in flight for this stream into frames comparator.
// Frames in flight were not rendered, so they won't affect stream's
// last rendered frame time.
while (!stream_state.IsEmpty(i)) {
uint16_t frame_id = stream_state.PopFront(i);
auto it = captured_frames_in_flight_.find(frame_id);
RTC_DCHECK(it != captured_frames_in_flight_.end());
FrameInFlight& frame = it->second;
frames_comparator_.AddComparison(
stats_key, /*captured=*/absl::nullopt,
/*rendered=*/absl::nullopt, FrameComparisonType::kFrameInFlight,
frame.GetStatsForPeer(i));
if (frame.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(it);
}
}
}
}
}
@ -563,8 +591,6 @@ void DefaultVideoQualityAnalyzer::Stop() {
frames_comparator_stats.cpu_overloaded_comparisons_done;
analyzer_stats_.memory_overloaded_comparisons_done =
frames_comparator_stats.memory_overloaded_comparisons_done;
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
}
ReportResults();
}

View File

@ -37,6 +37,132 @@ SamplesStatsCounter::StatsSample StatsSample(double value,
return SamplesStatsCounter::StatsSample{value, sampling_time};
}
// Checks that `comparison` obeys the invariants of its FrameComparisonType
// and returns it unchanged. All checks are RTC_DCHECKs, so in builds with
// DCHECKs disabled this function is a pass-through.
FrameComparison ValidateFrameComparison(FrameComparison comparison) {
  RTC_DCHECK(comparison.frame_stats.captured_time.IsFinite())
      << "Any comparison has to have finite captured_time";
  switch (comparison.type) {
    case FrameComparisonType::kRegular:
      // Regular comparison has to have all FrameStats filled in.
      RTC_DCHECK(comparison.captured.has_value() ||
                 comparison.overload_reason != OverloadReason::kNone)
          << "Regular comparison has to have captured frame if it's not "
          << "overloaded comparison";
      RTC_DCHECK(comparison.rendered.has_value() ||
                 comparison.overload_reason != OverloadReason::kNone)
          << "rendered frame has to be presented if it's not overloaded "
          << "comparison";
      RTC_DCHECK(comparison.frame_stats.pre_encode_time.IsFinite())
          << "Regular comparison has to have finite pre_encode_time";
      RTC_DCHECK(comparison.frame_stats.encoded_time.IsFinite())
          << "Regular comparison has to have finite encoded_time";
      RTC_DCHECK(comparison.frame_stats.received_time.IsFinite())
          << "Regular comparison has to have finite received_time";
      RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite())
          << "Regular comparison has to have finite decode_start_time";
      RTC_DCHECK(comparison.frame_stats.decode_end_time.IsFinite())
          << "Regular comparison has to have finite decode_end_time";
      RTC_DCHECK(comparison.frame_stats.rendered_time.IsFinite())
          << "Regular comparison has to have finite rendered_time";
      RTC_DCHECK(comparison.frame_stats.rendered_frame_width.has_value())
          << "Regular comparison has to have rendered_frame_width";
      RTC_DCHECK(comparison.frame_stats.rendered_frame_height.has_value())
          << "Regular comparison has to have rendered_frame_height";
      RTC_DCHECK(comparison.frame_stats.used_encoder.has_value())
          << "Regular comparison has to have used_encoder";
      RTC_DCHECK(comparison.frame_stats.used_decoder.has_value())
          << "Regular comparison has to have used_decoder";
      break;
    case FrameComparisonType::kDroppedFrame:
      // Frame can be dropped before encoder, by encoder, inside network or
      // after decoder.
      RTC_DCHECK(!comparison.captured.has_value())
          << "Dropped frame comparison can't have captured frame";
      RTC_DCHECK(!comparison.rendered.has_value())
          << "Dropped frame comparison can't have rendered frame";
      // If the frame reached the encoder, the pre-encode stage and encoder
      // info have to be present as well.
      if (comparison.frame_stats.encoded_time.IsFinite()) {
        RTC_DCHECK(comparison.frame_stats.used_encoder.has_value())
            << "Dropped frame comparison has to have used_encoder when "
            << "encoded_time is set";
        RTC_DCHECK(comparison.frame_stats.pre_encode_time.IsFinite())
            << "Dropped frame comparison has to have finite pre_encode_time "
            << "when encoded_time is finite.";
      }
      // A finite decode_end_time implies the whole receive/decode pipeline
      // ran for this frame; otherwise none of those stages may be set.
      if (comparison.frame_stats.decode_end_time.IsFinite()) {
        RTC_DCHECK(comparison.frame_stats.received_time.IsFinite())
            << "Dropped frame comparison has to have received_time when "
            << "decode_end_time is set";
        RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite())
            << "Dropped frame comparison has to have decode_start_time when "
            << "decode_end_time is set";
        RTC_DCHECK(comparison.frame_stats.used_decoder.has_value())
            << "Dropped frame comparison has to have used_decoder when "
            << "decode_end_time is set";
      } else {
        RTC_DCHECK(!comparison.frame_stats.received_time.IsFinite())
            << "Dropped frame comparison can't have received_time when "
            << "decode_end_time is not set";
        RTC_DCHECK(!comparison.frame_stats.decode_start_time.IsFinite())
            << "Dropped frame comparison can't have decode_start_time when "
            << "decode_end_time is not set";
        RTC_DCHECK(!comparison.frame_stats.used_decoder.has_value())
            << "Dropped frame comparison can't have used_decoder when "
            << "decode_end_time is not set";
      }
      RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite())
          << "Dropped frame comparison can't have rendered_time";
      RTC_DCHECK(!comparison.frame_stats.rendered_frame_width.has_value())
          << "Dropped frame comparison can't have rendered_frame_width";
      RTC_DCHECK(!comparison.frame_stats.rendered_frame_height.has_value())
          << "Dropped frame comparison can't have rendered_frame_height";
      break;
    case FrameComparisonType::kFrameInFlight:
      // Frame in flight comparison may miss almost any FrameStats, but if
      // stats for stage X are set, then stats for stage X - 1 also has to be
      // set. Also these frames were never rendered.
      RTC_DCHECK(!comparison.captured.has_value())
          << "Frame in flight comparison can't have captured frame";
      RTC_DCHECK(!comparison.rendered.has_value())
          << "Frame in flight comparison can't have rendered frame";
      RTC_DCHECK(!comparison.frame_stats.rendered_time.IsFinite())
          << "Frame in flight comparison can't have rendered_time";
      RTC_DCHECK(!comparison.frame_stats.rendered_frame_width.has_value())
          << "Frame in flight comparison can't have rendered_frame_width";
      RTC_DCHECK(!comparison.frame_stats.rendered_frame_height.has_value())
          << "Frame in flight comparison can't have rendered_frame_height";
      // The checks below walk the pipeline backwards: each finite stage
      // timestamp requires the stats of the previous stage to be set too.
      if (comparison.frame_stats.decode_end_time.IsFinite()) {
        RTC_DCHECK(comparison.frame_stats.used_decoder.has_value())
            << "Frame in flight comparison has to have used_decoder when "
            << "decode_end_time is set";
        RTC_DCHECK(comparison.frame_stats.decode_start_time.IsFinite())
            << "Frame in flight comparison has to have finite "
            << "decode_start_time when decode_end_time is finite.";
      }
      if (comparison.frame_stats.decode_start_time.IsFinite()) {
        RTC_DCHECK(comparison.frame_stats.received_time.IsFinite())
            << "Frame in flight comparison has to have finite received_time "
            << "when decode_start_time is finite.";
      }
      if (comparison.frame_stats.received_time.IsFinite()) {
        RTC_DCHECK(comparison.frame_stats.encoded_time.IsFinite())
            << "Frame in flight comparison has to have finite encoded_time "
            << "when received_time is finite.";
      }
      if (comparison.frame_stats.encoded_time.IsFinite()) {
        RTC_DCHECK(comparison.frame_stats.used_encoder.has_value())
            << "Frame in flight comparison has to have used_encoder when "
            << "encoded_time is set";
        RTC_DCHECK(comparison.frame_stats.pre_encode_time.IsFinite())
            << "Frame in flight comparison has to have finite pre_encode_time "
            << "when encoded_time is finite.";
      }
      break;
  }
  return comparison;
}
} // namespace
void DefaultVideoQualityAnalyzerFramesComparator::Start(int max_threads_count) {
@ -144,13 +270,13 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparison(
InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameComparisonType type,
FrameStats frame_stats) {
MutexLock lock(&mutex_);
RTC_CHECK_EQ(state_, State::kActive)
<< "Frames comparator has to be started before it will be used";
AddComparisonInternal(std::move(stats_key), std::move(captured),
std::move(rendered), dropped, std::move(frame_stats));
std::move(rendered), type, std::move(frame_stats));
}
void DefaultVideoQualityAnalyzerFramesComparator::AddComparison(
@ -158,7 +284,7 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparison(
int skipped_between_rendered,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameComparisonType type,
FrameStats frame_stats) {
MutexLock lock(&mutex_);
RTC_CHECK_EQ(state_, State::kActive)
@ -166,14 +292,14 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparison(
stream_stats_.at(stats_key).skipped_between_rendered.AddSample(
StatsSample(skipped_between_rendered, Now()));
AddComparisonInternal(std::move(stats_key), std::move(captured),
std::move(rendered), dropped, std::move(frame_stats));
std::move(rendered), type, std::move(frame_stats));
}
void DefaultVideoQualityAnalyzerFramesComparator::AddComparisonInternal(
InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameComparisonType type,
FrameStats frame_stats) {
cpu_measurer_.StartExcludingCpuThreadTime();
frames_comparator_stats_.comparisons_queue_size.AddSample(
@ -181,17 +307,18 @@ void DefaultVideoQualityAnalyzerFramesComparator::AddComparisonInternal(
// If there too many computations waiting in the queue, we won't provide
// frames itself to make future computations lighter.
if (comparisons_.size() >= kMaxActiveComparisons) {
comparisons_.emplace_back(std::move(stats_key), absl::nullopt,
absl::nullopt, dropped, std::move(frame_stats),
OverloadReason::kCpu);
comparisons_.emplace_back(ValidateFrameComparison(
FrameComparison(std::move(stats_key), /*captured=*/absl::nullopt,
/*rendered=*/absl::nullopt, type,
std::move(frame_stats), OverloadReason::kCpu)));
} else {
OverloadReason overload_reason = OverloadReason::kNone;
if (!captured && !dropped) {
if (!captured && type == FrameComparisonType::kRegular) {
overload_reason = OverloadReason::kMemory;
}
comparisons_.emplace_back(std::move(stats_key), std::move(captured),
std::move(rendered), dropped,
std::move(frame_stats), overload_reason);
comparisons_.emplace_back(ValidateFrameComparison(FrameComparison(
std::move(stats_key), std::move(captured), std::move(rendered), type,
std::move(frame_stats), overload_reason)));
}
comparison_available_event_.Set();
cpu_measurer_.StopExcludingCpuThreadTime();
@ -235,11 +362,14 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparisons() {
void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison(
const FrameComparison& comparison) {
// Comparison is checked to be valid before adding, so we can use this
// assumptions during computations.
// Perform expensive psnr and ssim calculations while not holding lock.
double psnr = -1.0;
double ssim = -1.0;
if (options_.heavy_metrics_computation_enabled && comparison.captured &&
!comparison.dropped) {
if (options_.heavy_metrics_computation_enabled &&
comparison.captured.has_value() && comparison.rendered.has_value()) {
rtc::scoped_refptr<I420BufferInterface> reference_buffer =
comparison.captured->video_frame_buffer()->ToI420();
rtc::scoped_refptr<I420BufferInterface> test_buffer =
@ -260,18 +390,21 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison(
auto stats_it = stream_stats_.find(comparison.stats_key);
RTC_CHECK(stats_it != stream_stats_.end()) << comparison.stats_key.ToString();
webrtc_pc_e2e::StreamStats* stats = &stats_it->second;
frames_comparator_stats_.comparisons_done++;
if (comparison.overload_reason == OverloadReason::kCpu) {
frames_comparator_stats_.cpu_overloaded_comparisons_done++;
} else if (comparison.overload_reason == OverloadReason::kMemory) {
frames_comparator_stats_.memory_overloaded_comparisons_done++;
}
if (psnr > 0) {
stats->psnr.AddSample(StatsSample(psnr, frame_stats.rendered_time));
}
if (ssim > 0) {
stats->ssim.AddSample(StatsSample(ssim, frame_stats.received_time));
}
if (frame_stats.encoded_time.IsFinite()) {
stats->encode_time_ms.AddSample(StatsSample(
(frame_stats.encoded_time - frame_stats.pre_encode_time).ms(),
@ -288,25 +421,32 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison(
}
}
// Next stats can be calculated only if frame was received on remote side.
if (!comparison.dropped) {
stats->resolution_of_rendered_frame.AddSample(
StatsSample(*comparison.frame_stats.rendered_frame_width *
*comparison.frame_stats.rendered_frame_height,
frame_stats.rendered_time));
stats->transport_time_ms.AddSample(StatsSample(
(frame_stats.decode_start_time - frame_stats.encoded_time).ms(),
frame_stats.received_time));
stats->total_delay_incl_transport_ms.AddSample(StatsSample(
(frame_stats.rendered_time - frame_stats.captured_time).ms(),
frame_stats.received_time));
stats->decode_time_ms.AddSample(StatsSample(
(frame_stats.decode_end_time - frame_stats.decode_start_time).ms(),
frame_stats.decode_end_time));
stats->receive_to_render_time_ms.AddSample(StatsSample(
(frame_stats.rendered_time - frame_stats.received_time).ms(),
frame_stats.rendered_time));
if (comparison.type != FrameComparisonType::kDroppedFrame) {
if (frame_stats.rendered_time.IsFinite()) {
stats->resolution_of_rendered_frame.AddSample(
StatsSample(*comparison.frame_stats.rendered_frame_width *
*comparison.frame_stats.rendered_frame_height,
frame_stats.rendered_time));
stats->total_delay_incl_transport_ms.AddSample(StatsSample(
(frame_stats.rendered_time - frame_stats.captured_time).ms(),
frame_stats.received_time));
stats->receive_to_render_time_ms.AddSample(StatsSample(
(frame_stats.rendered_time - frame_stats.received_time).ms(),
frame_stats.rendered_time));
}
if (frame_stats.decode_start_time.IsFinite()) {
stats->transport_time_ms.AddSample(StatsSample(
(frame_stats.decode_start_time - frame_stats.encoded_time).ms(),
frame_stats.decode_start_time));
}
if (frame_stats.decode_end_time.IsFinite()) {
stats->decode_time_ms.AddSample(StatsSample(
(frame_stats.decode_end_time - frame_stats.decode_start_time).ms(),
frame_stats.decode_end_time));
}
if (frame_stats.prev_frame_rendered_time.IsFinite()) {
if (frame_stats.prev_frame_rendered_time.IsFinite() &&
frame_stats.rendered_time.IsFinite()) {
TimeDelta time_between_rendered_frames =
frame_stats.rendered_time - frame_stats.prev_frame_rendered_time;
stats->time_between_rendered_frames_ms.AddSample(StatsSample(

View File

@ -92,10 +92,18 @@ class DefaultVideoQualityAnalyzerFramesComparator {
stream_started_time,
Timestamp start_time);
// `captured` - video frame captured by sender to use for PSNR/SSIM
// computation. If `type` is `FrameComparisonType::kRegular` and
// `captured` is `absl::nullopt` comparison is assumed to be overloaded
// due to memory constraints.
// `rendered` - video frame rendered by receiver to use for PSNR/SSIM
// computation. Required only if `type` is
// `FrameComparisonType::kRegular`, but can still be omitted if
// `captured` is `absl::nullopt`.
void AddComparison(InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameComparisonType type,
FrameStats frame_stats);
// `skipped_between_rendered` - amount of frames dropped on this stream before
// last received frame and current frame.
@ -103,7 +111,7 @@ class DefaultVideoQualityAnalyzerFramesComparator {
int skipped_between_rendered,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameComparisonType type,
FrameStats frame_stats);
std::map<InternalStatsKey, webrtc_pc_e2e::StreamStats> stream_stats() const {
@ -121,7 +129,7 @@ class DefaultVideoQualityAnalyzerFramesComparator {
void AddComparisonInternal(InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameComparisonType type,
FrameStats frame_stats)
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void ProcessComparisons();

View File

@ -11,9 +11,11 @@
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
#include <map>
#include <vector>
#include "api/test/create_frame_generator.h"
#include "api/units/timestamp.h"
#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/clock.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
@ -56,18 +58,43 @@ FrameStats FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
frame_stats.decode_end_time = captured_time + TimeDelta::Millis(50);
frame_stats.rendered_time = captured_time + TimeDelta::Millis(60);
frame_stats.used_encoder = Vp8CodecForOneFrame(1, frame_stats.encoded_time);
frame_stats.used_encoder =
frame_stats.used_decoder =
Vp8CodecForOneFrame(1, frame_stats.decode_end_time);
frame_stats.rendered_frame_width = 10;
frame_stats.rendered_frame_height = 10;
return frame_stats;
}
// Returns a copy of `stats` with every timestamp moved forward by `delta`.
// Non-time fields (codec infos and rendered frame size) are copied verbatim.
FrameStats ShiftStatsOn(const FrameStats& stats, TimeDelta delta) {
  FrameStats shifted(stats.captured_time + delta);
  shifted.pre_encode_time = stats.pre_encode_time + delta;
  shifted.encoded_time = stats.encoded_time + delta;
  shifted.received_time = stats.received_time + delta;
  shifted.decode_start_time = stats.decode_start_time + delta;
  shifted.decode_end_time = stats.decode_end_time + delta;
  shifted.rendered_time = stats.rendered_time + delta;
  shifted.used_encoder = stats.used_encoder;
  shifted.used_decoder = stats.used_decoder;
  shifted.rendered_frame_width = stats.rendered_frame_width;
  shifted.rendered_frame_height = stats.rendered_frame_height;
  return shifted;
}
// Returns the value of the first recorded sample in `counter`, reporting a
// test failure (via EXPECT) when the counter holds no samples.
double GetFirstOrDie(const SamplesStatsCounter& counter) {
  EXPECT_TRUE(!counter.IsEmpty()) << "Counter has to be not empty";
  const auto samples = counter.GetSamples();
  return samples[0];
}
// Renders all timed samples of `counter` as a human readable string like
// "{ time_ms=1; value=2}, ..." for use in test failure messages.
std::string ToString(const SamplesStatsCounter& counter) {
  rtc::StringBuilder result;
  for (const StatsSample& sample : counter.GetTimedSamples()) {
    result << "{ time_ms=" << sample.time.ms() << "; value=" << sample.value
           << "}, ";
  }
  return result.str();
}
TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
StatsPresentedAfterAddingOneComparison) {
DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
@ -89,8 +116,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
stream_start_time, stream_start_time);
comparator.AddComparison(stats_key,
/*captured=*/absl::nullopt,
/*rendered=*/absl::nullopt, /*dropped=*/false,
frame_stats);
/*rendered=*/absl::nullopt,
FrameComparisonType::kRegular, frame_stats);
comparator.Stop({});
std::map<InternalStatsKey, webrtc_pc_e2e::StreamStats> stats =
@ -130,12 +157,12 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
stream_start_time, stream_start_time);
comparator.AddComparison(stats_key,
/*captured=*/absl::nullopt,
/*rendered=*/absl::nullopt, /*dropped=*/false,
frame_stats1);
/*rendered=*/absl::nullopt,
FrameComparisonType::kRegular, frame_stats1);
comparator.AddComparison(stats_key,
/*captured=*/absl::nullopt,
/*rendered=*/absl::nullopt, /*dropped=*/false,
frame_stats2);
/*rendered=*/absl::nullopt,
FrameComparisonType::kRegular, frame_stats2);
comparator.Stop({});
std::map<InternalStatsKey, webrtc_pc_e2e::StreamStats> stats =
@ -143,7 +170,117 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
EXPECT_DOUBLE_EQ(
GetFirstOrDie(stats.at(stats_key).time_between_rendered_frames_ms), 15.0);
EXPECT_DOUBLE_EQ(stats.at(stats_key).encode_frame_rate.GetEventsPerSecond(),
2.0 / 15 * 1000);
2.0 / 15 * 1000)
<< "There should be 2 events with interval of 15 ms";
;
}
// Feeds the comparator 5 partially-filled kFrameInFlight comparisons plus one
// fully-filled kRegular comparison and verifies that every per-stage metric
// aggregates only the samples whose corresponding timings were set.
TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
     FrameInFlightStatsAreHandledCorrectly) {
  DefaultVideoQualityAnalyzerCpuMeasurer cpu_measurer;
  DefaultVideoQualityAnalyzerFramesComparator comparator(
      Clock::GetRealTimeClock(), cpu_measurer, AnalyzerOptionsForTest());

  Timestamp stream_start_time = Clock::GetRealTimeClock()->CurrentTime();
  size_t stream = 0;
  size_t sender = 0;
  size_t receiver = 1;
  size_t peers_count = 2;
  InternalStatsKey stats_key(stream, sender, receiver);

  // There are 7 different timings inside frame stats: captured, pre_encode,
  // encoded, received, decode_start, decode_end, rendered. captured is always
  // set and received is set together with decode_start. So we create 6
  // different frame stats with interval of 15 ms, where for each stat next
  // timings will be set
  // * 1st - captured
  // * 2nd - captured, pre_encode
  // * 3rd - captured, pre_encode, encoded
  // * 4th - captured, pre_encode, encoded, received, decode_start
  // * 5th - captured, pre_encode, encoded, received, decode_start, decode_end
  // * 6th - all of them set
  std::vector<FrameStats> stats;
  // 1st stat
  FrameStats frame_stats(stream_start_time);
  stats.push_back(frame_stats);
  // 2nd stat
  frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
  frame_stats.pre_encode_time =
      frame_stats.captured_time + TimeDelta::Millis(10);
  stats.push_back(frame_stats);
  // 3rd stat
  frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
  frame_stats.encoded_time = frame_stats.captured_time + TimeDelta::Millis(20);
  frame_stats.used_encoder = Vp8CodecForOneFrame(1, frame_stats.encoded_time);
  stats.push_back(frame_stats);
  // 4th stat
  frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
  frame_stats.received_time = frame_stats.captured_time + TimeDelta::Millis(30);
  frame_stats.decode_start_time =
      frame_stats.captured_time + TimeDelta::Millis(40);
  stats.push_back(frame_stats);
  // 5th stat
  frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
  frame_stats.decode_end_time =
      frame_stats.captured_time + TimeDelta::Millis(50);
  frame_stats.used_decoder =
      Vp8CodecForOneFrame(1, frame_stats.decode_end_time);
  stats.push_back(frame_stats);
  // 6th stat
  frame_stats = ShiftStatsOn(frame_stats, TimeDelta::Millis(15));
  frame_stats.rendered_time = frame_stats.captured_time + TimeDelta::Millis(60);
  frame_stats.rendered_frame_width = 10;
  frame_stats.rendered_frame_height = 10;
  stats.push_back(frame_stats);

  comparator.Start(1);
  comparator.EnsureStatsForStream(stream, sender, peers_count,
                                  stream_start_time, stream_start_time);
  // The first 5 stats go in as frame-in-flight comparisons; the last (fully
  // filled) one goes in as a regular comparison.
  for (size_t i = 0; i < stats.size() - 1; ++i) {
    comparator.AddComparison(stats_key,
                             /*captured=*/absl::nullopt,
                             /*rendered=*/absl::nullopt,
                             FrameComparisonType::kFrameInFlight, stats[i]);
  }
  comparator.AddComparison(stats_key,
                           /*captured=*/absl::nullopt,
                           /*rendered=*/absl::nullopt,
                           FrameComparisonType::kRegular,
                           stats[stats.size() - 1]);
  comparator.Stop({});

  EXPECT_EQ(comparator.stream_stats().size(), 1lu);
  webrtc_pc_e2e::StreamStats result_stats =
      comparator.stream_stats().at(stats_key);
  EXPECT_DOUBLE_EQ(result_stats.transport_time_ms.GetAverage(), 20.0)
      << ToString(result_stats.transport_time_ms);
  EXPECT_EQ(result_stats.transport_time_ms.NumSamples(), 3);
  EXPECT_DOUBLE_EQ(result_stats.total_delay_incl_transport_ms.GetAverage(),
                   60.0)
      << ToString(result_stats.total_delay_incl_transport_ms);
  EXPECT_EQ(result_stats.total_delay_incl_transport_ms.NumSamples(), 1);
  EXPECT_DOUBLE_EQ(result_stats.encode_time_ms.GetAverage(), 10)
      << ToString(result_stats.encode_time_ms);
  EXPECT_EQ(result_stats.encode_time_ms.NumSamples(), 4);
  EXPECT_DOUBLE_EQ(result_stats.decode_time_ms.GetAverage(), 10)
      << ToString(result_stats.decode_time_ms);
  EXPECT_EQ(result_stats.decode_time_ms.NumSamples(), 2);
  EXPECT_DOUBLE_EQ(result_stats.receive_to_render_time_ms.GetAverage(), 30)
      << ToString(result_stats.receive_to_render_time_ms);
  EXPECT_EQ(result_stats.receive_to_render_time_ms.NumSamples(), 1);
  EXPECT_DOUBLE_EQ(result_stats.resolution_of_rendered_frame.GetAverage(), 100)
      << ToString(result_stats.resolution_of_rendered_frame);
  EXPECT_EQ(result_stats.resolution_of_rendered_frame.NumSamples(), 1);
  EXPECT_DOUBLE_EQ(result_stats.encode_frame_rate.GetEventsPerSecond(),
                   4.0 / 45 * 1000)
      << "There should be 4 events with interval of 15 ms";
}
} // namespace

View File

@ -39,13 +39,13 @@ bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
// Constructs a comparison for one frame together with its collected stats.
// `captured`/`rendered` may be absl::nullopt (dropped / in-flight frames, or
// when the comparator is overloaded and drops the frame payloads).
// NOTE: removed the stale `bool dropped` parameter and `dropped(dropped)`
// initializer that were left interleaved with their `FrameComparisonType`
// replacements — `dropped` is no longer a member of FrameComparison.
FrameComparison::FrameComparison(InternalStatsKey stats_key,
                                 absl::optional<VideoFrame> captured,
                                 absl::optional<VideoFrame> rendered,
                                 FrameComparisonType type,
                                 FrameStats frame_stats,
                                 OverloadReason overload_reason)
    : stats_key(std::move(stats_key)),
      captured(std::move(captured)),
      rendered(std::move(rendered)),
      type(type),
      frame_stats(std::move(frame_stats)),
      overload_reason(overload_reason) {}

View File

@ -75,6 +75,18 @@ enum class OverloadReason {
kMemory
};
// Kind of a frame comparison; determines which parts of FrameStats are
// expected to be populated (see ValidateFrameComparison).
enum class FrameComparisonType {
  // Comparison for captured and rendered frame.
  kRegular,
  // Comparison for captured frame that is known to be dropped somewhere in
  // video pipeline.
  kDroppedFrame,
  // Comparison for captured frame that was still in the video pipeline when
  // the test was stopped. It is unknown whether this frame was dropped or
  // would have been delivered if the test had continued.
  kFrameInFlight
};
// Represents comparison between two VideoFrames. Contains video frames itself
// and stats. Can be one of two types:
// 1. Normal - in this case `captured` is presented and either `rendered` is
@ -87,7 +99,7 @@ struct FrameComparison {
FrameComparison(InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameComparisonType type,
FrameStats frame_stats,
OverloadReason overload_reason);
@ -96,10 +108,7 @@ struct FrameComparison {
// queue.
absl::optional<VideoFrame> captured;
absl::optional<VideoFrame> rendered;
// If true frame was dropped somewhere from capturing to rendering and
// wasn't rendered on remote peer side. If `dropped` is true, `rendered`
// will be `absl::nullopt`.
bool dropped;
FrameComparisonType type;
FrameStats frame_stats;
OverloadReason overload_reason;
};

View File

@ -521,7 +521,10 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) {
}
}
TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) {
// Test the case which can happen when SFU is switching from one layer to
// another, so the same frame can be received twice by the same peer.
TEST(DefaultVideoQualityAnalyzerTest,
OneFrameReceivedTwiceBySamePeerWith2Receivers) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
@ -566,7 +569,9 @@ TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) {
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, 1);
// We have 2 comparisons here because 1 for the frame received by Bob and
// 1 for the frame in flight from Alice to Charlie.
EXPECT_EQ(stats.comparisons_done, 2);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, 1);
@ -1224,6 +1229,203 @@ TEST(DefaultVideoQualityAnalyzerTest, CodecTrackedCorrectly) {
EXPECT_EQ(stream_stats.decoders[1].last_frame_id, frames[5].id());
}
// Verifies that when the analyzer is stopped, frames still in flight are
// forwarded to the frames comparator and are reflected in both the global
// and the per-stream frame counters.
// Fixes: misspelled local `stream_conters` -> `stream_counters` and garbled
// comment grammar; test logic is unchanged.
TEST(DefaultVideoQualityAnalyzerTest,
     FramesInFlightAreCorrectlySentToTheComparatorAfterStop) {
  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
                                       /*type=*/absl::nullopt,
                                       /*num_squares=*/absl::nullopt);

  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), options);
  analyzer.Start("test_case",
                 std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
                 kAnalyzerMaxThreadsCount);

  // There are 7 different timings inside frame stats: captured, pre_encode,
  // encoded, received, decode_start, decode_end, rendered. captured is always
  // set and received is set together with decode_start. So we create 6
  // different frames, where for each frame next timings will be set
  // * 1st - all of them set
  // * 2nd - captured, pre_encode, encoded, received, decode_start, decode_end
  // * 3rd - captured, pre_encode, encoded, received, decode_start
  // * 4th - captured, pre_encode, encoded
  // * 5th - captured, pre_encode
  // * 6th - captured
  std::vector<VideoFrame> frames;
  // Sender side actions
  for (int i = 0; i < 6; ++i) {
    VideoFrame frame = NextFrame(frame_generator.get(), 1);
    frame.set_id(
        analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
    frames.push_back(frame);
  }
  for (int i = 0; i < 5; ++i) {
    analyzer.OnFramePreEncode(kSenderPeerName, frames[i]);
  }
  for (int i = 0; i < 4; ++i) {
    analyzer.OnFrameEncoded(kSenderPeerName, frames[i].id(),
                            FakeEncode(frames[i]),
                            VideoQualityAnalyzerInterface::EncoderStats());
  }

  // Receiver side actions
  for (int i = 0; i < 3; ++i) {
    analyzer.OnFramePreDecode(kReceiverPeerName, frames[i].id(),
                              FakeEncode(frames[i]));
  }
  for (int i = 0; i < 2; ++i) {
    analyzer.OnFrameDecoded(kReceiverPeerName, DeepCopy(frames[i]),
                            VideoQualityAnalyzerInterface::DecoderStats());
  }
  for (int i = 0; i < 1; ++i) {
    analyzer.OnFrameRendered(kReceiverPeerName, DeepCopy(frames[i]));
  }

  // Give the analyzer some time to process frames on its async thread. The
  // computations have to be fast (heavy metrics are disabled!), so if they
  // don't fit into 100ms it means we have an issue!
  SleepMs(100);
  analyzer.Stop();

  AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
  EXPECT_EQ(analyzer_stats.comparisons_done, 6);

  // The last sample of the frames-in-flight counter has to reflect the amount
  // of frames that were still in flight right before all of them were sent to
  // the comparator when Stop() was invoked.
  std::vector<StatsSample> frames_in_flight_sizes =
      GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
  EXPECT_EQ(frames_in_flight_sizes.back().value, 5)
      << ToString(frames_in_flight_sizes);

  FrameCounters frame_counters = analyzer.GetGlobalCounters();
  EXPECT_EQ(frame_counters.captured, 6);
  EXPECT_EQ(frame_counters.pre_encoded, 5);
  EXPECT_EQ(frame_counters.encoded, 4);
  EXPECT_EQ(frame_counters.received, 3);
  EXPECT_EQ(frame_counters.decoded, 2);
  EXPECT_EQ(frame_counters.rendered, 1);

  EXPECT_EQ(analyzer.GetStats().size(), 1lu);
  {
    // Per-stream counters of the single sender->receiver stream have to match
    // the global ones.
    FrameCounters stream_counters = analyzer.GetPerStreamCounters().at(
        StatsKey(kStreamLabel, kSenderPeerName, kReceiverPeerName));
    EXPECT_EQ(stream_counters.captured, 6);
    EXPECT_EQ(stream_counters.pre_encoded, 5);
    EXPECT_EQ(stream_counters.encoded, 4);
    EXPECT_EQ(stream_counters.received, 3);
    EXPECT_EQ(stream_counters.decoded, 2);
    EXPECT_EQ(stream_counters.rendered, 1);
  }
}
TEST(
    DefaultVideoQualityAnalyzerTest,
    FramesInFlightAreCorrectlySentToTheComparatorAfterStopForSenderAndReceiver) {
  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
                                       /*type=*/absl::nullopt,
                                       /*num_squares=*/absl::nullopt);
  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
  // The sender also receives its own stream, so each captured frame is
  // expected to be delivered to two peers: the sender itself and the receiver.
  options.enable_receive_own_stream = true;
  DefaultVideoQualityAnalyzer analyzer(Clock::GetRealTimeClock(), options);
  analyzer.Start("test_case",
                 std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
                 kAnalyzerMaxThreadsCount);
  // There are 7 different timings inside frame stats: captured, pre_encode,
  // encoded, received, decode_start, decode_end, rendered. captured is always
  // set and received is set together with decode_start. So we create 6
  // different frames, where for each frame next timings will be set
  //   * 1st - all of them set
  //   * 2nd - captured, pre_encode, encoded, received, decode_start, decode_end
  //   * 3rd - captured, pre_encode, encoded, received, decode_start
  //   * 4th - captured, pre_encode, encoded
  //   * 5th - captured, pre_encode
  //   * 6th - captured
  std::vector<VideoFrame> frames;
  // Sender side actions.
  for (int i = 0; i < 6; ++i) {
    VideoFrame frame = NextFrame(frame_generator.get(), 1);
    frame.set_id(
        analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
    frames.push_back(frame);
  }
  for (int i = 0; i < 5; ++i) {
    analyzer.OnFramePreEncode(kSenderPeerName, frames[i]);
  }
  for (int i = 0; i < 4; ++i) {
    analyzer.OnFrameEncoded(kSenderPeerName, frames[i].id(),
                            FakeEncode(frames[i]),
                            VideoQualityAnalyzerInterface::EncoderStats());
  }
  // Receiver side actions: each stage happens on both peers, because the
  // sender is subscribed to its own stream as well.
  for (int i = 0; i < 3; ++i) {
    analyzer.OnFramePreDecode(kSenderPeerName, frames[i].id(),
                              FakeEncode(frames[i]));
    analyzer.OnFramePreDecode(kReceiverPeerName, frames[i].id(),
                              FakeEncode(frames[i]));
  }
  for (int i = 0; i < 2; ++i) {
    analyzer.OnFrameDecoded(kSenderPeerName, DeepCopy(frames[i]),
                            VideoQualityAnalyzerInterface::DecoderStats());
    analyzer.OnFrameDecoded(kReceiverPeerName, DeepCopy(frames[i]),
                            VideoQualityAnalyzerInterface::DecoderStats());
  }
  for (int i = 0; i < 1; ++i) {
    analyzer.OnFrameRendered(kSenderPeerName, DeepCopy(frames[i]));
    analyzer.OnFrameRendered(kReceiverPeerName, DeepCopy(frames[i]));
  }
  // Give analyzer some time to process frames on async thread. The computations
  // have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
  // means we have an issue!
  SleepMs(100);
  analyzer.Stop();
  AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
  // 6 frames towards each of the 2 receiving peers => 12 comparisons total.
  EXPECT_EQ(analyzer_stats.comparisons_done, 12);
  // The last frames in flight size has to reflect the amount of frame in flight
  // before all of them were sent to the comparison when Stop() was invoked.
  std::vector<StatsSample> frames_in_flight_sizes =
      GetSortedSamples(analyzer_stats.frames_in_flight_left_count);
  EXPECT_EQ(frames_in_flight_sizes.back().value, 5)
      << ToString(frames_in_flight_sizes);
  // Global counters aggregate over both receiving peers, so receiver side
  // stages (received/decoded/rendered) are doubled compared to the per-stream
  // counters checked below.
  FrameCounters frame_counters = analyzer.GetGlobalCounters();
  EXPECT_EQ(frame_counters.captured, 6);
  EXPECT_EQ(frame_counters.pre_encoded, 5);
  EXPECT_EQ(frame_counters.encoded, 4);
  EXPECT_EQ(frame_counters.received, 6);
  EXPECT_EQ(frame_counters.decoded, 4);
  EXPECT_EQ(frame_counters.rendered, 2);
  // One stream delivered to two peers => two stats entries.
  EXPECT_EQ(analyzer.GetStats().size(), 2lu);
  {
    FrameCounters stream_counters = analyzer.GetPerStreamCounters().at(
        StatsKey(kStreamLabel, kSenderPeerName, kReceiverPeerName));
    EXPECT_EQ(stream_counters.captured, 6);
    EXPECT_EQ(stream_counters.pre_encoded, 5);
    EXPECT_EQ(stream_counters.encoded, 4);
    EXPECT_EQ(stream_counters.received, 3);
    EXPECT_EQ(stream_counters.decoded, 2);
    EXPECT_EQ(stream_counters.rendered, 1);
  }
  {
    FrameCounters stream_counters = analyzer.GetPerStreamCounters().at(
        StatsKey(kStreamLabel, kSenderPeerName, kSenderPeerName));
    EXPECT_EQ(stream_counters.captured, 6);
    EXPECT_EQ(stream_counters.pre_encoded, 5);
    EXPECT_EQ(stream_counters.encoded, 4);
    EXPECT_EQ(stream_counters.received, 3);
    EXPECT_EQ(stream_counters.decoded, 2);
    EXPECT_EQ(stream_counters.rendered, 1);
  }
}
} // namespace
} // namespace webrtc_pc_e2e
} // namespace webrtc