Add support of multiple peers into DefaultVideoQualityAnalyzer

Bug: webrtc:11631
Change-Id: I8c43efcfdccc441c85e199984ae1ce565c1d12fe
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/176411
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Andrey Logvin <landrey@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31519}
Artem Titov 2020-06-12 17:06:45 +02:00 committed by Commit Bot
parent 72c0622a06
commit 7a2f0fa99f
7 changed files with 1067 additions and 220 deletions
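
For orientation before the per-file diffs: a minimal sketch of how the multi-peer API introduced here is driven, distilled from the new tests at the bottom of this change (peer names and the logging are illustrative, not part of the diff):

// Sketch only: mirrors the call sequence exercised by the tests in this change.
DefaultVideoQualityAnalyzer analyzer(
    /*heavy_metrics_computation_enabled=*/false);
// Start() now takes the full list of peer names instead of assuming 2 peers.
analyzer.Start("test_case",
               std::vector<std::string>{"alice", "bob", "charlie"},
               /*max_threads_count=*/1);
// ... drive OnFrameCaptured/OnFramePreEncode/.../OnFrameRendered as usual,
// passing the acting peer's name as the first argument.
analyzer.Stop();
// Stats are now keyed by (stream_label, sender, receiver) triples.
for (const StatsKey& key : analyzer.GetKnownVideoStreams()) {
  FrameCounters counters = analyzer.GetPerStreamCounters().at(key);
  RTC_LOG(INFO) << key.ToString() << " rendered=" << counters.rendered;
}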

View File

@@ -512,6 +512,7 @@ if (!build_with_chromium) {
"../../../api/video:video_frame",
"../../../api/video:video_frame_i420",
"../../../modules/rtp_rtcp:rtp_rtcp_format",
"../../../rtc_base:stringutils",
"../../../system_wrappers",
]
}
@@ -605,6 +606,7 @@ if (!build_with_chromium) {
]
deps = [
":multi_head_queue",
"../..:perf_test",
"../../../api:array_view",
"../../../api:video_quality_analyzer_api",

View File

@@ -20,6 +20,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "rtc_base/cpu_time.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
@@ -36,6 +37,7 @@ void LogFrameCounters(const std::string& name, const FrameCounters& counters) {
RTC_LOG(INFO) << "[" << name << "] Pre encoded : " << counters.pre_encoded;
RTC_LOG(INFO) << "[" << name << "] Encoded : " << counters.encoded;
RTC_LOG(INFO) << "[" << name << "] Received : " << counters.received;
RTC_LOG(INFO) << "[" << name << "] Decoded : " << counters.decoded;
RTC_LOG(INFO) << "[" << name << "] Rendered : " << counters.rendered;
RTC_LOG(INFO) << "[" << name << "] Dropped : " << counters.dropped;
}
@@ -47,6 +49,15 @@ void LogStreamInternalStats(const std::string& name, const StreamStats& stats) {
<< stats.dropped_before_encoder;
}
template <typename T>
absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
auto it = map.find(key);
if (it == map.end()) {
return absl::nullopt;
}
return it->second;
}
} // namespace
void RateCounter::AddEvent(Timestamp event_time) {
@@ -66,9 +77,52 @@ double RateCounter::GetEventsPerSecond() const {
(event_last_time_ - event_first_time_).us() * kMicrosPerSecond;
}
std::string StatsKey::ToString() const {
rtc::StringBuilder out;
out << stream_label << "_" << sender << "_" << receiver;
return out.str();
}
bool operator<(const StatsKey& a, const StatsKey& b) {
if (a.stream_label != b.stream_label) {
return a.stream_label < b.stream_label;
}
if (a.sender != b.sender) {
return a.sender < b.sender;
}
return a.receiver < b.receiver;
}
bool operator==(const StatsKey& a, const StatsKey& b) {
return a.stream_label == b.stream_label && a.sender == b.sender &&
a.receiver == b.receiver;
}
std::string InternalStatsKey::ToString() const {
rtc::StringBuilder out;
out << "stream=" << stream << "_sender=" << sender
<< "_receiver=" << receiver;
return out.str();
}
bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) {
if (a.stream != b.stream) {
return a.stream < b.stream;
}
if (a.sender != b.sender) {
return a.sender < b.sender;
}
return a.receiver < b.receiver;
}
bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
return a.stream == b.stream && a.sender == b.sender &&
a.receiver == b.receiver;
}
DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
bool heavy_metrics_computation_enabled,
int max_frames_in_flight_per_stream_count)
size_t max_frames_in_flight_per_stream_count)
: heavy_metrics_computation_enabled_(heavy_metrics_computation_enabled),
max_frames_in_flight_per_stream_count_(
max_frames_in_flight_per_stream_count),
@@ -82,6 +136,7 @@ void DefaultVideoQualityAnalyzer::Start(
rtc::ArrayView<const std::string> peer_names,
int max_threads_count) {
test_label_ = std::move(test_case_name);
peers_ = std::make_unique<NamesCollection>(peer_names);
for (int i = 0; i < max_threads_count; i++) {
auto thread = std::make_unique<rtc::PlatformThread>(
&DefaultVideoQualityAnalyzer::ProcessComparisonsThread, this,
@@ -107,68 +162,109 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
// |next_frame_id| is atomic, so we needn't lock here.
uint16_t frame_id = next_frame_id_++;
Timestamp start_time = Timestamp::MinusInfinity();
size_t peer_index = peers_->index(peer_name);
size_t stream_index;
{
rtc::CritScope crit(&lock_);
// Create a local copy of start_time_ to access it under |comparison_lock_|
// without holding a |lock_|
// Create a local copy of start_time_ to access it under
// |comparison_lock_| without holding a |lock_|
start_time = start_time_;
stream_index = streams_.AddIfAbsent(stream_label);
}
{
// Ensure stats for this stream exists.
rtc::CritScope crit(&comparison_lock_);
if (stream_stats_.find(stream_label) == stream_stats_.end()) {
stream_stats_.insert({stream_label, StreamStats()});
// Assume that the first freeze was before first stream frame captured.
// This way time before the first freeze would be counted as time between
// freezes.
stream_last_freeze_end_time_.insert({stream_label, start_time});
for (size_t i = 0; i < peers_->size(); ++i) {
if (i == peer_index) {
continue;
}
InternalStatsKey stats_key(stream_index, peer_index, i);
if (stream_stats_.find(stats_key) == stream_stats_.end()) {
stream_stats_.insert({stats_key, StreamStats()});
// Assume that the first freeze was before the first stream frame was
// captured. This way the time before the first freeze will be counted as
// time between freezes.
stream_last_freeze_end_time_.insert({stats_key, start_time});
} else {
// When we see some |stream_label| for the first time we need to create
// a stream stats object for it and set up some state, but we need to do
// it only once and for all receivers, so on the next frame with the same
// |stream_label| we can be sure that it's already done and we needn't
// scan through all peers again.
break;
}
}
}
{
rtc::CritScope crit(&lock_);
stream_to_sender_[stream_index] = peer_index;
frame_counters_.captured++;
stream_frame_counters_[stream_label].captured++;
for (size_t i = 0; i < peers_->size(); ++i) {
if (i != peer_index) {
InternalStatsKey key(stream_index, peer_index, i);
stream_frame_counters_[key].captured++;
}
}
StreamState* state = &stream_states_[stream_label];
auto state_it = stream_states_.find(stream_index);
if (state_it == stream_states_.end()) {
stream_states_.emplace(stream_index,
StreamState(peer_index, peers_->size()));
}
StreamState* state = &stream_states_.at(stream_index);
state->PushBack(frame_id);
// Update frames in flight info.
auto it = captured_frames_in_flight_.find(frame_id);
if (it != captured_frames_in_flight_.end()) {
// We overflow uint16_t and hit previous frame id and this frame is still
// in flight. It means that this stream wasn't rendered for long time and
// we need to process existing frame as dropped.
auto stats_it = frame_stats_.find(frame_id);
RTC_DCHECK(stats_it != frame_stats_.end());
// If we overflow uint16_t and hit a previous frame id while that frame is
// still in flight, it means that this stream wasn't rendered for a long
// time, so we need to process the existing frame as dropped.
for (size_t i = 0; i < peers_->size(); ++i) {
if (i == peer_index) {
continue;
}
uint16_t oldest_frame_id = state->PopFront();
RTC_DCHECK_EQ(frame_id, oldest_frame_id);
frame_counters_.dropped++;
stream_frame_counters_[stream_label].dropped++;
AddComparison(it->second, absl::nullopt, true, stats_it->second);
uint16_t oldest_frame_id = state->PopFront(i);
RTC_DCHECK_EQ(frame_id, oldest_frame_id);
frame_counters_.dropped++;
InternalStatsKey key(stream_index, peer_index, i);
stream_frame_counters_.at(key).dropped++;
rtc::CritScope crit1(&comparison_lock_);
analyzer_stats_.frames_in_flight_left_count.AddSample(
captured_frames_in_flight_.size());
AddComparison(InternalStatsKey(stream_index, peer_index, i),
it->second.frame(), absl::nullopt, true,
it->second.GetStatsForPeer(i));
}
captured_frames_in_flight_.erase(it);
frame_stats_.erase(stats_it);
}
captured_frames_in_flight_.insert(
std::pair<uint16_t, VideoFrame>(frame_id, frame));
captured_frames_in_flight_.emplace(
frame_id,
FrameInFlight(stream_index, frame,
/*captured_time=*/Now(), peer_index, peers_->size()));
// Set frame id on local copy of the frame
captured_frames_in_flight_.at(frame_id).set_id(frame_id);
frame_stats_.insert(std::pair<uint16_t, FrameStats>(
frame_id, FrameStats(stream_label, /*captured_time=*/Now())));
captured_frames_in_flight_.at(frame_id).SetFrameId(frame_id);
// Update history stream<->frame mapping
for (auto it = stream_to_frame_id_history_.begin();
it != stream_to_frame_id_history_.end(); ++it) {
it->second.erase(frame_id);
}
stream_to_frame_id_history_[stream_label].insert(frame_id);
stream_to_frame_id_history_[stream_index].insert(frame_id);
// If the state has too many frames in flight => remove the oldest queued
// frame in order to avoid using too much memory.
if (state->GetAliveFramesCount() > max_frames_in_flight_per_stream_count_) {
uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead();
auto removed_count = captured_frames_in_flight_.erase(frame_id_to_remove);
RTC_DCHECK_EQ(removed_count, 1)
auto it = captured_frames_in_flight_.find(frame_id_to_remove);
RTC_CHECK(it != captured_frames_in_flight_.end())
<< "Frame with ID " << frame_id_to_remove
<< " is expected to be in flight, but hasn't been found in "
<< "|captured_frames_in_flight_|";
bool is_removed = it->second.RemoveFrame();
RTC_DCHECK(is_removed)
<< "Invalid stream state: alive frame is removed already";
}
}
@@ -179,12 +275,18 @@ void DefaultVideoQualityAnalyzer::OnFramePreEncode(
absl::string_view peer_name,
const webrtc::VideoFrame& frame) {
rtc::CritScope crit(&lock_);
auto it = frame_stats_.find(frame.id());
RTC_DCHECK(it != frame_stats_.end())
auto it = captured_frames_in_flight_.find(frame.id());
RTC_DCHECK(it != captured_frames_in_flight_.end())
<< "Frame id=" << frame.id() << " not found";
frame_counters_.pre_encoded++;
stream_frame_counters_[it->second.stream_label].pre_encoded++;
it->second.pre_encode_time = Now();
size_t peer_index = peers_->index(peer_name);
for (size_t i = 0; i < peers_->size(); ++i) {
if (i != peer_index) {
InternalStatsKey key(it->second.stream(), peer_index, i);
stream_frame_counters_.at(key).pre_encoded++;
}
}
it->second.SetPreEncodeTime(Now());
}
void DefaultVideoQualityAnalyzer::OnFrameEncoded(
@@ -193,18 +295,23 @@ void DefaultVideoQualityAnalyzer::OnFrameEncoded(
const webrtc::EncodedImage& encoded_image,
const EncoderStats& stats) {
rtc::CritScope crit(&lock_);
auto it = frame_stats_.find(frame_id);
RTC_DCHECK(it != frame_stats_.end());
auto it = captured_frames_in_flight_.find(frame_id);
RTC_DCHECK(it != captured_frames_in_flight_.end());
// For SVC we can receive multiple encoded images for one frame, so to cover
// all cases we have to pick the last encode time.
if (it->second.encoded_time.IsInfinite()) {
if (!it->second.HasEncodedTime()) {
// Increase counters only when we meet this frame first time.
frame_counters_.encoded++;
stream_frame_counters_[it->second.stream_label].encoded++;
size_t peer_index = peers_->index(peer_name);
for (size_t i = 0; i < peers_->size(); ++i) {
if (i != peer_index) {
InternalStatsKey key(it->second.stream(), peer_index, i);
stream_frame_counters_.at(key).encoded++;
}
}
}
it->second.encoded_time = Now();
it->second.encoded_image_size = encoded_image.size();
it->second.target_encode_bitrate += stats.target_encode_bitrate;
it->second.OnFrameEncoded(Now(), encoded_image.size(),
stats.target_encode_bitrate);
}
void DefaultVideoQualityAnalyzer::OnFrameDropped(
@@ -218,8 +325,11 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode(
uint16_t frame_id,
const webrtc::EncodedImage& input_image) {
rtc::CritScope crit(&lock_);
auto it = frame_stats_.find(frame_id);
if (it == frame_stats_.end()) {
size_t peer_index = peers_->index(peer_name);
auto it = captured_frames_in_flight_.find(frame_id);
if (it == captured_frames_in_flight_.end() ||
it->second.HasReceivedTime(peer_index)) {
// It means this frame was predecoded before, so we can skip it. It may
// happen when we have multiple simulcast streams in one track and received
// the same picture from two different streams because SFU can't reliably
@@ -227,12 +337,11 @@
// from the same frame it has relayed right before for the first stream.
return;
}
RTC_DCHECK(it->second.received_time.IsInfinite())
<< "Received multiple spatial layers for stream_label="
<< it->second.stream_label;
frame_counters_.received++;
stream_frame_counters_[it->second.stream_label].received++;
it->second.decode_start_time = Now();
InternalStatsKey key(it->second.stream(),
stream_to_sender_.at(it->second.stream()), peer_index);
stream_frame_counters_.at(key).received++;
// Determine the time of the last received packet of this video frame.
RTC_DCHECK(!input_image.PacketInfos().empty());
int64_t last_receive_time =
@@ -242,7 +351,10 @@ void DefaultVideoQualityAnalyzer::OnFramePreDecode(
return a.receive_time_ms() < b.receive_time_ms();
})
->receive_time_ms();
it->second.received_time = Timestamp::Millis(last_receive_time);
it->second.OnFramePreDecode(
peer_index,
/*received_time=*/Timestamp::Millis(last_receive_time),
/*decode_start_time=*/Now());
}
void DefaultVideoQualityAnalyzer::OnFrameDecoded(
@@ -250,8 +362,11 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded(
const webrtc::VideoFrame& frame,
const DecoderStats& stats) {
rtc::CritScope crit(&lock_);
auto it = frame_stats_.find(frame.id());
if (it == frame_stats_.end()) {
size_t peer_index = peers_->index(peer_name);
auto it = captured_frames_in_flight_.find(frame.id());
if (it == captured_frames_in_flight_.end() ||
it->second.HasDecodeEndTime(peer_index)) {
// It means this frame was decoded before, so we can skip it. It may happen
// when we have multiple simulcast streams in one track and received
// the same picture from two different streams because SFU can't reliably
@@ -260,16 +375,21 @@ void DefaultVideoQualityAnalyzer::OnFrameDecoded(
return;
}
frame_counters_.decoded++;
stream_frame_counters_[it->second.stream_label].decoded++;
it->second.decode_end_time = Now();
InternalStatsKey key(it->second.stream(),
stream_to_sender_.at(it->second.stream()), peer_index);
stream_frame_counters_.at(key).decoded++;
it->second.SetDecodeEndTime(peer_index, Now());
}
void DefaultVideoQualityAnalyzer::OnFrameRendered(
absl::string_view peer_name,
const webrtc::VideoFrame& raw_frame) {
rtc::CritScope crit(&lock_);
auto stats_it = frame_stats_.find(raw_frame.id());
if (stats_it == frame_stats_.end()) {
size_t peer_index = peers_->index(peer_name);
auto frame_it = captured_frames_in_flight_.find(raw_frame.id());
if (frame_it == captured_frames_in_flight_.end() ||
frame_it->second.HasRenderedTime(peer_index)) {
// It means this frame was rendered before, so we can skip it. It may happen
// when we have multiple simulcast streams in one track and received
// the same picture from two different streams because SFU can't reliably
@@ -277,7 +397,6 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
// from the same frame it has relayed right before for the first stream.
return;
}
FrameStats* frame_stats = &stats_it->second;
// Copy entire video frame including video buffer to ensure that analyzer
// won't hold any WebRTC internal buffers.
@@ -285,76 +404,80 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
frame.set_video_frame_buffer(
I420Buffer::Copy(*raw_frame.video_frame_buffer()->ToI420()));
// Find corresponding captured frame.
FrameInFlight* frame_in_flight = &frame_it->second;
absl::optional<VideoFrame> captured_frame = frame_in_flight->frame();
const size_t stream_index = frame_in_flight->stream();
StreamState* state = &stream_states_.at(stream_index);
const InternalStatsKey stats_key(stream_index, state->owner(), peer_index);
// Update frames counters.
frame_counters_.rendered++;
stream_frame_counters_[frame_stats->stream_label].rendered++;
stream_frame_counters_.at(stats_key).rendered++;
// Update current frame stats.
frame_stats->rendered_time = Now();
frame_stats->rendered_frame_width = frame.width();
frame_stats->rendered_frame_height = frame.height();
// Find corresponding captured frame.
auto frame_it = captured_frames_in_flight_.find(frame.id());
absl::optional<VideoFrame> captured_frame =
frame_it != captured_frames_in_flight_.end()
? absl::optional<VideoFrame>(frame_it->second)
: absl::nullopt;
frame_in_flight->OnFrameRendered(peer_index, Now(), frame.width(),
frame.height());
// After we have received a frame here we need to check if there are any
// dropped frames between this one and the last one rendered for this video
// stream.
const std::string& stream_label = frame_stats->stream_label;
StreamState* state = &stream_states_[stream_label];
int dropped_count = 0;
while (!state->Empty() && state->Front() != frame.id()) {
while (!state->IsEmpty(peer_index) &&
state->Front(peer_index) != frame.id()) {
dropped_count++;
uint16_t dropped_frame_id = state->PopFront();
uint16_t dropped_frame_id = state->PopFront(peer_index);
// Frame with id |dropped_frame_id| was dropped. We need:
// 1. Update global and stream frame counters
// 2. Extract corresponding frame from |captured_frames_in_flight_|
// 3. Extract corresponding frame stats from |frame_stats_|
// 4. Send extracted frame to comparison with dropped=true
// 5. Cleanup dropped frame
// 3. Send extracted frame to comparison with dropped=true
// 4. Cleanup dropped frame
frame_counters_.dropped++;
stream_frame_counters_[stream_label].dropped++;
stream_frame_counters_.at(stats_key).dropped++;
auto dropped_frame_stats_it = frame_stats_.find(dropped_frame_id);
RTC_DCHECK(dropped_frame_stats_it != frame_stats_.end());
auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
absl::optional<VideoFrame> dropped_frame =
dropped_frame_it != captured_frames_in_flight_.end()
? absl::optional<VideoFrame>(dropped_frame_it->second)
: absl::nullopt;
RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end());
absl::optional<VideoFrame> dropped_frame = dropped_frame_it->second.frame();
dropped_frame_it->second.MarkDropped(peer_index);
AddComparison(dropped_frame, absl::nullopt, true,
dropped_frame_stats_it->second);
{
rtc::CritScope crit1(&comparison_lock_);
analyzer_stats_.frames_in_flight_left_count.AddSample(
captured_frames_in_flight_.size());
AddComparison(stats_key, dropped_frame, absl::nullopt, true,
dropped_frame_it->second.GetStatsForPeer(peer_index));
}
frame_stats_.erase(dropped_frame_stats_it);
if (dropped_frame_it != captured_frames_in_flight_.end()) {
if (dropped_frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(dropped_frame_it);
}
}
RTC_DCHECK(!state->Empty());
state->PopFront();
RTC_DCHECK(!state->IsEmpty(peer_index));
state->PopFront(peer_index);
if (state->last_rendered_frame_time()) {
frame_stats->prev_frame_rendered_time =
state->last_rendered_frame_time().value();
if (state->last_rendered_frame_time(peer_index)) {
frame_in_flight->SetPrevFrameRenderedTime(
peer_index, state->last_rendered_frame_time(peer_index).value());
}
state->set_last_rendered_frame_time(frame_stats->rendered_time);
state->SetLastRenderedFrameTime(peer_index,
frame_in_flight->rendered_time(peer_index));
{
rtc::CritScope cr(&comparison_lock_);
stream_stats_[stream_label].skipped_between_rendered.AddSample(
dropped_count);
stream_stats_[stats_key].skipped_between_rendered.AddSample(dropped_count);
}
AddComparison(captured_frame, frame, false, *frame_stats);
if (frame_it != captured_frames_in_flight_.end()) {
{
rtc::CritScope crit(&comparison_lock_);
analyzer_stats_.frames_in_flight_left_count.AddSample(
captured_frames_in_flight_.size());
AddComparison(stats_key, captured_frame, frame, false,
frame_in_flight->GetStatsForPeer(peer_index));
}
if (frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(frame_it);
}
frame_stats_.erase(stats_it);
}
void DefaultVideoQualityAnalyzer::OnEncoderError(
@@ -396,46 +519,58 @@ void DefaultVideoQualityAnalyzer::Stop() {
// between freezes.
rtc::CritScope crit1(&lock_);
rtc::CritScope crit2(&comparison_lock_);
for (auto& item : stream_stats_) {
const StreamState& state = stream_states_[item.first];
// If there are no freezes in the call we have to report
// time_between_freezes_ms as call duration and in such case
// |stream_last_freeze_end_time_| for this stream will be |start_time_|.
// If there is freeze, then we need add time from last rendered frame
// to last freeze end as time between freezes.
if (state.last_rendered_frame_time()) {
item.second.time_between_freezes_ms.AddSample(
(state.last_rendered_frame_time().value() -
stream_last_freeze_end_time_.at(item.first))
.ms());
for (auto& state_entry : stream_states_) {
const size_t stream_index = state_entry.first;
const StreamState& stream_state = state_entry.second;
for (size_t i = 0; i < peers_->size(); ++i) {
if (i == static_cast<size_t>(stream_state.owner())) {
continue;
}
InternalStatsKey stats_key(stream_index, stream_state.owner(), i);
// If there are no freezes in the call we have to report
// time_between_freezes_ms as call duration and in such case
// |stream_last_freeze_end_time_| for this stream will be |start_time_|.
// If there is a freeze, then we need to add the time from the last
// rendered frame to the last freeze end as time between freezes.
if (stream_state.last_rendered_frame_time(i)) {
stream_stats_[stats_key].time_between_freezes_ms.AddSample(
stream_state.last_rendered_frame_time(i).value().ms() -
stream_last_freeze_end_time_.at(stats_key).ms());
}
}
}
analyzer_stats_.frames_in_flight_left_count.AddSample(
captured_frames_in_flight_.size());
}
ReportResults();
}
std::string DefaultVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
rtc::CritScope crit1(&lock_);
auto it = frame_stats_.find(frame_id);
if (it != frame_stats_.end()) {
return it->second.stream_label;
auto it = captured_frames_in_flight_.find(frame_id);
if (it != captured_frames_in_flight_.end()) {
return streams_.name(it->second.stream());
}
for (auto hist_it = stream_to_frame_id_history_.begin();
hist_it != stream_to_frame_id_history_.end(); ++hist_it) {
auto hist_set_it = hist_it->second.find(frame_id);
if (hist_set_it != hist_it->second.end()) {
return hist_it->first;
return streams_.name(hist_it->first);
}
}
RTC_CHECK(false) << "Unknown frame_id=" << frame_id;
}
std::set<std::string> DefaultVideoQualityAnalyzer::GetKnownVideoStreams()
const {
std::set<StatsKey> DefaultVideoQualityAnalyzer::GetKnownVideoStreams() const {
rtc::CritScope crit1(&lock_);
rtc::CritScope crit2(&comparison_lock_);
std::set<std::string> out;
std::set<StatsKey> out;
for (auto& item : stream_stats_) {
out.insert(item.first);
RTC_LOG(INFO) << item.first.ToString() << " ==> "
<< ToStatsKey(item.first).ToString();
out.insert(ToStatsKey(item.first));
}
return out;
}
@@ -445,16 +580,24 @@ const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() const {
return frame_counters_;
}
const std::map<std::string, FrameCounters>&
std::map<StatsKey, FrameCounters>
DefaultVideoQualityAnalyzer::GetPerStreamCounters() const {
rtc::CritScope crit(&lock_);
return stream_frame_counters_;
std::map<StatsKey, FrameCounters> out;
for (auto& item : stream_frame_counters_) {
out.emplace(ToStatsKey(item.first), item.second);
}
return out;
}
std::map<std::string, StreamStats> DefaultVideoQualityAnalyzer::GetStats()
const {
rtc::CritScope cri(&comparison_lock_);
return stream_stats_;
std::map<StatsKey, StreamStats> DefaultVideoQualityAnalyzer::GetStats() const {
rtc::CritScope crit1(&lock_);
rtc::CritScope crit2(&comparison_lock_);
std::map<StatsKey, StreamStats> out;
for (auto& item : stream_stats_) {
out.emplace(ToStatsKey(item.first), item.second);
}
return out;
}
AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
@@ -463,25 +606,27 @@ AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
}
void DefaultVideoQualityAnalyzer::AddComparison(
InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats) {
StartExcludingCpuThreadTime();
rtc::CritScope crit(&comparison_lock_);
analyzer_stats_.comparisons_queue_size.AddSample(comparisons_.size());
// If there are too many computations waiting in the queue, we won't provide
// the frames themselves, to make future computations lighter.
if (comparisons_.size() >= kMaxActiveComparisons) {
comparisons_.emplace_back(absl::nullopt, absl::nullopt, dropped,
frame_stats, OverloadReason::kCpu);
comparisons_.emplace_back(std::move(stats_key), absl::nullopt,
absl::nullopt, dropped, std::move(frame_stats),
OverloadReason::kCpu);
} else {
OverloadReason overload_reason = OverloadReason::kNone;
if (!captured && !dropped) {
overload_reason = OverloadReason::kMemory;
}
comparisons_.emplace_back(std::move(captured), std::move(rendered), dropped,
frame_stats, overload_reason);
comparisons_.emplace_back(std::move(stats_key), std::move(captured),
std::move(rendered), dropped,
std::move(frame_stats), overload_reason);
}
comparison_available_event_.Set();
StopExcludingCpuThreadTime();
@@ -541,8 +686,8 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
const FrameStats& frame_stats = comparison.frame_stats;
rtc::CritScope crit(&comparison_lock_);
auto stats_it = stream_stats_.find(frame_stats.stream_label);
RTC_CHECK(stats_it != stream_stats_.end());
auto stats_it = stream_stats_.find(comparison.stats_key);
RTC_CHECK(stats_it != stream_stats_.end()) << comparison.stats_key.ToString();
StreamStats* stats = &stats_it->second;
analyzer_stats_.comparisons_done++;
if (comparison.overload_reason == OverloadReason::kCpu) {
@@ -595,7 +740,7 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
3 * average_time_between_rendered_frames_ms)) {
stats->freeze_time_ms.AddSample(time_between_rendered_frames.ms());
auto freeze_end_it =
stream_last_freeze_end_time_.find(frame_stats.stream_label);
stream_last_freeze_end_time_.find(comparison.stats_key);
RTC_DCHECK(freeze_end_it != stream_last_freeze_end_time_.end());
stats->time_between_freezes_ms.AddSample(
(frame_stats.prev_frame_rendered_time - freeze_end_it->second)
@@ -612,15 +757,16 @@ void DefaultVideoQualityAnalyzer::ReportResults() {
rtc::CritScope crit1(&lock_);
rtc::CritScope crit2(&comparison_lock_);
for (auto& item : stream_stats_) {
ReportResults(GetTestCaseName(item.first), item.second,
stream_frame_counters_.at(item.first));
ReportResults(GetTestCaseName(StatsKeyToMetricName(ToStatsKey(item.first))),
item.second, stream_frame_counters_.at(item.first));
}
test::PrintResult("cpu_usage", "", test_label_.c_str(), GetCpuUsagePercent(),
"%", false, ImproveDirection::kSmallerIsBetter);
LogFrameCounters("Global", frame_counters_);
for (auto& item : stream_stats_) {
LogFrameCounters(item.first, stream_frame_counters_.at(item.first));
LogStreamInternalStats(item.first, item.second);
LogFrameCounters(ToStatsKey(item.first).ToString(),
stream_frame_counters_.at(item.first));
LogStreamInternalStats(ToStatsKey(item.first).ToString(), item.second);
}
if (!analyzer_stats_.comparisons_queue_size.IsEmpty()) {
RTC_LOG(INFO) << "comparisons_queue_size min="
@@ -748,6 +894,20 @@ Timestamp DefaultVideoQualityAnalyzer::Now() {
return clock_->CurrentTime();
}
StatsKey DefaultVideoQualityAnalyzer::ToStatsKey(
const InternalStatsKey& key) const {
return StatsKey(streams_.name(key.stream), peers_->name(key.sender),
peers_->name(key.receiver));
}
std::string DefaultVideoQualityAnalyzer::StatsKeyToMetricName(
const StatsKey& key) {
if (peers_->size() <= 2) {
return key.stream_label;
}
return key.ToString();
}
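
An illustration of the resulting metric names (assuming a stream "video" sent by "alice"; the multi-peer names follow StatsKey::ToString() above):

// peers_ = {"alice", "bob"}             -> metric name stays "video"
// peers_ = {"alice", "bob", "charlie"}  -> "video_alice_bob" and
//                                          "video_alice_charlie"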
void DefaultVideoQualityAnalyzer::StartMeasuringCpuProcessTime() {
rtc::CritScope lock(&cpu_measurement_lock_);
cpu_time_ -= rtc::GetProcessCpuTimeNanos();
@@ -775,35 +935,208 @@ double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() {
return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
}
DefaultVideoQualityAnalyzer::FrameStats::FrameStats(std::string stream_label,
Timestamp captured_time)
: stream_label(std::move(stream_label)), captured_time(captured_time) {}
DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison(
InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats,
OverloadReason overload_reason)
: captured(std::move(captured)),
: stats_key(std::move(stats_key)),
captured(std::move(captured)),
rendered(std::move(rendered)),
dropped(dropped),
frame_stats(std::move(frame_stats)),
overload_reason(overload_reason) {}
uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront() {
uint16_t frame_id = frame_ids_.front();
frame_ids_.pop_front();
if (dead_frames_count_ > 0) {
dead_frames_count_--;
uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront(size_t peer) {
absl::optional<uint16_t> frame_id = frame_ids_.PopFront(peer);
RTC_DCHECK(frame_id.has_value());
// If the owner's (alive frames) queue is longer than all the others, then
// also pop a frame from it, because that frame has been received by all
// receivers.
size_t owner_size = frame_ids_.size(owner_);
size_t other_size = 0;
for (size_t i = 0; i < frame_ids_.readers_count(); ++i) {
size_t cur_size = frame_ids_.size(i);
if (i != owner_ && cur_size > other_size) {
other_size = cur_size;
}
}
return frame_id;
if (owner_size > other_size) {
absl::optional<uint16_t> alive_frame_id = frame_ids_.PopFront(owner_);
RTC_DCHECK(alive_frame_id.has_value());
RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value());
}
return frame_id.value();
}
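
A short worked trace of the logic above, with hypothetical frame ids, owner index 0 and receivers 1 and 2:

// PushBack(5); PushBack(6);   heads: owner=[5,6] r1=[5,6] r2=[5,6]
// PopFront(1) -> 5            heads: owner=[5,6] r1=[6]   r2=[5,6]
//   owner size 2, longest other 2 -> owner not advanced (r2 still needs 5).
// PopFront(2) -> 5            heads: owner=[5,6] r1=[6]   r2=[6]
//   owner size 2 > longest other 1 -> owner pops 5 too: frame 5 has now
//   been received by all receivers and its payload can be released.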
uint16_t DefaultVideoQualityAnalyzer::StreamState::MarkNextAliveFrameAsDead() {
uint16_t frame_id = frame_ids_[dead_frames_count_];
dead_frames_count_++;
return frame_id;
absl::optional<uint16_t> frame_id = frame_ids_.PopFront(owner_);
RTC_DCHECK(frame_id.has_value());
return frame_id.value();
}
void DefaultVideoQualityAnalyzer::StreamState::SetLastRenderedFrameTime(
size_t peer,
Timestamp time) {
auto it = last_rendered_frame_time_.find(peer);
if (it == last_rendered_frame_time_.end()) {
last_rendered_frame_time_.insert({peer, time});
} else {
it->second = time;
}
}
absl::optional<Timestamp>
DefaultVideoQualityAnalyzer::StreamState::last_rendered_frame_time(
size_t peer) const {
return MaybeGetValue(last_rendered_frame_time_, peer);
}
bool DefaultVideoQualityAnalyzer::FrameInFlight::RemoveFrame() {
if (!frame_) {
return false;
}
frame_ = absl::nullopt;
return true;
}
void DefaultVideoQualityAnalyzer::FrameInFlight::SetFrameId(uint16_t id) {
if (frame_) {
frame_->set_id(id);
}
}
std::vector<size_t>
DefaultVideoQualityAnalyzer::FrameInFlight::GetPeersWhichDidntReceive() const {
std::vector<size_t> out;
for (size_t i = 0; i < peers_count_; ++i) {
auto it = receiver_stats_.find(i);
if (i != owner_ && it != receiver_stats_.end() &&
it->second.rendered_time.IsInfinite()) {
out.push_back(i);
}
}
return out;
}
bool DefaultVideoQualityAnalyzer::FrameInFlight::HaveAllPeersReceived() const {
for (size_t i = 0; i < peers_count_; ++i) {
if (i == owner_) {
continue;
}
auto it = receiver_stats_.find(i);
if (it == receiver_stats_.end()) {
return false;
}
if (!it->second.dropped && it->second.rendered_time.IsInfinite()) {
return false;
}
}
return true;
}
void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameEncoded(
webrtc::Timestamp time,
int64_t encoded_image_size,
uint32_t target_encode_bitrate) {
encoded_time_ = time;
encoded_image_size_ = encoded_image_size;
target_encode_bitrate_ += target_encode_bitrate;
}
void DefaultVideoQualityAnalyzer::FrameInFlight::OnFramePreDecode(
size_t peer,
webrtc::Timestamp received_time,
webrtc::Timestamp decode_start_time) {
receiver_stats_[peer].received_time = received_time;
receiver_stats_[peer].decode_start_time = decode_start_time;
}
bool DefaultVideoQualityAnalyzer::FrameInFlight::HasReceivedTime(
size_t peer) const {
auto it = receiver_stats_.find(peer);
if (it == receiver_stats_.end()) {
return false;
}
return it->second.received_time.IsFinite();
}
bool DefaultVideoQualityAnalyzer::FrameInFlight::HasDecodeEndTime(
size_t peer) const {
auto it = receiver_stats_.find(peer);
if (it == receiver_stats_.end()) {
return false;
}
return it->second.decode_end_time.IsFinite();
}
void DefaultVideoQualityAnalyzer::FrameInFlight::OnFrameRendered(
size_t peer,
webrtc::Timestamp time,
int width,
int height) {
receiver_stats_[peer].rendered_time = time;
receiver_stats_[peer].rendered_frame_width = width;
receiver_stats_[peer].rendered_frame_height = height;
}
bool DefaultVideoQualityAnalyzer::FrameInFlight::HasRenderedTime(
size_t peer) const {
auto it = receiver_stats_.find(peer);
if (it == receiver_stats_.end()) {
return false;
}
return it->second.rendered_time.IsFinite();
}
DefaultVideoQualityAnalyzer::FrameStats
DefaultVideoQualityAnalyzer::FrameInFlight::GetStatsForPeer(size_t peer) const {
FrameStats stats(captured_time_);
stats.pre_encode_time = pre_encode_time_;
stats.encoded_time = encoded_time_;
stats.target_encode_bitrate = target_encode_bitrate_;
stats.encoded_image_size = encoded_image_size_;
absl::optional<ReceiverFrameStats> receiver_stats =
MaybeGetValue<ReceiverFrameStats>(receiver_stats_, peer);
if (receiver_stats.has_value()) {
stats.received_time = receiver_stats->received_time;
stats.decode_start_time = receiver_stats->decode_start_time;
stats.decode_end_time = receiver_stats->decode_end_time;
stats.rendered_time = receiver_stats->rendered_time;
stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time;
stats.rendered_frame_width = receiver_stats->rendered_frame_width;
stats.rendered_frame_height = receiver_stats->rendered_frame_height;
}
return stats;
}
size_t DefaultVideoQualityAnalyzer::NamesCollection::AddIfAbsent(
absl::string_view name) {
auto it = index_.find(name);
if (it != index_.end()) {
return it->second;
}
size_t out = names_.size();
size_t old_capacity = names_.capacity();
names_.emplace_back(name);
size_t new_capacity = names_.capacity();
if (old_capacity == new_capacity) {
index_.emplace(names_[out], out);
} else {
// Reallocation happened in the vector, so we need to rebuild |index_|
index_.clear();
for (size_t i = 0; i < names_.size(); ++i) {
index_.emplace(names_[i], i);
}
}
return out;
}
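
A side note on why AddIfAbsent() rebuilds |index_| on reallocation (the code above is correct; this sketch only illustrates the hazard, assuming the usual small-string optimization): the |index_| keys are absl::string_view objects pointing into the std::string elements of |names_|. When the vector reallocates, its strings are moved, and for short strings the character data lives inside the string object itself, so the old views would dangle:

std::vector<std::string> names;
names.emplace_back("a");              // short string: data stored inline (SSO)
absl::string_view view = names[0];    // view into the vector's element
names.reserve(names.capacity() + 1);  // forces reallocation; the string moves
// |view| now points into freed storage: using it is undefined behavior.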
} // namespace webrtc_pc_e2e

View File

@@ -29,6 +29,7 @@
#include "rtc_base/numerics/samples_stats_counter.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/clock.h"
#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
#include "test/testsupport/perf_test.h"
namespace webrtc {
@@ -37,7 +38,7 @@ namespace webrtc_pc_e2e {
// WebRTC will request a key frame after 3 seconds if no frames were received.
// We assume max frame rate ~60 fps, so 270 frames will cover max freeze without
// key frame request.
constexpr int kDefaultMaxFramesInFlightPerStream = 270;
constexpr size_t kDefaultMaxFramesInFlightPerStream = 270;
class RateCounter {
public:
@@ -125,13 +126,51 @@ struct AnalyzerStats {
// it is queued when its captured frame was already removed due to high memory
// usage for that video stream.
int64_t memory_overloaded_comparisons_done = 0;
// Count of frames in flight in the analyzer, measured when a new comparison
// is added and after the analyzer was stopped.
SamplesStatsCounter frames_in_flight_left_count;
};
struct StatsKey {
StatsKey(std::string stream_label, std::string sender, std::string receiver)
: stream_label(std::move(stream_label)),
sender(std::move(sender)),
receiver(std::move(receiver)) {}
std::string ToString() const;
// Label of the video stream to which these stats belong.
std::string stream_label;
// Name of the peer that sent this stream.
std::string sender;
// Name of the peer on which the stream was received.
std::string receiver;
};
// Required to use StatsKey as std::map key.
bool operator<(const StatsKey& a, const StatsKey& b);
bool operator==(const StatsKey& a, const StatsKey& b);
struct InternalStatsKey {
InternalStatsKey(size_t stream, size_t sender, size_t receiver)
: stream(stream), sender(sender), receiver(receiver) {}
std::string ToString() const;
size_t stream;
size_t sender;
size_t receiver;
};
// Required to use InternalStatsKey as std::map key.
bool operator<(const InternalStatsKey& a, const InternalStatsKey& b);
bool operator==(const InternalStatsKey& a, const InternalStatsKey& b);
class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
public:
explicit DefaultVideoQualityAnalyzer(
bool heavy_metrics_computation_enabled = true,
int max_frames_in_flight_per_stream_count =
size_t max_frames_in_flight_per_stream_count =
kDefaultMaxFramesInFlightPerStream);
~DefaultVideoQualityAnalyzer() override;
@@ -169,21 +208,19 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
const StatsReports& stats_reports) override {}
// Returns the set of streams that were seen during the test call.
std::set<std::string> GetKnownVideoStreams() const;
std::set<StatsKey> GetKnownVideoStreams() const;
const FrameCounters& GetGlobalCounters() const;
// Returns frame counters per stream key. Valid stream keys can be obtained
// by calling GetKnownVideoStreams().
const std::map<std::string, FrameCounters>& GetPerStreamCounters() const;
std::map<StatsKey, FrameCounters> GetPerStreamCounters() const;
// Returns video quality stats per stream key. Valid stream keys can be
// obtained by calling GetKnownVideoStreams().
std::map<std::string, StreamStats> GetStats() const;
std::map<StatsKey, StreamStats> GetStats() const;
AnalyzerStats GetAnalyzerStats() const;
private:
struct FrameStats {
FrameStats(std::string stream_label, Timestamp captured_time);
std::string stream_label;
FrameStats(Timestamp captured_time) : captured_time(captured_time) {}
// Frame events timestamp.
Timestamp captured_time;
@@ -196,12 +233,11 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
Timestamp rendered_time = Timestamp::MinusInfinity();
Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
int64_t encoded_image_size = 0;
uint32_t target_encode_bitrate = 0;
absl::optional<int> rendered_frame_width = absl::nullopt;
absl::optional<int> rendered_frame_height = absl::nullopt;
int64_t encoded_image_size = 0;
};
// Describes why comparison was done in overloaded mode (without calculating
@@ -223,12 +259,14 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// because there were too many comparisons in the queue. |dropped| shows
// whether the frame was dropped or not.
struct FrameComparison {
FrameComparison(absl::optional<VideoFrame> captured,
FrameComparison(InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats,
OverloadReason overload_reason);
InternalStatsKey stats_key;
// Frames can be omitted if there are too many computations waiting in the
// queue.
absl::optional<VideoFrame> captured;
@@ -244,49 +282,175 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// Represents the current state of a video stream.
class StreamState {
public:
void PushBack(uint16_t frame_id) { frame_ids_.emplace_back(frame_id); }
StreamState(size_t owner, size_t peers_count)
: owner_(owner), frame_ids_(peers_count) {}
uint16_t PopFront();
size_t owner() const { return owner_; }
bool Empty() { return frame_ids_.empty(); }
uint16_t Front() { return frame_ids_.front(); }
int GetAliveFramesCount() { return frame_ids_.size() - dead_frames_count_; }
void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); }
// Crash if state is empty.
uint16_t PopFront(size_t peer);
bool IsEmpty(size_t peer) const { return frame_ids_.IsEmpty(peer); }
// Crash if state is empty.
uint16_t Front(size_t peer) const { return frame_ids_.Front(peer).value(); }
size_t GetAliveFramesCount() { return frame_ids_.size(owner_); }
uint16_t MarkNextAliveFrameAsDead();
void set_last_rendered_frame_time(Timestamp time) {
last_rendered_frame_time_ = time;
}
absl::optional<Timestamp> last_rendered_frame_time() const {
return last_rendered_frame_time_;
}
void SetLastRenderedFrameTime(size_t peer, Timestamp time);
absl::optional<Timestamp> last_rendered_frame_time(size_t peer) const;
private:
// Index of the owner. Owner's queue in |frame_ids_| will keep alive frames.
const size_t owner_;
// To correctly determine dropped frames we have to know sequence of frames
// in each stream so we will keep a list of frame ids inside the stream.
// When the frame is rendered, we will pop ids from the list for until id
// will match with rendered one. All ids before matched one can be
// considered as dropped:
// This list is represented by multi head queue of frame ids with separate
// head for each receiver. When the frame is rendered, we will pop ids from
// the corresponding head until id will match with rendered one. All ids
// before matched one can be considered as dropped:
//
// | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 |
//
// If we received a frame with id frame_id3, then we will pop frame_id1 and
// frame_id2, consider those frames as dropped, and then compare the
// received frame with the one from |captured_frames_in_flight_| with id
// frame_id3.
std::deque<uint16_t> frame_ids_;
// Count of dead frames in the beginning of the deque.
int dead_frames_count_;
absl::optional<Timestamp> last_rendered_frame_time_ = absl::nullopt;
//
// To track alive frames (frames whose payload is still stored in
// |captured_frames_in_flight_|) the head which corresponds to |owner_| is
// used, so that head will point to the first alive frame in the frames
// list.
MultiHeadQueue<uint16_t> frame_ids_;
std::map<size_t, Timestamp> last_rendered_frame_time_;
};
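
For reference, one plausible shape of MultiHeadQueue<T>, inferred purely from the operations StreamState uses above (PushBack, PopFront(reader), Front(reader), IsEmpty(reader), size(reader), readers_count()); the real implementation lives in test/pc/e2e/analyzer/video/multi_head_queue.h and may differ:

#include <deque>
#include <vector>
#include "absl/types/optional.h"

// Sketch: a FIFO replicated per reader, so each reader ("head") consumes
// elements at its own pace.
template <typename T>
class MultiHeadQueue {
 public:
  explicit MultiHeadQueue(size_t readers_count) : queues_(readers_count) {}

  // Appends |value| for every reader.
  void PushBack(T value) {
    for (auto& queue : queues_) queue.push_back(value);
  }

  // Pops the next element for |reader|, or nullopt if that head is empty.
  absl::optional<T> PopFront(size_t reader) {
    if (queues_[reader].empty()) return absl::nullopt;
    T out = queues_[reader].front();
    queues_[reader].pop_front();
    return out;
  }

  absl::optional<T> Front(size_t reader) const {
    if (queues_[reader].empty()) return absl::nullopt;
    return queues_[reader].front();
  }

  bool IsEmpty(size_t reader) const { return queues_[reader].empty(); }
  size_t size(size_t reader) const { return queues_[reader].size(); }
  size_t readers_count() const { return queues_.size(); }

 private:
  std::vector<std::deque<T>> queues_;
};

Under this reading, StreamState::PopFront(peer) pops the given peer's head, and once the owner's head is strictly the longest one the frame has been consumed by every receiver, so the owner's (alive) head is advanced too.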
enum State { kNew, kActive, kStopped };
void AddComparison(absl::optional<VideoFrame> captured,
struct ReceiverFrameStats {
// Time when last packet of a frame was received.
Timestamp received_time = Timestamp::MinusInfinity();
Timestamp decode_start_time = Timestamp::MinusInfinity();
Timestamp decode_end_time = Timestamp::MinusInfinity();
Timestamp rendered_time = Timestamp::MinusInfinity();
Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
absl::optional<int> rendered_frame_width = absl::nullopt;
absl::optional<int> rendered_frame_height = absl::nullopt;
bool dropped = false;
};
class FrameInFlight {
public:
FrameInFlight(size_t stream,
VideoFrame frame,
Timestamp captured_time,
size_t owner,
size_t peers_count)
: stream_(stream),
owner_(owner),
peers_count_(peers_count),
frame_(std::move(frame)),
captured_time_(captured_time) {}
size_t stream() const { return stream_; }
const absl::optional<VideoFrame>& frame() const { return frame_; }
// Returns whether the frame was removed or not.
bool RemoveFrame();
void SetFrameId(uint16_t id);
std::vector<size_t> GetPeersWhichDidntReceive() const;
bool HaveAllPeersReceived() const;
void SetPreEncodeTime(webrtc::Timestamp time) { pre_encode_time_ = time; }
void OnFrameEncoded(webrtc::Timestamp time,
int64_t encoded_image_size,
uint32_t target_encode_bitrate);
bool HasEncodedTime() const { return encoded_time_.IsFinite(); }
void OnFramePreDecode(size_t peer,
webrtc::Timestamp received_time,
webrtc::Timestamp decode_start_time);
bool HasReceivedTime(size_t peer) const;
void SetDecodeEndTime(size_t peer, webrtc::Timestamp time) {
receiver_stats_[peer].decode_end_time = time;
}
bool HasDecodeEndTime(size_t peer) const;
void OnFrameRendered(size_t peer,
webrtc::Timestamp time,
int width,
int height);
bool HasRenderedTime(size_t peer) const;
// Crash if rendered time is not set for specified |peer|.
webrtc::Timestamp rendered_time(size_t peer) const {
return receiver_stats_.at(peer).rendered_time;
}
void MarkDropped(size_t peer) { receiver_stats_[peer].dropped = true; }
void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) {
receiver_stats_[peer].prev_frame_rendered_time = time;
}
FrameStats GetStatsForPeer(size_t peer) const;
private:
const size_t stream_;
const size_t owner_;
const size_t peers_count_;
absl::optional<VideoFrame> frame_;
// Frame events timestamp.
Timestamp captured_time_;
Timestamp pre_encode_time_ = Timestamp::MinusInfinity();
Timestamp encoded_time_ = Timestamp::MinusInfinity();
int64_t encoded_image_size_ = 0;
uint32_t target_encode_bitrate_ = 0;
std::map<size_t, ReceiverFrameStats> receiver_stats_;
};
class NamesCollection {
public:
NamesCollection() = default;
explicit NamesCollection(rtc::ArrayView<const std::string> names) {
names_ = std::vector<std::string>(names.begin(), names.end());
for (size_t i = 0; i < names_.size(); ++i) {
index_.emplace(names_[i], i);
}
}
size_t size() const { return names_.size(); }
size_t index(absl::string_view name) const { return index_.at(name); }
const std::string& name(size_t index) const { return names_[index]; }
bool HasName(absl::string_view name) const {
return index_.find(name) != index_.end();
}
// Adds the specified |name| to the collection if it isn't present.
// Returns the index which corresponds to the specified |name|.
size_t AddIfAbsent(absl::string_view name);
private:
std::vector<std::string> names_;
std::map<absl::string_view, size_t> index_;
};
void AddComparison(InternalStatsKey stats_key,
absl::optional<VideoFrame> captured,
absl::optional<VideoFrame> rendered,
bool dropped,
FrameStats frame_stats);
FrameStats frame_stats)
RTC_EXCLUSIVE_LOCKS_REQUIRED(comparison_lock_);
static void ProcessComparisonsThread(void* obj);
void ProcessComparisons();
void ProcessComparison(const FrameComparison& comparison);
@@ -306,6 +470,11 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// Returns name of current test case for reporting.
std::string GetTestCaseName(const std::string& stream_label) const;
Timestamp Now();
StatsKey ToStatsKey(const InternalStatsKey& key) const
RTC_EXCLUSIVE_LOCKS_REQUIRED(lock_);
// Returns the string representation of the stats key for metric naming.
// Used for backward compatibility of metric naming in the 2-peer case.
std::string StatsKeyToMetricName(const StatsKey& key);
void StartMeasuringCpuProcessTime();
void StopMeasuringCpuProcessTime();
@@ -314,15 +483,19 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
double GetCpuUsagePercent();
const bool heavy_metrics_computation_enabled_;
const int max_frames_in_flight_per_stream_count_;
const size_t max_frames_in_flight_per_stream_count_;
webrtc::Clock* const clock_;
std::atomic<uint16_t> next_frame_id_{0};
std::string test_label_;
std::unique_ptr<NamesCollection> peers_;
rtc::CriticalSection lock_;
State state_ RTC_GUARDED_BY(lock_) = State::kNew;
Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity();
// Mapping from stream label to unique size_t value to use in stats and avoid
// extra string copying.
NamesCollection streams_ RTC_GUARDED_BY(lock_);
// Frames that were captured by all streams and still aren't rendered by any
// stream or deemed dropped. Frame with id X can be removed from this map if:
// 1. The frame with id X was received in OnFrameRendered
@@ -330,27 +503,29 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
// 3. Next available frame id for newly captured frame is X
// 4. There are too many frames in flight for the current video stream and X
// is the oldest frame id in this stream.
std::map<uint16_t, VideoFrame> captured_frames_in_flight_
std::map<uint16_t, FrameInFlight> captured_frames_in_flight_
RTC_GUARDED_BY(lock_);
// Global frames count for all video streams.
FrameCounters frame_counters_ RTC_GUARDED_BY(lock_);
// Frame counters per each stream.
std::map<std::string, FrameCounters> stream_frame_counters_
// Frame counters per stream, per receiver.
std::map<InternalStatsKey, FrameCounters> stream_frame_counters_
RTC_GUARDED_BY(lock_);
std::map<uint16_t, FrameStats> frame_stats_ RTC_GUARDED_BY(lock_);
std::map<std::string, StreamState> stream_states_ RTC_GUARDED_BY(lock_);
// Map from stream index in |streams_| to its StreamState.
std::map<size_t, StreamState> stream_states_ RTC_GUARDED_BY(lock_);
// Map from stream index in |streams_| to sender peer index in |peers_|.
std::map<size_t, size_t> stream_to_sender_ RTC_GUARDED_BY(lock_);
// Stores history mapping between stream labels and frame ids. Updated when
// frame id overlap. It required to properly return stream label after 1st
// frame from simulcast streams was already rendered and last is still
// encoding.
std::map<std::string, std::set<uint16_t>> stream_to_frame_id_history_
// Stores history mapping between stream index in |streams_| and frame ids.
// Updated on frame id overlap. It is required to properly return the stream
// label after the 1st frame from simulcast streams was already rendered
// while the last one is still encoding.
std::map<size_t, std::set<uint16_t>> stream_to_frame_id_history_
RTC_GUARDED_BY(lock_);
rtc::CriticalSection comparison_lock_;
std::map<std::string, StreamStats> stream_stats_
std::map<InternalStatsKey, StreamStats> stream_stats_
RTC_GUARDED_BY(comparison_lock_);
std::map<std::string, Timestamp> stream_last_freeze_end_time_
std::map<InternalStatsKey, Timestamp> stream_last_freeze_end_time_
RTC_GUARDED_BY(comparison_lock_);
std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(comparison_lock_);
AnalyzerStats analyzer_stats_ RTC_GUARDED_BY(comparison_lock_);

View File

@@ -18,6 +18,7 @@
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
@@ -26,6 +27,8 @@ namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
using StatsSample = ::webrtc::SamplesStatsCounter::StatsSample;
constexpr int kAnalyzerMaxThreadsCount = 1;
constexpr int kMaxFramesInFlightPerStream = 10;
constexpr int kFrameWidth = 320;
@@ -66,6 +69,24 @@ VideoFrame DeepCopy(const VideoFrame& frame) {
return copy;
}
std::vector<StatsSample> GetSortedSamples(const SamplesStatsCounter& counter) {
rtc::ArrayView<const StatsSample> view = counter.GetTimedSamples();
std::vector<StatsSample> out(view.begin(), view.end());
std::sort(out.begin(), out.end(),
[](const StatsSample& a, const StatsSample& b) {
return a.time < b.time;
});
return out;
}
std::string ToString(const std::vector<StatsSample>& values) {
rtc::StringBuilder out;
for (const auto& v : values) {
out << "{ time_ms=" << v.time.ms() << "; value=" << v.value << "}, ";
}
return out.str();
}
TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedAndThenAllFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
@@ -117,6 +138,87 @@ TEST(DefaultVideoQualityAnalyzerTest,
EXPECT_EQ(frame_counters.dropped, 0);
}
TEST(DefaultVideoQualityAnalyzerTest,
FillMaxMemoryReceiveAllMemoryOverloadedAndThenAllFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
analyzer.Start("test_case",
std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
// Fill the analyzer's memory up to the limit.
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
// Receive all frames.
for (const uint16_t& frame_id : frames_order) {
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
frames_order.clear();
// Give the analyzer some time to process frames on the async thread. The
// computations have to be fast (heavy metrics are disabled!), so if they
// don't fit in 100ms it means we have an issue!
SleepMs(100);
// Overload the analyzer's memory over the limit.
for (int i = 0; i < 2 * kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kSenderPeerName, frame);
analyzer.OnFrameEncoded(kSenderPeerName, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
// Receive all frames.
for (const uint16_t& frame_id : frames_order) {
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
}
// Give the analyzer some time to process frames on the async thread. The
// computations have to be fast (heavy metrics are disabled!), so if they
// don't fit in 100ms it means we have an issue!
SleepMs(100);
analyzer.Stop();
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done,
kMaxFramesInFlightPerStream);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 3);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 3);
EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 3);
EXPECT_EQ(frame_counters.dropped, 0);
}
TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedHalfDroppedAndThenHalfFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
@@ -213,6 +315,11 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
std::vector<StatsSample> frames_in_flight_sizes =
GetSortedSamples(stats.frames_in_flight_left_count);
EXPECT_EQ(frames_in_flight_sizes.back().value, 0)
<< ToString(frames_in_flight_sizes);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream / 2);
@@ -221,6 +328,231 @@ TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream / 2);
}
TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwice) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
analyzer.Start("test_case",
std::vector<std::string>{kSenderPeerName, kReceiverPeerName},
kAnalyzerMaxThreadsCount);
VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
captured_frame.set_id(
analyzer.OnFrameCaptured(kSenderPeerName, kStreamLabel, captured_frame));
analyzer.OnFramePreEncode(kSenderPeerName, captured_frame);
analyzer.OnFrameEncoded(kSenderPeerName, captured_frame.id(),
FakeEncode(captured_frame),
VideoQualityAnalyzerInterface::EncoderStats());
VideoFrame received_frame = DeepCopy(captured_frame);
analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
received_frame = DeepCopy(captured_frame);
analyzer.OnFramePreDecode(kReceiverPeerName, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kReceiverPeerName, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kReceiverPeerName, received_frame);
// Give the analyzer some time to process frames on the async thread. The
// computations have to be fast (heavy metrics are disabled!), so if they
// don't fit in 100ms it means we have an issue!
SleepMs(100);
analyzer.Stop();
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, 1);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, 1);
EXPECT_EQ(frame_counters.received, 1);
EXPECT_EQ(frame_counters.decoded, 1);
EXPECT_EQ(frame_counters.rendered, 1);
EXPECT_EQ(frame_counters.dropped, 0);
}
TEST(DefaultVideoQualityAnalyzerTest, NormalScenario2Receivers) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
constexpr char kAlice[] = "alice";
constexpr char kBob[] = "bob";
constexpr char kCharlie[] = "charlie";
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
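// Alice captures and encodes the full set of frames for the single stream.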
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(analyzer.OnFrameCaptured(kAlice, kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(kAlice, frame);
SleepMs(20);
analyzer.OnFrameEncoded(kAlice, frame.id(), FakeEncode(frame),
VideoQualityAnalyzerInterface::EncoderStats());
}
SleepMs(50);
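// Bob receives every second frame; the skipped frames will be counted as
// dropped for him.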
for (size_t i = 1; i < frames_order.size(); i += 2) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(kBob, received_frame.id(),
FakeEncode(received_frame));
SleepMs(30);
analyzer.OnFrameDecoded(kBob, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
SleepMs(10);
analyzer.OnFrameRendered(kBob, received_frame);
}
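// Charlie receives the same subset of frames, but with different decode and
// render timings.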
for (size_t i = 1; i < frames_order.size(); i += 2) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(kCharlie, received_frame.id(),
FakeEncode(received_frame));
SleepMs(40);
analyzer.OnFrameDecoded(kCharlie, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
SleepMs(5);
analyzer.OnFrameRendered(kCharlie, received_frame);
}
// Give the analyzer some time to process frames on the async thread. The
// computations have to be fast (heavy metrics are disabled!), so if they
// don't fit into 100ms it means we have an issue!
SleepMs(100);
analyzer.Stop();
AnalyzerStats analyzer_stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(analyzer_stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(analyzer_stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream);
EXPECT_EQ(analyzer.GetKnownVideoStreams().size(), 2lu);
for (auto stream_key : analyzer.GetKnownVideoStreams()) {
FrameCounters stream_counters =
analyzer.GetPerStreamCounters().at(stream_key);
// On some devices the pipeline can be too slow, so we can't enforce strict
// constraints here. Just check that enough frames passed through the whole
// pipeline.
EXPECT_GE(stream_counters.captured, 10);
EXPECT_GE(stream_counters.pre_encoded, 10);
EXPECT_GE(stream_counters.encoded, 10);
EXPECT_GE(stream_counters.received, 5);
EXPECT_GE(stream_counters.decoded, 5);
EXPECT_GE(stream_counters.rendered, 5);
EXPECT_GE(stream_counters.dropped, 5);
}
std::map<StatsKey, StreamStats> stats = analyzer.GetStats();
const StatsKey kAliceBobStats(kStreamLabel, kAlice, kBob);
const StatsKey kAliceCharlieStats(kStreamLabel, kAlice, kCharlie);
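// With multiple receivers, stats are keyed per (stream, sender, receiver)
// tuple, so one entry per receiver is expected here.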
EXPECT_EQ(stats.size(), 2lu);
{
auto it = stats.find(kAliceBobStats);
ASSERT_FALSE(it == stats.end());
ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty());
EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(),
kFrameWidth * kFrameHeight - 1);
EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(),
kFrameWidth * kFrameHeight + 1);
}
{
auto it = stats.find(kAliceCharlieStats);
ASSERT_FALSE(it == stats.end());
ASSERT_FALSE(it->second.encode_time_ms.IsEmpty());
EXPECT_GE(it->second.encode_time_ms.GetMin(), 20);
ASSERT_FALSE(it->second.decode_time_ms.IsEmpty());
EXPECT_GE(it->second.decode_time_ms.GetMin(), 30);
ASSERT_FALSE(it->second.resolution_of_rendered_frame.IsEmpty());
EXPECT_GE(it->second.resolution_of_rendered_frame.GetMin(),
kFrameWidth * kFrameHeight - 1);
EXPECT_LE(it->second.resolution_of_rendered_frame.GetMax(),
kFrameWidth * kFrameHeight + 1);
}
}
TEST(DefaultVideoQualityAnalyzerTest, OneFrameReceivedTwiceWith2Receivers) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
constexpr char kAlice[] = "alice";
constexpr char kBob[] = "bob";
constexpr char kCharlie[] = "charlie";
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
analyzer.Start("test_case", std::vector<std::string>{kAlice, kBob, kCharlie},
kAnalyzerMaxThreadsCount);
VideoFrame captured_frame = NextFrame(frame_generator.get(), 0);
captured_frame.set_id(
analyzer.OnFrameCaptured(kAlice, kStreamLabel, captured_frame));
analyzer.OnFramePreEncode(kAlice, captured_frame);
analyzer.OnFrameEncoded(kAlice, captured_frame.id(),
FakeEncode(captured_frame),
VideoQualityAnalyzerInterface::EncoderStats());
VideoFrame received_frame = DeepCopy(captured_frame);
analyzer.OnFramePreDecode(kBob, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kBob, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kBob, received_frame);
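// Deliver the same frame to Bob a second time. Charlie never receives this
// frame, so a single comparison is expected and the duplicate is ignored.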
received_frame = DeepCopy(captured_frame);
analyzer.OnFramePreDecode(kBob, received_frame.id(),
FakeEncode(received_frame));
analyzer.OnFrameDecoded(kBob, received_frame,
VideoQualityAnalyzerInterface::DecoderStats());
analyzer.OnFrameRendered(kBob, received_frame);
// Give the analyzer some time to process frames on the async thread. The
// computations have to be fast (heavy metrics are disabled!), so if they
// don't fit into 100ms it means we have an issue!
SleepMs(100);
analyzer.Stop();
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, 1);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, 1);
EXPECT_EQ(frame_counters.received, 1);
EXPECT_EQ(frame_counters.decoded, 1);
EXPECT_EQ(frame_counters.rendered, 1);
EXPECT_EQ(frame_counters.dropped, 0);
}
} // namespace
} // namespace webrtc_pc_e2e
} // namespace webrtc


@ -29,8 +29,8 @@ template <typename T>
class MultiHeadQueue {
public:
// Creates a queue with exactly |readers_count| readers.
explicit MultiHeadQueue(int readers_count) {
for (int i = 0; i < readers_count; ++i) {
explicit MultiHeadQueue(size_t readers_count) {
for (size_t i = 0; i < readers_count; ++i) {
queues_.push_back(std::deque<T>());
}
}
@ -42,8 +42,8 @@ class MultiHeadQueue {
}
}
// Extract element from specified head. Complexity O(readers_count).
absl::optional<T> PopFront(int index) {
// Extract element from specified head. Complexity O(1).
absl::optional<T> PopFront(size_t index) {
RTC_CHECK_LT(index, queues_.size());
if (queues_[index].empty()) {
return absl::nullopt;
@ -53,8 +53,8 @@ class MultiHeadQueue {
return out;
}
// Returns element at specified head. Complexity O(readers_count).
absl::optional<T> Front(int index) const {
// Returns element at specified head. Complexity O(1).
absl::optional<T> Front(size_t index) const {
RTC_CHECK_LT(index, queues_.size());
if (queues_[index].empty()) {
return absl::nullopt;
@ -62,15 +62,11 @@ class MultiHeadQueue {
return queues_[index].front();
}
// Returns true if for all readers there are no elements in the queue or
// false otherwise. Complexity O(readers_count).
bool IsEmpty() const {
for (auto& queue : queues_) {
if (!queue.empty()) {
return false;
}
}
return true;
// Returns true if there are no more elements in the queue for the specified
// head, or false otherwise. Complexity O(1).
bool IsEmpty(size_t index) const {
RTC_CHECK_LT(index, queues_.size());
return queues_[index].empty();
}
// Returns the size of the longest queue among all readers.
@ -85,6 +81,14 @@ class MultiHeadQueue {
return size;
}
// Returns the size of the specified queue. Complexity O(1).
size_t size(size_t index) const {
RTC_CHECK_LT(index, queues_.size());
return queues_[index].size();
}
size_t readers_count() const { return queues_.size(); }
private:
std::vector<std::deque<T>> queues_;
};
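For reference, a minimal usage sketch of the per-head API after this change
(assuming a PushBack(T) method that appends the value to every reader's head,
as the unit tests below imply):

MultiHeadQueue<int> queue(/*readers_count=*/2);
queue.PushBack(1);                           // visible to both readers
RTC_CHECK_EQ(queue.Front(0).value(), 1);     // reader 0 peeks its own head
RTC_CHECK_EQ(queue.PopFront(0).value(), 1);  // reader 0 consumes the element
RTC_CHECK(queue.IsEmpty(0));                 // reader 0 is drained now...
RTC_CHECK(!queue.IsEmpty(1));                // ...but reader 1 still sees it
RTC_CHECK_EQ(queue.size(1), 1u);             // per-head size added here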


@ -18,7 +18,7 @@ namespace {
TEST(MultiHeadQueueTest, GetOnEmpty) {
MultiHeadQueue<int> queue = MultiHeadQueue<int>(10);
EXPECT_TRUE(queue.IsEmpty());
EXPECT_TRUE(queue.IsEmpty(0));
for (int i = 0; i < 10; ++i) {
EXPECT_FALSE(queue.PopFront(i).has_value());
EXPECT_FALSE(queue.Front(i).has_value());
@ -35,7 +35,7 @@ TEST(MultiHeadQueueTest, SingleHeadOneAddOneRemove) {
EXPECT_TRUE(value.has_value());
EXPECT_EQ(value.value(), 1);
EXPECT_EQ(queue.size(), 0lu);
EXPECT_TRUE(queue.IsEmpty());
EXPECT_TRUE(queue.IsEmpty(0));
}
TEST(MultiHeadQueueTest, SingleHead) {


@ -110,19 +110,20 @@ class PeerConnectionE2EQualityTestSmokeTest : public ::testing::Test {
fixture->Run(run_params);
EXPECT_GE(fixture->GetRealTestDuration(), run_params.run_duration);
for (auto stream_label : video_analyzer_ptr->GetKnownVideoStreams()) {
for (auto stream_key : video_analyzer_ptr->GetKnownVideoStreams()) {
FrameCounters stream_counters =
video_analyzer_ptr->GetPerStreamCounters().at(stream_label);
video_analyzer_ptr->GetPerStreamCounters().at(stream_key);
// On some devices the pipeline can be too slow, so we actually can't
// force real constraints here. Let's just check that at least one
// frame passed the whole pipeline.
int64_t expected_min_fps = run_params.run_duration.seconds() * 15;
EXPECT_GE(stream_counters.captured, expected_min_fps) << stream_label;
EXPECT_GE(stream_counters.pre_encoded, 1) << stream_label;
EXPECT_GE(stream_counters.encoded, 1) << stream_label;
EXPECT_GE(stream_counters.received, 1) << stream_label;
EXPECT_GE(stream_counters.decoded, 1) << stream_label;
EXPECT_GE(stream_counters.rendered, 1) << stream_label;
EXPECT_GE(stream_counters.captured, expected_min_fps)
<< stream_key.ToString();
EXPECT_GE(stream_counters.pre_encoded, 1) << stream_key.ToString();
EXPECT_GE(stream_counters.encoded, 1) << stream_key.ToString();
EXPECT_GE(stream_counters.received, 1) << stream_key.ToString();
EXPECT_GE(stream_counters.decoded, 1) << stream_key.ToString();
EXPECT_GE(stream_counters.rendered, 1) << stream_key.ToString();
}
}
};