[DVQA] Add support for DVQA to pause/resume receiving of stream by peer

Bug: b/271542055, webrtc:14995
Change-Id: Ic02451347160f512588b6fef5d6ac4ad904b5e18
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/297440
Reviewed-by: Jeremy Leconte <jleconte@google.com>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#39568}
This commit is contained in:
Artem Titov 2023-03-15 18:01:17 +01:00 committed by WebRTC LUCI CQ
parent db9be7f194
commit ebce84a502
18 changed files with 1160 additions and 87 deletions

View File

@ -150,6 +150,17 @@ class VideoQualityAnalyzerInterface
// call.
virtual void UnregisterParticipantInCall(absl::string_view peer_name) {}
// Informs analyzer that peer `peer_name` is expected to receive stream
// `stream_label`.
virtual void OnPeerStartedReceiveVideoStream(absl::string_view peer_name,
absl::string_view stream_label) {
}
// Informs analyzer that peer `peer_name` shouldn't receive stream
// `stream_label`.
virtual void OnPeerStoppedReceiveVideoStream(absl::string_view peer_name,
absl::string_view stream_label) {
}
// Tells analyzer that the analysis is complete and it should calculate
// final statistics.
virtual void Stop() {}

View File

@ -685,6 +685,7 @@ if (rtc_include_tests) {
"../rtc_base/system:file_wrapper",
"pc/e2e:e2e_unittests",
"pc/e2e/analyzer/video:video_analyzer_unittests",
"pc/e2e/analyzer/video/dvqa:dvqa_unittests",
"peer_scenario/tests",
"scenario:scenario_unittests",
"time_controller:time_controller",

View File

@ -233,6 +233,7 @@ rtc_library("default_video_quality_analyzer_internal") {
"../../../../../api:scoped_refptr",
"../../../../../api/numerics",
"../../../../../api/units:data_size",
"../../../../../api/units:time_delta",
"../../../../../api/units:timestamp",
"../../../../../api/video:video_frame",
"../../../../../api/video:video_frame_type",
@ -246,6 +247,7 @@ rtc_library("default_video_quality_analyzer_internal") {
"../../../../../rtc_base/synchronization:mutex",
"../../../../../rtc_tools:video_quality_analysis",
"../../../../../system_wrappers",
"dvqa:pausable_state",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings:strings",
@ -492,6 +494,7 @@ if (rtc_include_tests) {
":default_video_quality_analyzer_internal",
"../../../..:test_support",
"../../../../../api/units:timestamp",
"../../../../../system_wrappers",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
@ -505,13 +508,17 @@ if (rtc_include_tests) {
"../../../..:test_support",
"../../../../../api:create_frame_generator",
"../../../../../api:rtp_packet_info",
"../../../../../api:time_controller",
"../../../../../api/test/metrics:global_metrics_logger_and_exporter",
"../../../../../api/units:time_delta",
"../../../../../api/units:timestamp",
"../../../../../api/video:encoded_image",
"../../../../../api/video:video_frame",
"../../../../../common_video",
"../../../../../rtc_base:stringutils",
"../../../../../rtc_tools:video_quality_analysis",
"../../../../../system_wrappers",
"../../../../time_controller",
]
}

View File

@ -27,6 +27,7 @@
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/clock.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
@ -203,9 +204,9 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
auto state_it = stream_states_.find(stream_index);
if (state_it == stream_states_.end()) {
stream_states_.emplace(
stream_index,
StreamState(peer_index, frame_receivers_indexes, captured_time));
stream_states_.emplace(stream_index,
StreamState(peer_index, frame_receivers_indexes,
captured_time, clock_));
}
StreamState* state = &stream_states_.at(stream_index);
state->PushBack(frame_id);
@ -222,6 +223,11 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
uint16_t oldest_frame_id = state->PopFront(i);
RTC_DCHECK_EQ(frame_id, oldest_frame_id);
if (state->GetPausableState(i)->IsPaused()) {
continue;
}
frame_counters_.dropped++;
InternalStatsKey key(stream_index, peer_index, i);
stream_frame_counters_.at(key).dropped++;
@ -528,40 +534,27 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
// After we received frame here we need to check if there are any dropped
// frames between this one and last one, that was rendered for this video
// stream.
int dropped_count = 0;
while (!state->IsEmpty(peer_index) &&
state->Front(peer_index) != frame.id()) {
dropped_count++;
uint16_t dropped_frame_id = state->PopFront(peer_index);
// Frame with id `dropped_frame_id` was dropped. We need:
// 1. Update global and stream frame counters
// 2. Extract corresponding frame from `captured_frames_in_flight_`
// 3. Send extracted frame to comparison with dropped=true
// 4. Cleanup dropped frame
frame_counters_.dropped++;
stream_frame_counters_.at(stats_key).dropped++;
auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end());
dropped_frame_it->second.MarkDropped(peer_index);
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
frames_comparator_.AddComparison(
stats_key, /*captured=*/absl::nullopt, /*rendered=*/absl::nullopt,
FrameComparisonType::kDroppedFrame,
dropped_frame_it->second.GetStatsForPeer(peer_index));
if (dropped_frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(dropped_frame_it);
}
}
int dropped_count = ProcessNotSeenFramesBeforeRendered(peer_index, frame.id(),
stats_key, *state);
RTC_DCHECK(!state->IsEmpty(peer_index));
state->PopFront(peer_index);
if (state->last_rendered_frame_time(peer_index)) {
if (state->last_rendered_frame_time(peer_index).has_value()) {
TimeDelta time_between_rendered_frames =
state->GetPausableState(peer_index)
->GetActiveDurationFrom(
*state->last_rendered_frame_time(peer_index));
if (state->GetPausableState(peer_index)->IsPaused()) {
// If stream is currently paused for this receiver, but we still received
// frame, we have to add time from last pause up to Now() to the time
// between rendered frames.
time_between_rendered_frames +=
Now() - state->GetPausableState(peer_index)->GetLastEventTime();
}
frame_in_flight->SetTimeBetweenRenderedFrames(peer_index,
time_between_rendered_frames);
frame_in_flight->SetPrevFrameRenderedTime(
peer_index, state->last_rendered_frame_time(peer_index).value());
peer_index, *state->last_rendered_frame_time(peer_index));
}
state->SetLastRenderedFrameTime(peer_index,
frame_in_flight->rendered_time(peer_index));
@ -734,6 +727,34 @@ void DefaultVideoQualityAnalyzer::UnregisterParticipantInCall(
}
}
void DefaultVideoQualityAnalyzer::OnPeerStartedReceiveVideoStream(
    absl::string_view peer_name,
    absl::string_view stream_label) {
  MutexLock lock(&mutex_);
  // Both the peer and the stream must already be known to the analyzer.
  RTC_CHECK(peers_->HasName(peer_name));
  RTC_CHECK(streams_.HasName(stream_label));
  const size_t peer_index = peers_->index(peer_name);
  const size_t stream_index = streams_.index(stream_label);
  auto state_it = stream_states_.find(stream_index);
  RTC_CHECK(state_it != stream_states_.end());
  // Mark the stream as active again for this receiver.
  state_it->second.GetPausableState(peer_index)->Resume();
}
void DefaultVideoQualityAnalyzer::OnPeerStoppedReceiveVideoStream(
    absl::string_view peer_name,
    absl::string_view stream_label) {
  MutexLock lock(&mutex_);
  // Both the peer and the stream must already be known to the analyzer.
  RTC_CHECK(peers_->HasName(peer_name));
  RTC_CHECK(streams_.HasName(stream_label));
  const size_t peer_index = peers_->index(peer_name);
  const size_t stream_index = streams_.index(stream_label);
  auto state_it = stream_states_.find(stream_index);
  RTC_CHECK(state_it != stream_states_.end());
  // Mark the stream as paused for this receiver.
  state_it->second.GetPausableState(peer_index)->Pause();
}
void DefaultVideoQualityAnalyzer::Stop() {
std::map<InternalStatsKey, Timestamp> last_rendered_frame_times;
{
@ -923,7 +944,8 @@ void DefaultVideoQualityAnalyzer::
// Add frames in flight for this stream into frames comparator.
// Frames in flight were not rendered, so they won't affect stream's
// last rendered frame time.
while (!stream_state.IsEmpty(peer_index)) {
while (!stream_state.IsEmpty(peer_index) &&
!stream_state.GetPausableState(peer_index)->IsPaused()) {
uint16_t frame_id = stream_state.PopFront(peer_index);
auto it = captured_frames_in_flight_.find(frame_id);
RTC_DCHECK(it != captured_frames_in_flight_.end());
@ -936,6 +958,103 @@ void DefaultVideoQualityAnalyzer::
}
}
int DefaultVideoQualityAnalyzer::ProcessNotSeenFramesBeforeRendered(
size_t peer_index,
uint16_t rendered_frame_id,
const InternalStatsKey& stats_key,
StreamState& state) {
int dropped_count = 0;
while (!state.IsEmpty(peer_index) &&
state.Front(peer_index) != rendered_frame_id) {
uint16_t next_frame_id = state.PopFront(peer_index);
auto next_frame_it = captured_frames_in_flight_.find(next_frame_id);
RTC_DCHECK(next_frame_it != captured_frames_in_flight_.end());
FrameInFlight& next_frame = next_frame_it->second;
// Depending if the receiver was subscribed to this stream or not at the
// time when frame was captured, the frame should be considered as dropped
// or superfluous (see below for explanation). Superfluous frames must be
// excluded from stats calculations.
//
// We should consider next cases:
// Legend:
// + - frame captured on the stream
// p - stream is paused
// r - stream is resumed
//
// last currently
// rendered rendered
// frame frame
// |---------------------- dropped -------------------------|
// (1) -[]---+---+---+---+---+---+---+---+---+---+---+---+---+---[]-> time
// | |
// | |
// |-- dropped ---┐ ┌- dropped -┐ ┌- dropped ---|
// (2) -[]---+---+---+-|-+---+-|-+---+---+-|-+---+-|-+---+---+---[]-> time
// | p r p r |
// | |
// |-- dropped ---┐ ┌------------ dropped ------------|
// (3) -[]---+---+---+-|-+---+-|-+---+---+---+---+---+-|-+---+---[]-> time
// p r p
//
// Cases explanation:
// (1) Regular media flow, frame is received after freeze.
// (2) Stream was paused and received multiple times. Frame is received
// after freeze from last resume.
// (3) Stream was paused and received multiple times. Frame is received
// after stream was paused because frame was already in the network.
//
// Based on that if stream wasn't paused when `next_frame_id` was captured,
// then `next_frame_id` should be considered as dropped. If stream was NOT
// resumed after `next_frame_id` was captured but we still received a
// `rendered_frame_id` on this stream, then `next_frame_id` also should
// be considered as dropped. In other cases `next_frame_id` should be
// considered as superfluous, because receiver wasn't expected to receive
// `next_frame_id` at all.
bool is_dropped = false;
bool is_paused = state.GetPausableState(peer_index)
->WasPausedAt(next_frame.captured_time());
if (!is_paused) {
is_dropped = true;
} else {
bool was_resumed_after =
state.GetPausableState(peer_index)
->WasResumedAfter(next_frame.captured_time());
if (!was_resumed_after) {
is_dropped = true;
}
}
if (is_dropped) {
dropped_count++;
// Frame with id `dropped_frame_id` was dropped. We need:
// 1. Update global and stream frame counters
// 2. Extract corresponding frame from `captured_frames_in_flight_`
// 3. Send extracted frame to comparison with dropped=true
// 4. Cleanup dropped frame
frame_counters_.dropped++;
stream_frame_counters_.at(stats_key).dropped++;
next_frame.MarkDropped(peer_index);
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
frames_comparator_.AddComparison(stats_key, /*captured=*/absl::nullopt,
/*rendered=*/absl::nullopt,
FrameComparisonType::kDroppedFrame,
next_frame.GetStatsForPeer(peer_index));
} else {
next_frame.MarkSuperfluous(peer_index);
}
if (next_frame_it->second.HaveAllPeersReceived()) {
captured_frames_in_flight_.erase(next_frame_it);
}
}
return dropped_count;
}
void DefaultVideoQualityAnalyzer::ReportResults() {
MutexLock lock(&mutex_);
for (auto& item : frames_comparator_.stream_stats()) {
@ -1047,8 +1166,7 @@ void DefaultVideoQualityAnalyzer::ReportResults(
{MetricMetadataKey::kExperimentalTestNameMetadataKey, test_label_}};
double sum_squared_interframe_delays_secs = 0;
Timestamp video_start_time = Timestamp::PlusInfinity();
Timestamp video_end_time = Timestamp::MinusInfinity();
double video_duration_ms = 0;
for (const SamplesStatsCounter::StatsSample& sample :
stats.time_between_rendered_frames_ms.GetTimedSamples()) {
double interframe_delay_ms = sample.value;
@ -1058,18 +1176,13 @@ void DefaultVideoQualityAnalyzer::ReportResults(
// to smoothness of video playback and includes both freezes and pauses.
sum_squared_interframe_delays_secs +=
interframe_delays_secs * interframe_delays_secs;
if (sample.time < video_start_time) {
video_start_time = sample.time;
}
if (sample.time > video_end_time) {
video_end_time = sample.time;
}
video_duration_ms += sample.value;
}
double harmonic_framerate_fps = 0;
TimeDelta video_duration = video_end_time - video_start_time;
if (sum_squared_interframe_delays_secs > 0.0 && video_duration.IsFinite()) {
if (sum_squared_interframe_delays_secs > 0.0) {
harmonic_framerate_fps =
video_duration.seconds<double>() / sum_squared_interframe_delays_secs;
video_duration_ms / 1000.0 / sum_squared_interframe_delays_secs;
}
metrics_logger_->LogMetric(

View File

@ -80,6 +80,10 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
void RegisterParticipantInCall(absl::string_view peer_name) override;
void UnregisterParticipantInCall(absl::string_view peer_name) override;
void OnPeerStartedReceiveVideoStream(absl::string_view peer_name,
absl::string_view stream_label) override;
void OnPeerStoppedReceiveVideoStream(absl::string_view peer_name,
absl::string_view stream_label) override;
void Stop() override;
std::string GetStreamLabel(uint16_t frame_id) override;
@ -118,6 +122,17 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
size_t peer_index)
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Processes frames for the peer identified by `peer_index` up to
// `rendered_frame_id` (excluded). Sends each dropped frame for comparison and
// discards superfluous frames (they were not expected to be received by
// `peer_index` and are not accounted for in the stats).
// Returns the number of dropped frames.
int ProcessNotSeenFramesBeforeRendered(size_t peer_index,
uint16_t rendered_frame_id,
const InternalStatsKey& stats_key,
StreamState& state)
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
// Report results for all metrics for all streams.
void ReportResults();
void ReportResults(const InternalStatsKey& key,

View File

@ -10,6 +10,7 @@
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h"
#include <unordered_map>
#include <utility>
#include <vector>
@ -24,7 +25,8 @@ namespace webrtc {
namespace {
template <typename T>
absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
absl::optional<T> MaybeGetValue(const std::unordered_map<size_t, T>& map,
size_t key) {
auto it = map.find(key);
if (it == map.end()) {
return absl::nullopt;
@ -63,7 +65,8 @@ std::vector<size_t> FrameInFlight::GetPeersWhichDidntReceive() const {
for (size_t peer : expected_receivers_) {
auto it = receiver_stats_.find(peer);
if (it == receiver_stats_.end() ||
(!it->second.dropped && it->second.rendered_time.IsInfinite())) {
(!it->second.dropped && !it->second.superfluous &&
it->second.rendered_time.IsInfinite())) {
out.push_back(peer);
}
}
@ -77,7 +80,8 @@ bool FrameInFlight::HaveAllPeersReceived() const {
return false;
}
if (!it->second.dropped && it->second.rendered_time.IsInfinite()) {
if (!it->second.dropped && !it->second.superfluous &&
it->second.rendered_time.IsInfinite()) {
return false;
}
}
@ -179,6 +183,8 @@ bool FrameInFlight::IsDropped(size_t peer) const {
FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const {
RTC_DCHECK_NE(frame_id_, VideoFrame::kNotSetId)
<< "Frame id isn't initialized";
RTC_DCHECK(!IsSuperfluous(peer))
<< "This frame is superfluous for peer " << peer;
FrameStats stats(frame_id_, captured_time_);
stats.pre_encode_time = pre_encode_time_;
stats.encoded_time = encoded_time_;
@ -196,6 +202,8 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const {
stats.decode_end_time = receiver_stats->decode_end_time;
stats.rendered_time = receiver_stats->rendered_time;
stats.prev_frame_rendered_time = receiver_stats->prev_frame_rendered_time;
stats.time_between_rendered_frames =
receiver_stats->time_between_rendered_frames;
stats.decoded_frame_width = receiver_stats->decoded_frame_width;
stats.decoded_frame_height = receiver_stats->decoded_frame_height;
stats.used_decoder = receiver_stats->used_decoder;
@ -206,4 +214,12 @@ FrameStats FrameInFlight::GetStatsForPeer(size_t peer) const {
return stats;
}
bool FrameInFlight::IsSuperfluous(size_t peer) const {
  // A peer with no recorded receiver stats has nothing marked superfluous.
  auto stats_it = receiver_stats_.find(peer);
  return stats_it != receiver_stats_.end() && stats_it->second.superfluous;
}
} // namespace webrtc

View File

@ -13,12 +13,14 @@
#include <map>
#include <set>
#include <unordered_map>
#include <utility>
#include <vector>
#include "absl/types/optional.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/units/data_size.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "api/video/video_frame.h"
#include "api/video/video_frame_type.h"
@ -34,6 +36,8 @@ struct ReceiverFrameStats {
Timestamp rendered_time = Timestamp::MinusInfinity();
Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
TimeDelta time_between_rendered_frames = TimeDelta::Zero();
// Type and encoded size of received frame.
VideoFrameType frame_type = VideoFrameType::kEmptyFrame;
DataSize encoded_image_size = DataSize::Bytes(0);
@ -46,6 +50,9 @@ struct ReceiverFrameStats {
bool dropped = false;
bool decoder_failed = false;
// Superfluous frames should not be used for stats calculation for that peer.
bool superfluous = false;
};
// Represents a frame which was sent by sender and is currently on the way to
@ -67,6 +74,9 @@ class FrameInFlight {
// Returns internal copy of source `VideoFrame` or `absl::nullopt` if it was
// removed before.
const absl::optional<VideoFrame>& frame() const { return frame_; }
Timestamp captured_time() const { return captured_time_; }
// Removes internal copy of the source `VideoFrame` to free up extra memory.
// Returns was frame removed or not.
bool RemoveFrame();
@ -82,9 +92,9 @@ class FrameInFlight {
// received it or not.
bool HaveAllPeersReceived() const;
void SetPreEncodeTime(webrtc::Timestamp time) { pre_encode_time_ = time; }
void SetPreEncodeTime(Timestamp time) { pre_encode_time_ = time; }
void OnFrameEncoded(webrtc::Timestamp time,
void OnFrameEncoded(Timestamp time,
VideoFrameType frame_type,
DataSize encoded_image_size,
uint32_t target_encode_bitrate,
@ -95,15 +105,15 @@ class FrameInFlight {
bool HasEncodedTime() const { return encoded_time_.IsFinite(); }
void OnFramePreDecode(size_t peer,
webrtc::Timestamp received_time,
webrtc::Timestamp decode_start_time,
Timestamp received_time,
Timestamp decode_start_time,
VideoFrameType frame_type,
DataSize encoded_image_size);
bool HasReceivedTime(size_t peer) const;
void OnFrameDecoded(size_t peer,
webrtc::Timestamp time,
Timestamp time,
int width,
int height,
const StreamCodecInfo& used_decoder);
@ -111,12 +121,12 @@ class FrameInFlight {
bool HasDecodeEndTime(size_t peer) const;
void OnFrameRendered(size_t peer, webrtc::Timestamp time);
void OnFrameRendered(size_t peer, Timestamp time);
bool HasRenderedTime(size_t peer) const;
// Crash if rendered time is not set for specified `peer`.
webrtc::Timestamp rendered_time(size_t peer) const {
Timestamp rendered_time(size_t peer) const {
return receiver_stats_.at(peer).rendered_time;
}
@ -124,13 +134,23 @@ class FrameInFlight {
void MarkDropped(size_t peer) { receiver_stats_[peer].dropped = true; }
bool IsDropped(size_t peer) const;
void MarkSuperfluous(size_t peer) {
receiver_stats_[peer].superfluous = true;
}
void SetPrevFrameRenderedTime(size_t peer, webrtc::Timestamp time) {
receiver_stats_[peer].prev_frame_rendered_time = time;
}
void SetTimeBetweenRenderedFrames(size_t peer, TimeDelta time) {
receiver_stats_[peer].time_between_rendered_frames = time;
}
FrameStats GetStatsForPeer(size_t peer) const;
private:
bool IsSuperfluous(size_t peer) const;
const size_t stream_;
// Set of peer's indexes who are expected to receive this frame. This is not
// the set of peer's indexes that received the frame. For example, if peer A
@ -156,12 +176,12 @@ class FrameInFlight {
DataSize encoded_image_size_ = DataSize::Bytes(0);
uint32_t target_encode_bitrate_ = 0;
// Sender side qp values per spatial or simulcast layer. If neither the
// spatial or simulcast index is set in `webrtc::EncodedImage`, 0 is used.
// spatial or simulcast index is set in `EncodedImage`, 0 is used.
std::map<int, SamplesStatsCounter> stream_layers_qp_;
// Can be not set if frame was dropped by encoder.
absl::optional<StreamCodecInfo> used_encoder_ = absl::nullopt;
// Map from the receiver peer's index to frame stats for that peer.
std::map<size_t, ReceiverFrameStats> receiver_stats_;
std::unordered_map<size_t, ReceiverFrameStats> receiver_stats_;
};
} // namespace webrtc

View File

@ -523,20 +523,21 @@ void DefaultVideoQualityAnalyzerFramesComparator::ProcessComparison(
if (frame_stats.prev_frame_rendered_time.IsFinite() &&
frame_stats.rendered_time.IsFinite()) {
TimeDelta time_between_rendered_frames =
frame_stats.rendered_time - frame_stats.prev_frame_rendered_time;
stats->time_between_rendered_frames_ms.AddSample(StatsSample(
time_between_rendered_frames, frame_stats.rendered_time, metadata));
stats->time_between_rendered_frames_ms.AddSample(
StatsSample(frame_stats.time_between_rendered_frames,
frame_stats.rendered_time, metadata));
TimeDelta average_time_between_rendered_frames = TimeDelta::Millis(
stats->time_between_rendered_frames_ms.GetAverage());
if (time_between_rendered_frames >
if (frame_stats.time_between_rendered_frames >
std::max(kFreezeThreshold + average_time_between_rendered_frames,
3 * average_time_between_rendered_frames)) {
stats->freeze_time_ms.AddSample(StatsSample(
time_between_rendered_frames, frame_stats.rendered_time, metadata));
stats->freeze_time_ms.AddSample(
StatsSample(frame_stats.time_between_rendered_frames,
frame_stats.rendered_time, metadata));
auto freeze_end_it =
stream_last_freeze_end_time_.find(comparison.stats_key);
RTC_DCHECK(freeze_end_it != stream_last_freeze_end_time_.end());
// TODO(bugs.webrtc.org/14995): rethink this metric for paused stream.
stats->time_between_freezes_ms.AddSample(StatsSample(
frame_stats.prev_frame_rendered_time - freeze_end_it->second,
frame_stats.rendered_time, metadata));

View File

@ -219,6 +219,8 @@ TEST(
FrameStats frame_stats2 = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
/*frame_id=*/2, stream_start_time + TimeDelta::Millis(15));
frame_stats2.prev_frame_rendered_time = frame_stats1.rendered_time;
frame_stats2.time_between_rendered_frames =
frame_stats2.rendered_time - frame_stats1.rendered_time;
comparator.Start(/*max_threads_count=*/1);
comparator.EnsureStatsForStream(stream, sender, peers_count,
@ -1610,6 +1612,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
FrameStats frame_stats = FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
/*frame_id=*/i + 1, stream_start_time + TimeDelta::Millis(30 * i));
frame_stats.prev_frame_rendered_time = prev_frame_rendered_time;
frame_stats.time_between_rendered_frames =
frame_stats.rendered_time - prev_frame_rendered_time;
prev_frame_rendered_time = frame_stats.rendered_time;
comparator.AddComparison(stats_key,
@ -1624,6 +1628,8 @@ TEST(DefaultVideoQualityAnalyzerFramesComparatorTest,
FrameStatsWith10msDeltaBetweenPhasesAnd10x10Frame(
/*frame_id=*/10, stream_start_time + TimeDelta::Millis(120 + 300));
freeze_frame_stats.prev_frame_rendered_time = prev_frame_rendered_time;
freeze_frame_stats.time_between_rendered_frames =
freeze_frame_stats.rendered_time - prev_frame_rendered_time;
comparator.AddComparison(stats_key,
/*skipped_between_rendered=*/4,

View File

@ -59,6 +59,10 @@ struct FrameStats {
Timestamp rendered_time = Timestamp::MinusInfinity();
Timestamp prev_frame_rendered_time = Timestamp::MinusInfinity();
// Time between this and previous rendered frame excluding time when related
// stream was paused for related receiver.
TimeDelta time_between_rendered_frames = TimeDelta::Zero();
VideoFrameType encoded_frame_type = VideoFrameType::kEmptyFrame;
DataSize encoded_image_size = DataSize::Bytes(0);
VideoFrameType pre_decoded_frame_type = VideoFrameType::kEmptyFrame;

View File

@ -10,18 +10,21 @@
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"
#include <map>
#include <set>
#include <unordered_map>
#include "absl/types/optional.h"
#include "api/units/timestamp.h"
#include "rtc_base/checks.h"
#include "system_wrappers/include/clock.h"
#include "test/pc/e2e/analyzer/video/dvqa/pausable_state.h"
namespace webrtc {
namespace {
template <typename T>
absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
absl::optional<T> MaybeGetValue(const std::unordered_map<size_t, T>& map,
size_t key) {
auto it = map.find(key);
if (it == map.end()) {
return absl::nullopt;
@ -33,15 +36,18 @@ absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
StreamState::StreamState(size_t sender,
std::set<size_t> receivers,
Timestamp stream_started_time)
Timestamp stream_started_time,
Clock* clock)
: sender_(sender),
stream_started_time_(stream_started_time),
clock_(clock),
receivers_(receivers),
frame_ids_(std::move(receivers)) {
frame_ids_.AddReader(kAliveFramesQueueIndex);
RTC_CHECK_NE(sender_, kAliveFramesQueueIndex);
for (size_t receiver : receivers_) {
RTC_CHECK_NE(receiver, kAliveFramesQueueIndex);
pausable_state_.emplace(receiver, PausableState(clock_));
}
}
@ -69,12 +75,14 @@ void StreamState::AddPeer(size_t peer) {
RTC_CHECK_NE(peer, kAliveFramesQueueIndex);
frame_ids_.AddReader(peer, kAliveFramesQueueIndex);
receivers_.insert(peer);
pausable_state_.emplace(peer, PausableState(clock_));
}
void StreamState::RemovePeer(size_t peer) {
RTC_CHECK_NE(peer, kAliveFramesQueueIndex);
frame_ids_.RemoveReader(peer);
receivers_.erase(peer);
pausable_state_.erase(peer);
// If we removed the last receiver for the alive frames, we need to pop them
// from the queue, because now they received by all receivers.
@ -86,6 +94,13 @@ void StreamState::RemovePeer(size_t peer) {
}
}
PausableState* StreamState::GetPausableState(size_t peer) {
  // The per-receiver pausable state is created when the peer is added and
  // erased together with the peer, so it must exist for any valid receiver.
  auto state_it = pausable_state_.find(peer);
  RTC_CHECK(state_it != pausable_state_.end())
      << "No pausable state for receiver " << peer;
  return &state_it->second;
}
uint16_t StreamState::MarkNextAliveFrameAsDead() {
absl::optional<uint16_t> frame_id =
frame_ids_.PopFront(kAliveFramesQueueIndex);

View File

@ -12,11 +12,13 @@
#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_
#include <limits>
#include <map>
#include <set>
#include <unordered_map>
#include "absl/types/optional.h"
#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"
#include "test/pc/e2e/analyzer/video/dvqa/pausable_state.h"
#include "test/pc/e2e/analyzer/video/multi_reader_queue.h"
namespace webrtc {
@ -37,7 +39,8 @@ class StreamState {
public:
StreamState(size_t sender,
std::set<size_t> receivers,
Timestamp stream_started_time);
Timestamp stream_started_time,
Clock* clock);
size_t sender() const { return sender_; }
Timestamp stream_started_time() const { return stream_started_time_; }
@ -59,9 +62,14 @@ class StreamState {
// DefaultVideoQualityAnalyzer still may request it for stats processing.
void RemovePeer(size_t peer);
// Returns a pointer to the PausableState of this stream for specified peer.
// The pointer is owned by StreamState and guaranteed to be non-null.
PausableState* GetPausableState(size_t peer);
size_t GetAliveFramesCount() const {
return frame_ids_.size(kAliveFramesQueueIndex);
}
// Returns frame id of the frame which was marked as dead.
uint16_t MarkNextAliveFrameAsDead();
void SetLastRenderedFrameTime(size_t peer, Timestamp time);
@ -78,6 +86,7 @@ class StreamState {
// Index of the owner. Owner's queue in `frame_ids_` will keep alive frames.
const size_t sender_;
const Timestamp stream_started_time_;
Clock* const clock_;
std::set<size_t> receivers_;
// To correctly determine dropped frames we have to know sequence of frames
// in each stream so we will keep a list of frame ids inside the stream.
@ -92,7 +101,9 @@ class StreamState {
// frame_id2 and consider those frames as dropped and then compare received
// frame with the one from `FrameInFlight` with id frame_id3.
MultiReaderQueue<uint16_t> frame_ids_;
std::map<size_t, Timestamp> last_rendered_frame_time_;
std::unordered_map<size_t, Timestamp> last_rendered_frame_time_;
// Mapping from peer's index to pausable state for this receiver.
std::unordered_map<size_t, PausableState> pausable_state_;
};
} // namespace webrtc

View File

@ -13,6 +13,7 @@
#include <set>
#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"
#include "test/gtest.h"
namespace webrtc {
@ -20,8 +21,8 @@ namespace {
TEST(StreamStateTest, PopFrontAndFrontIndependentForEachPeer) {
StreamState state(/*sender=*/0,
/*receivers=*/std::set<size_t>{1, 2},
Timestamp::Seconds(1));
/*receivers=*/std::set<size_t>{1, 2}, Timestamp::Seconds(1),
Clock::GetRealTimeClock());
state.PushBack(/*frame_id=*/1);
state.PushBack(/*frame_id=*/2);
@ -37,8 +38,8 @@ TEST(StreamStateTest, PopFrontAndFrontIndependentForEachPeer) {
TEST(StreamStateTest, IsEmpty) {
StreamState state(/*sender=*/0,
/*receivers=*/std::set<size_t>{1, 2},
Timestamp::Seconds(1));
/*receivers=*/std::set<size_t>{1, 2}, Timestamp::Seconds(1),
Clock::GetRealTimeClock());
state.PushBack(/*frame_id=*/1);
EXPECT_FALSE(state.IsEmpty(/*peer=*/1));
@ -50,8 +51,8 @@ TEST(StreamStateTest, IsEmpty) {
TEST(StreamStateTest, PopFrontForOnlyOnePeerDontChangeAliveFramesCount) {
StreamState state(/*sender=*/0,
/*receivers=*/std::set<size_t>{1, 2},
Timestamp::Seconds(1));
/*receivers=*/std::set<size_t>{1, 2}, Timestamp::Seconds(1),
Clock::GetRealTimeClock());
state.PushBack(/*frame_id=*/1);
state.PushBack(/*frame_id=*/2);
@ -65,8 +66,8 @@ TEST(StreamStateTest, PopFrontForOnlyOnePeerDontChangeAliveFramesCount) {
TEST(StreamStateTest, PopFrontForAllPeersReducesAliveFramesCount) {
StreamState state(/*sender=*/0,
/*receivers=*/std::set<size_t>{1, 2},
Timestamp::Seconds(1));
/*receivers=*/std::set<size_t>{1, 2}, Timestamp::Seconds(1),
Clock::GetRealTimeClock());
state.PushBack(/*frame_id=*/1);
state.PushBack(/*frame_id=*/2);
@ -80,8 +81,8 @@ TEST(StreamStateTest, PopFrontForAllPeersReducesAliveFramesCount) {
TEST(StreamStateTest, RemovePeerForLastExpectedReceiverUpdatesAliveFrames) {
StreamState state(/*sender=*/0,
/*receivers=*/std::set<size_t>{1, 2},
Timestamp::Seconds(1));
/*receivers=*/std::set<size_t>{1, 2}, Timestamp::Seconds(1),
Clock::GetRealTimeClock());
state.PushBack(/*frame_id=*/1);
state.PushBack(/*frame_id=*/2);
@ -96,8 +97,8 @@ TEST(StreamStateTest, RemovePeerForLastExpectedReceiverUpdatesAliveFrames) {
TEST(StreamStateTest, MarkNextAliveFrameAsDeadDecreseAliveFramesCount) {
StreamState state(/*sender=*/0,
/*receivers=*/std::set<size_t>{1, 2},
Timestamp::Seconds(1));
/*receivers=*/std::set<size_t>{1, 2}, Timestamp::Seconds(1),
Clock::GetRealTimeClock());
state.PushBack(/*frame_id=*/1);
state.PushBack(/*frame_id=*/2);
@ -110,8 +111,8 @@ TEST(StreamStateTest, MarkNextAliveFrameAsDeadDecreseAliveFramesCount) {
TEST(StreamStateTest, MarkNextAliveFrameAsDeadDoesntAffectFrontFrameForPeer) {
StreamState state(/*sender=*/0,
/*receivers=*/std::set<size_t>{1, 2},
Timestamp::Seconds(1));
/*receivers=*/std::set<size_t>{1, 2}, Timestamp::Seconds(1),
Clock::GetRealTimeClock());
state.PushBack(/*frame_id=*/1);
state.PushBack(/*frame_id=*/2);

View File

@ -19,6 +19,9 @@
#include "api/rtp_packet_infos.h"
#include "api/test/create_frame_generator.h"
#include "api/test/metrics/global_metrics_logger_and_exporter.h"
#include "api/test/time_controller.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
@ -26,12 +29,18 @@
#include "rtc_base/strings/string_builder.h"
#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
#include "system_wrappers/include/sleep.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
namespace {
using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::IsEmpty;
using ::testing::Test;
using ::testing::TestWithParam;
using ::testing::ValuesIn;
@ -95,6 +104,26 @@ std::vector<StatsSample> GetSortedSamples(const SamplesStatsCounter& counter) {
return out;
}
std::vector<double> GetTimeSortedValues(const SamplesStatsCounter& counter) {
rtc::ArrayView<const StatsSample> view = counter.GetTimedSamples();
std::vector<StatsSample> sorted(view.begin(), view.end());
std::sort(sorted.begin(), sorted.end(),
[](const StatsSample& a, const StatsSample& b) {
return a.time < b.time;
});
std::vector<double> out;
out.reserve(sorted.size());
for (const StatsSample& sample : sorted) {
out.push_back(sample.value);
}
return out;
}
void ExpectRateIs(const SamplesRateCounter& rate_couter, double expected_rate) {
ASSERT_FALSE(rate_couter.IsEmpty());
EXPECT_NEAR(rate_couter.GetEventsPerSecond(), expected_rate, 1e-5);
}
std::string ToString(const std::vector<StatsSample>& values) {
rtc::StringBuilder out;
for (const auto& v : values) {
@ -112,13 +141,42 @@ void FakeCPULoad() {
ASSERT_TRUE(std::is_sorted(temp.begin(), temp.end()));
}
// Drives `frames_count` frames through the sender side of `analyzer` only:
// each frame is captured, pre-encoded and encoded on behalf of `sender`, but
// is never delivered to any receiver. Used to simulate a period during which
// receivers get nothing (e.g. a freeze or a paused stream).
//
// `receivers` is accepted for signature parity with PassFramesThroughAnalyzer
// but is intentionally unused here, since no receive-side callbacks are made.
//
// If `time_controller` is provided, simulated time is advanced by
// `interframe_delay_ms` between frames; otherwise the real clock is used via
// SleepMs.
void PassFramesThroughAnalyzerSenderOnly(
    DefaultVideoQualityAnalyzer& analyzer,
    absl::string_view sender,
    absl::string_view stream_label,
    std::vector<absl::string_view> receivers,
    int frames_count,
    test::FrameGeneratorInterface& frame_generator,
    int interframe_delay_ms = 0,
    TimeController* time_controller = nullptr) {
  for (int i = 0; i < frames_count; ++i) {
    VideoFrame frame = NextFrame(&frame_generator, /*timestamp_us=*/1);
    uint16_t frame_id =
        analyzer.OnFrameCaptured(sender, std::string(stream_label), frame);
    frame.set_id(frame_id);
    analyzer.OnFramePreEncode(sender, frame);
    analyzer.OnFrameEncoded(sender, frame.id(), FakeEncode(frame),
                            VideoQualityAnalyzerInterface::EncoderStats(),
                            false);
    // No delay after the last frame.
    if (i < frames_count - 1 && interframe_delay_ms > 0) {
      if (time_controller == nullptr) {
        SleepMs(interframe_delay_ms);
      } else {
        time_controller->AdvanceTime(TimeDelta::Millis(interframe_delay_ms));
      }
    }
  }
}
void PassFramesThroughAnalyzer(DefaultVideoQualityAnalyzer& analyzer,
absl::string_view sender,
absl::string_view stream_label,
std::vector<absl::string_view> receivers,
int frames_count,
test::FrameGeneratorInterface& frame_generator,
int interframe_delay_ms = 0) {
int interframe_delay_ms = 0,
TimeController* time_controller = nullptr) {
for (int i = 0; i < frames_count; ++i) {
VideoFrame frame = NextFrame(&frame_generator, /*timestamp_us=*/1);
uint16_t frame_id =
@ -137,7 +195,11 @@ void PassFramesThroughAnalyzer(DefaultVideoQualityAnalyzer& analyzer,
analyzer.OnFrameRendered(receiver, received_frame);
}
if (i < frames_count - 1 && interframe_delay_ms > 0) {
SleepMs(interframe_delay_ms);
if (time_controller == nullptr) {
SleepMs(interframe_delay_ms);
} else {
time_controller->AdvanceTime(TimeDelta::Millis(interframe_delay_ms));
}
}
}
}
@ -790,7 +852,7 @@ TEST(DefaultVideoQualityAnalyzerTest, CpuUsage) {
}
// Windows CPU clock has low accuracy. We need to fake some additional load to
// be sure that the clock ticks (https://crbug.com/webrtc/12249).
// be sure that the clock ticks (https://bugs.webrtc.org/12249).
FakeCPULoad();
for (size_t i = 1; i < frames_order.size(); i += 2) {
@ -2200,5 +2262,169 @@ INSTANTIATE_TEST_SUITE_P(WithRegisteredAndUnregisteredPeerAtTheEndOfTheCall,
DefaultVideoQualityAnalyzerTimeBetweenFreezesTest,
ValuesIn({true, false}));
// Fixture that runs DefaultVideoQualityAnalyzer on top of simulated time so
// tests can advance the clock deterministically instead of sleeping.
class DefaultVideoQualityAnalyzerSimulatedTimeTest : public Test {
 protected:
  DefaultVideoQualityAnalyzerSimulatedTimeTest()
      : time_controller_(std::make_unique<GlobalSimulatedTimeController>(
            Timestamp::Seconds(1000))) {}

  // Moves the simulated clock forward by `time`.
  void AdvanceTime(TimeDelta time) { time_controller_->AdvanceTime(time); }

  // Clock driven by the simulated time controller.
  Clock* GetClock() { return time_controller_->GetClock(); }

  TimeController* time_controller() { return time_controller_.get(); }

  // Current simulated time.
  Timestamp Now() const { return time_controller_->GetClock()->CurrentTime(); }

 private:
  std::unique_ptr<TimeController> time_controller_;
};
// Verifies that a stream which is paused and later resumed for one receiver
// (Bob) is not penalized with freezes or drops for the paused period, while a
// receiver that never paused (Charlie) records the freeze and the drops.
TEST_F(DefaultVideoQualityAnalyzerSimulatedTimeTest,
       PausedAndResumedStreamIsAccountedInStatsCorrectly) {
  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
                                       /*type=*/absl::nullopt,
                                       /*num_squares=*/absl::nullopt);

  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
  options.report_infra_metrics = false;
  DefaultVideoQualityAnalyzer analyzer(GetClock(),
                                       test::GetGlobalMetricsLogger(), options);
  analyzer.Start("test_case",
                 std::vector<std::string>{"alice", "bob", "charlie"},
                 kAnalyzerMaxThreadsCount);

  // Pass 20 frames as 20 fps.
  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
                            {"bob", "charlie"},
                            /*frames_count=*/20, *frame_generator,
                            /*interframe_delay_ms=*/50, time_controller());
  AdvanceTime(TimeDelta::Millis(50));
  // Mark stream paused for Bob, but not for Charlie.
  analyzer.OnPeerStoppedReceiveVideoStream("bob", "alice_video");
  // Freeze for 1 second: 20 more frames are encoded but never received.
  PassFramesThroughAnalyzerSenderOnly(
      analyzer, "alice", "alice_video", {"bob", "charlie"},
      /*frames_count=*/20, *frame_generator,
      /*interframe_delay_ms=*/50, time_controller());
  AdvanceTime(TimeDelta::Millis(50));
  // Unpause stream for Bob.
  analyzer.OnPeerStartedReceiveVideoStream("bob", "alice_video");
  // Pass 20 frames as 20 fps.
  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
                            {"bob", "charlie"},
                            /*frames_count=*/20, *frame_generator,
                            /*interframe_delay_ms=*/50, time_controller());
  analyzer.Stop();

  // Bob should have 20 fps without freeze and Charlie should have freeze of 1s
  // and decreased fps.
  std::map<StatsKey, StreamStats> streams_stats = analyzer.GetStats();
  std::map<StatsKey, FrameCounters> frame_counters =
      analyzer.GetPerStreamCounters();
  StreamStats bob_stream_stats =
      streams_stats.at(StatsKey("alice_video", "bob"));
  FrameCounters bob_frame_counters =
      frame_counters.at(StatsKey("alice_video", "bob"));
  EXPECT_THAT(bob_frame_counters.dropped, Eq(0));
  EXPECT_THAT(bob_frame_counters.rendered, Eq(40));
  EXPECT_THAT(GetTimeSortedValues(bob_stream_stats.freeze_time_ms),
              ElementsAre(0.0));
  // TODO(bugs.webrtc.org/14995): value should exclude pause
  EXPECT_THAT(GetTimeSortedValues(bob_stream_stats.time_between_freezes_ms),
              ElementsAre(2950.0));
  // TODO(bugs.webrtc.org/14995): Fix capture_frame_rate (has to be ~20.0)
  ExpectRateIs(bob_stream_stats.capture_frame_rate, 13.559322);
  // TODO(bugs.webrtc.org/14995): Fix encode_frame_rate (has to be ~20.0)
  ExpectRateIs(bob_stream_stats.encode_frame_rate, 13.559322);
  // TODO(bugs.webrtc.org/14995): Assert on harmonic fps

  StreamStats charlie_stream_stats =
      streams_stats.at(StatsKey("alice_video", "charlie"));
  FrameCounters charlie_frame_counters =
      frame_counters.at(StatsKey("alice_video", "charlie"));
  EXPECT_THAT(charlie_frame_counters.dropped, Eq(20));
  EXPECT_THAT(charlie_frame_counters.rendered, Eq(40));
  EXPECT_THAT(GetTimeSortedValues(charlie_stream_stats.freeze_time_ms),
              ElementsAre(1050.0));
  EXPECT_THAT(GetTimeSortedValues(charlie_stream_stats.time_between_freezes_ms),
              ElementsAre(950.0, 950.0));
  // TODO(bugs.webrtc.org/14995): Assert on harmonic fps
}
// Verifies that a stream which is paused for one receiver (Bob) and never
// resumed stops accumulating stats for Bob at the pause point, while the
// receiver that never paused (Charlie) records the freeze and the drops.
TEST_F(DefaultVideoQualityAnalyzerSimulatedTimeTest,
       PausedStreamIsAccountedInStatsCorrectly) {
  std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
      test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
                                       /*type=*/absl::nullopt,
                                       /*num_squares=*/absl::nullopt);

  DefaultVideoQualityAnalyzerOptions options = AnalyzerOptionsForTest();
  options.report_infra_metrics = false;
  DefaultVideoQualityAnalyzer analyzer(GetClock(),
                                       test::GetGlobalMetricsLogger(), options);
  analyzer.Start("test_case",
                 std::vector<std::string>{"alice", "bob", "charlie"},
                 kAnalyzerMaxThreadsCount);

  // Pass 20 frames as 20 fps.
  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video",
                            {"bob", "charlie"},
                            /*frames_count=*/20, *frame_generator,
                            /*interframe_delay_ms=*/50, time_controller());
  AdvanceTime(TimeDelta::Millis(50));
  // Mark stream paused for Bob, but not for Charlie.
  analyzer.OnPeerStoppedReceiveVideoStream("bob", "alice_video");
  // Freeze for 1 second: 20 more frames are encoded but never received.
  PassFramesThroughAnalyzerSenderOnly(
      analyzer, "alice", "alice_video", {"bob", "charlie"},
      /*frames_count=*/20, *frame_generator,
      /*interframe_delay_ms=*/50, time_controller());
  AdvanceTime(TimeDelta::Millis(50));
  // Pass 20 frames as 20 fps; Bob stays paused, so only Charlie receives them.
  PassFramesThroughAnalyzer(analyzer, "alice", "alice_video", {"charlie"},
                            /*frames_count=*/20, *frame_generator,
                            /*interframe_delay_ms=*/50, time_controller());
  analyzer.Stop();

  // Bob should have 20 fps without freeze and Charlie should have freeze of 1s
  // and decreased fps.
  std::map<StatsKey, StreamStats> streams_stats = analyzer.GetStats();
  std::map<StatsKey, FrameCounters> frame_counters =
      analyzer.GetPerStreamCounters();
  StreamStats bob_stream_stats =
      streams_stats.at(StatsKey("alice_video", "bob"));
  FrameCounters bob_frame_counters =
      frame_counters.at(StatsKey("alice_video", "bob"));
  EXPECT_THAT(bob_frame_counters.dropped, Eq(0));
  EXPECT_THAT(bob_frame_counters.rendered, Eq(20));
  EXPECT_THAT(GetTimeSortedValues(bob_stream_stats.freeze_time_ms),
              ElementsAre(0.0));
  EXPECT_THAT(GetTimeSortedValues(bob_stream_stats.time_between_freezes_ms),
              ElementsAre(950.0));
  ExpectRateIs(bob_stream_stats.capture_frame_rate, 21.052631);
  ExpectRateIs(bob_stream_stats.encode_frame_rate, 21.052631);
  // TODO(bugs.webrtc.org/14995): Assert on harmonic fps

  StreamStats charlie_stream_stats =
      streams_stats.at(StatsKey("alice_video", "charlie"));
  FrameCounters charlie_frame_counters =
      frame_counters.at(StatsKey("alice_video", "charlie"));
  EXPECT_THAT(charlie_frame_counters.dropped, Eq(20));
  EXPECT_THAT(charlie_frame_counters.rendered, Eq(40));
  EXPECT_THAT(GetTimeSortedValues(charlie_stream_stats.freeze_time_ms),
              ElementsAre(1050.0));
  EXPECT_THAT(GetTimeSortedValues(charlie_stream_stats.time_between_freezes_ms),
              ElementsAre(950.0, 950.0));
  // TODO(bugs.webrtc.org/14995): Assert on harmonic fps
}
} // namespace
} // namespace webrtc

View File

@ -0,0 +1,65 @@
# Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../../../../../webrtc.gni")
if (!build_with_chromium) {
group("dvqa") {
testonly = true
deps = [ ":pausable_state" ]
}
if (rtc_include_tests) {
group("dvqa_unittests") {
testonly = true
deps = [ ":pausable_state_test" ]
}
}
}
# These targets contain implementation details of DefaultVideoQualityAnalyzer,
# so headers exported by them shouldn't be used in other places.
# PausableState: pause/resume timeline used by DefaultVideoQualityAnalyzer to
# track when a peer stopped/started receiving a stream. testonly because it is
# part of the test-support DVQA machinery.
rtc_library("pausable_state") {
  visibility = [
    ":dvqa",
    ":pausable_state_test",
    "..:default_video_quality_analyzer_internal",
  ]
  testonly = true
  sources = [
    "pausable_state.cc",
    "pausable_state.h",
  ]
  deps = [
    "../../../../../../api/units:time_delta",
    "../../../../../../api/units:timestamp",
    "../../../../../../rtc_base:checks",
    "../../../../../../system_wrappers",
  ]
}

if (rtc_include_tests) {
  # Unit tests for PausableState; driven by simulated time.
  rtc_library("pausable_state_test") {
    testonly = true
    sources = [ "pausable_state_test.cc" ]
    deps = [
      ":pausable_state",
      "../../../../..:test_support",
      "../../../../../../api:time_controller",
      "../../../../../../api/units:time_delta",
      "../../../../../../api/units:timestamp",
      "../../../../../../system_wrappers",
      "../../../../../time_controller",
    ]
  }
}

View File

@ -0,0 +1,99 @@
/*
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test/pc/e2e/analyzer/video/dvqa/pausable_state.h"

#include <algorithm>
#include <cstdint>

#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "rtc_base/checks.h"
namespace webrtc {
void PausableState::Pause() {
RTC_CHECK(!IsPaused());
events_.push_back(Event{.time = clock_->CurrentTime(), .is_paused = true});
}
void PausableState::Resume() {
RTC_CHECK(IsPaused());
events_.push_back(Event{.time = clock_->CurrentTime(), .is_paused = false});
}
// The current state is determined by the most recent event; with no events
// recorded yet the state is active.
bool PausableState::IsPaused() const {
  if (events_.empty()) {
    return false;
  }
  return events_.back().is_paused;
}
// The state at `time` is given by the event governing `time` (the last event
// at or before it). Before the first event the state is active by definition.
bool PausableState::WasPausedAt(Timestamp time) const {
  if (events_.empty()) {
    return false;
  }
  const int64_t pos = GetPos(time);
  if (pos == -1) {
    // `time` precedes the first recorded event.
    return false;
  }
  return events_[pos].is_paused;
}
// Looks at the first event strictly after the one governing `time` and
// reports whether it is a "resume".
bool PausableState::WasResumedAfter(Timestamp time) const {
  if (events_.empty()) {
    return false;
  }
  const int64_t next = GetPos(time) + 1;
  if (next >= static_cast<int64_t>(events_.size())) {
    // No event happened after `time`.
    return false;
  }
  return !events_[next].is_paused;
}
// Time of the most recent pause/resume transition; plus infinity when no
// transition has been recorded yet.
Timestamp PausableState::GetLastEventTime() const {
  return events_.empty() ? Timestamp::PlusInfinity() : events_.back().time;
}
// Sums the sub-intervals of [`time`, now] during which the state was active
// (i.e. not paused).
TimeDelta PausableState::GetActiveDurationFrom(Timestamp time) const {
  // No events at all: the state has been active for the whole interval.
  if (events_.empty()) {
    return clock_->CurrentTime() - time;
  }
  int64_t pos = GetPos(time);
  TimeDelta duration = TimeDelta::Zero();
  // Walk events starting at the one governing `time`. `pos` may be -1 when
  // `time` precedes the first event; that leading interval is active by
  // definition (the state starts active), which is the `i == -1` case below.
  for (int64_t i = pos; i < static_cast<int64_t>(events_.size()); ++i) {
    if (i == -1 || !events_[i].is_paused) {
      // Active interval: starts at `time` for the first (possibly partial)
      // interval, otherwise at the event itself; ends at the next event, or
      // at the current time for the still-open last interval.
      Timestamp start_time = (i == pos) ? time : events_[i].time;
      Timestamp end_time = (i + 1 == static_cast<int64_t>(events_.size()))
                               ? clock_->CurrentTime()
                               : events_[i + 1].time;
      duration += end_time - start_time;
    }
  }
  return duration;
}
// Returns the index in `events_` of the event governing `time`: the
// right-most event whose time equals `time`, else the latest event whose time
// is smaller than `time`, else -1 (every event is later than `time`).
//
// Implemented with std::upper_bound instead of a hand-rolled binary search.
// Besides being simpler, this is also safe for an empty `events_` (the
// original computed `events_.size() - 1` on an unsigned zero and indexed the
// empty vector); with no events this returns -1.
int64_t PausableState::GetPos(Timestamp time) const {
  // First event strictly after `time`; everything before `it` is <= `time`,
  // so the governing event is the one just before it (or -1 if none).
  auto it = std::upper_bound(
      events_.begin(), events_.end(), time,
      [](Timestamp value, const Event& event) { return value < event.time; });
  return static_cast<int64_t>(it - events_.begin()) - 1;
}
} // namespace webrtc

View File

@ -0,0 +1,89 @@
/*
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DVQA_PAUSABLE_STATE_H_
#define TEST_PC_E2E_ANALYZER_VIDEO_DVQA_PAUSABLE_STATE_H_
#include <cstdint>
#include <vector>
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
// Tracks a pause/resume timeline (e.g. for a peer receiving a video stream)
// and can tell, for any point in time, whether the state was paused or
// active. Events are stored in chronological order and all queries are
// answered from that history.
class PausableState {
 public:
  // Creates a state as active. `clock` is not owned and must outlive this
  // object; it timestamps events and answers "now" queries.
  explicit PausableState(Clock* clock) : clock_(clock) {}
  PausableState(const PausableState&) = delete;
  PausableState& operator=(const PausableState&) = delete;
  PausableState(PausableState&&) = default;
  PausableState& operator=(PausableState&&) = default;

  // Pauses current state. State MUST be active.
  //
  // Complexity: O(1)
  void Pause();

  // Activates current state. State MUST be paused.
  //
  // Complexity: O(1)
  void Resume();

  // Returns whether the state is paused right now.
  //
  // Complexity: O(1)
  bool IsPaused() const;

  // Returns if the last event at or before `time` was "pause".
  //
  // Complexity: O(log(n))
  bool WasPausedAt(Timestamp time) const;

  // Returns if the next event after `time` was "resume".
  //
  // Complexity: O(log(n))
  bool WasResumedAfter(Timestamp time) const;

  // Returns time of the last event or plus infinity if no events happened.
  //
  // Complexity: O(1)
  Timestamp GetLastEventTime() const;

  // Returns sum of durations during which state was active starting from
  // time `time`.
  //
  // Complexity: O(n)
  TimeDelta GetActiveDurationFrom(Timestamp time) const;

 private:
  // A single pause or resume transition at `time`.
  struct Event {
    Timestamp time;
    bool is_paused;
  };

  // Returns position in `events_` which has time:
  // 1. Most right of the equals
  // 2. The biggest which is smaller
  // 3. -1 otherwise (first time is bigger than `time`)
  int64_t GetPos(Timestamp time) const;

  // Not owned.
  Clock* clock_;
  // Pause/resume transitions in chronological order.
  std::vector<Event> events_;
};
} // namespace webrtc
#endif // TEST_PC_E2E_ANALYZER_VIDEO_DVQA_PAUSABLE_STATE_H_

View File

@ -0,0 +1,373 @@
/*
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test/pc/e2e/analyzer/video/dvqa/pausable_state.h"
#include <memory>
#include "api/test/time_controller.h"
#include "api/units/time_delta.h"
#include "api/units/timestamp.h"
#include "system_wrappers/include/clock.h"
#include "test/gtest.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc {
namespace {
// Fixture that drives PausableState with a simulated clock so event
// timestamps are fully deterministic.
class PausableStateTest : public testing::Test {
 protected:
  PausableStateTest()
      : time_controller_(std::make_unique<GlobalSimulatedTimeController>(
            Timestamp::Seconds(1000))) {}

  // Moves the simulated clock forward by `time`.
  void AdvanceTime(TimeDelta time) { time_controller_->AdvanceTime(time); }

  // Clock driven by the simulated time controller.
  Clock* GetClock() { return time_controller_->GetClock(); }

  // Current simulated time.
  Timestamp Now() const { return time_controller_->GetClock()->CurrentTime(); }

 private:
  std::unique_ptr<TimeController> time_controller_;
};
// A freshly created state starts out active.
TEST_F(PausableStateTest, NewIsActive) {
  PausableState state(GetClock());

  EXPECT_FALSE(state.IsPaused());
}

// Pause() switches the current state to paused.
TEST_F(PausableStateTest, IsPausedAfterPaused) {
  PausableState state(GetClock());

  state.Pause();

  EXPECT_TRUE(state.IsPaused());
}

// Resume() switches the current state back to active.
TEST_F(PausableStateTest, IsActiveAfterResume) {
  PausableState state(GetClock());

  state.Pause();
  state.Resume();

  EXPECT_FALSE(state.IsPaused());
}
// With several pause/resume pairs at the same instant, the right-most event
// at that time wins: ending on a resume means "not paused".
TEST_F(PausableStateTest, WasPausedAtFalseWhenMultiplePauseResumeAtSameTime) {
  PausableState state(GetClock());

  state.Pause();
  state.Resume();
  state.Pause();
  state.Resume();
  state.Pause();
  state.Resume();

  EXPECT_FALSE(state.WasPausedAt(Now()));
}

// Same as above but ending on a pause: the final event at that instant wins.
TEST_F(PausableStateTest,
       WasPausedAtTrueWhenMultiplePauseResumeAtSameTimeAndThenPause) {
  PausableState state(GetClock());

  state.Pause();
  state.Resume();
  state.Pause();
  state.Resume();
  state.Pause();
  state.Resume();
  state.Pause();

  EXPECT_TRUE(state.WasPausedAt(Now()));
}
// A query time before the first pause reports "not paused".
TEST_F(PausableStateTest, WasPausedAtFalseBeforeFirstPause) {
  PausableState state(GetClock());

  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();

  EXPECT_FALSE(state.WasPausedAt(test_time));
}

// A query time strictly inside a pause interval reports "paused".
TEST_F(PausableStateTest, WasPausedAtTrueAfterPauseBeforeResume) {
  PausableState state(GetClock());

  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();

  EXPECT_TRUE(state.WasPausedAt(test_time));
}

// A query time strictly inside an active interval reports "not paused".
TEST_F(PausableStateTest, WasPausedAtFalseAfterResumeBeforePause) {
  PausableState state(GetClock());

  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();

  EXPECT_FALSE(state.WasPausedAt(test_time));
}

// A query time exactly at a pause event belongs to the paused interval.
TEST_F(PausableStateTest, WasPausedAtTrueAtPauseBeforeResume) {
  PausableState state(GetClock());

  state.Pause();
  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();

  EXPECT_TRUE(state.WasPausedAt(test_time));
}

// A query time exactly at a resume event belongs to the active interval.
TEST_F(PausableStateTest, WasPausedAtFalseAfterPauseAtResume) {
  PausableState state(GetClock());

  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp test_time = Now();
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();

  EXPECT_FALSE(state.WasPausedAt(test_time));
}

// While still paused (no resume yet), a later query time reports "paused".
TEST_F(PausableStateTest, WasPausedAtTrueAfterPause) {
  PausableState state(GetClock());

  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp test_time = Now();

  EXPECT_TRUE(state.WasPausedAt(test_time));
}

// After the final resume, a later query time reports "not paused".
TEST_F(PausableStateTest, WasPausedAtFalseAfterResume) {
  PausableState state(GetClock());

  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp test_time = Now();

  EXPECT_FALSE(state.WasPausedAt(test_time));
}
// The event following a time that precedes the first pause is a "pause",
// not a "resume".
TEST_F(PausableStateTest, WasResumedAfterFalseBeforeFirstPause) {
  PausableState state(GetClock());

  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();

  EXPECT_FALSE(state.WasResumedAfter(test_time));
}

// A time inside a pause interval is directly followed by a "resume".
TEST_F(PausableStateTest, WasResumedAfterTrueAfterPauseBeforeResume) {
  PausableState state(GetClock());

  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();

  EXPECT_TRUE(state.WasResumedAfter(test_time));
}

// A time inside an active interval is directly followed by a "pause".
TEST_F(PausableStateTest, WasResumedAfterFalseAfterResumeBeforePause) {
  PausableState state(GetClock());

  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();

  EXPECT_FALSE(state.WasResumedAfter(test_time));
}

// A time exactly at a pause event is still followed by that pause's resume.
TEST_F(PausableStateTest, WasResumedAfterTrueAtPauseBeforeResume) {
  PausableState state(GetClock());

  state.Pause();
  Timestamp test_time = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();

  EXPECT_FALSE(state.WasResumedAfter(test_time));
}
// With no events recorded, the whole interval counts as active.
TEST_F(PausableStateTest, GetActiveDurationFromWithoutPausesReturnAllTime) {
  PausableState state(GetClock());

  Timestamp time_from = Now();
  AdvanceTime(TimeDelta::Seconds(5));

  EXPECT_EQ(state.GetActiveDurationFrom(time_from), TimeDelta::Seconds(5));
}

// Paused intervals are excluded: 5s total with 2s paused yields 3s active.
TEST_F(PausableStateTest, GetActiveDurationFromRespectsPauses) {
  PausableState state(GetClock());

  Timestamp time_from = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));

  EXPECT_EQ(state.GetActiveDurationFrom(time_from), TimeDelta::Seconds(3));
}

// Starting inside a pause: the remainder of that pause is not counted.
TEST_F(PausableStateTest, GetActiveDurationFromMiddleOfPauseAccountOnlyActive) {
  PausableState state(GetClock());

  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp time_from = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));

  EXPECT_EQ(state.GetActiveDurationFrom(time_from), TimeDelta::Seconds(2));
}

// Starting inside an active interval: the remainder of it is counted in full.
TEST_F(PausableStateTest, GetActiveDurationFromMiddleOfActiveAccountAllActive) {
  PausableState state(GetClock());

  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp time_from = Now();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));

  EXPECT_EQ(state.GetActiveDurationFrom(time_from), TimeDelta::Seconds(2));
}

// Starting inside a never-resumed pause yields zero active time.
TEST_F(PausableStateTest, GetActiveDurationFromWhenPauseReturnsZero) {
  PausableState state(GetClock());

  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp time_from = Now();

  EXPECT_EQ(state.GetActiveDurationFrom(time_from), TimeDelta::Zero());
}

// Starting after the final resume yields the full remaining time.
TEST_F(PausableStateTest, GetActiveDurationFromWhenActiveReturnsAllTime) {
  PausableState state(GetClock());

  AdvanceTime(TimeDelta::Seconds(1));
  state.Pause();
  AdvanceTime(TimeDelta::Seconds(1));
  state.Resume();
  AdvanceTime(TimeDelta::Seconds(1));
  Timestamp time_from = Now();
  AdvanceTime(TimeDelta::Seconds(1));

  EXPECT_EQ(state.GetActiveDurationFrom(time_from), TimeDelta::Seconds(1));
}
} // namespace
} // namespace webrtc