[DVQA] Extract StreamState into separate file
Bug: b/231397778
Change-Id: Ic7207cd17e8179151824a2257b3173481e9f4f6e
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/265399
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37187}

Parent: 9a665402d7
Commit: 71f406e52b
@@ -738,12 +738,15 @@ if (!build_with_chromium) {
      "analyzer/video/default_video_quality_analyzer_frames_comparator.h",
      "analyzer/video/default_video_quality_analyzer_internal_shared_objects.cc",
      "analyzer/video/default_video_quality_analyzer_internal_shared_objects.h",
      "analyzer/video/default_video_quality_analyzer_stream_state.cc",
      "analyzer/video/default_video_quality_analyzer_stream_state.h",
      "analyzer/video/names_collection.cc",
      "analyzer/video/names_collection.h",
    ]

    deps = [
      ":default_video_quality_analyzer_shared",
      ":multi_head_queue",
      "../../../api:array_view",
      "../../../api:scoped_refptr",
      "../../../api/numerics:numerics",
@@ -28,6 +28,7 @@
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"

namespace webrtc {
namespace {
@@ -987,82 +988,6 @@ DefaultVideoQualityAnalyzer::GetStreamFrames() const {
  return out;
}

uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront(size_t peer) {
  size_t peer_queue = GetPeerQueueIndex(peer);
  size_t alive_frames_queue = GetAliveFramesQueueIndex();
  absl::optional<uint16_t> frame_id = frame_ids_.PopFront(peer_queue);
  RTC_DCHECK(frame_id.has_value());

  // If the alive frames queue is longer than all the others, then also pop the
  // frame from it, because that frame has been received by all receivers.
  size_t alive_size = frame_ids_.size(alive_frames_queue);
  size_t other_size = 0;
  for (size_t i = 0; i < frame_ids_.readers_count(); ++i) {
    size_t cur_size = frame_ids_.size(i);
    if (i != alive_frames_queue && cur_size > other_size) {
      other_size = cur_size;
    }
  }
  // Pop the frame from the alive queue if the alive queue is the longest one.
  if (alive_size > other_size) {
    absl::optional<uint16_t> alive_frame_id =
        frame_ids_.PopFront(alive_frames_queue);
    RTC_DCHECK(alive_frame_id.has_value());
    RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value());
  }

  return frame_id.value();
}

uint16_t DefaultVideoQualityAnalyzer::StreamState::MarkNextAliveFrameAsDead() {
  absl::optional<uint16_t> frame_id =
      frame_ids_.PopFront(GetAliveFramesQueueIndex());
  RTC_DCHECK(frame_id.has_value());
  return frame_id.value();
}

void DefaultVideoQualityAnalyzer::StreamState::SetLastRenderedFrameTime(
    size_t peer,
    Timestamp time) {
  auto it = last_rendered_frame_time_.find(peer);
  if (it == last_rendered_frame_time_.end()) {
    last_rendered_frame_time_.insert({peer, time});
  } else {
    it->second = time;
  }
}

absl::optional<Timestamp>
DefaultVideoQualityAnalyzer::StreamState::last_rendered_frame_time(
    size_t peer) const {
  return MaybeGetValue(last_rendered_frame_time_, peer);
}

size_t DefaultVideoQualityAnalyzer::StreamState::GetPeerQueueIndex(
    size_t peer_index) const {
  // When the sender isn't expected to receive its own stream, we use the
  // sender's own queue to track alive frames. Otherwise we use queue #0 to
  // track alive frames and shift all peer queues by 1. This means that when
  // `enable_receive_own_stream_` is true the peer's queue has index
  // `peer_index` + 1, and when it is false the peer's queue has index
  // `peer_index`.
  if (!enable_receive_own_stream_) {
    return peer_index;
  }
  return peer_index + 1;
}

size_t DefaultVideoQualityAnalyzer::StreamState::GetAliveFramesQueueIndex()
    const {
  // When the sender isn't expected to receive its own stream, we use the
  // sender's own queue to track alive frames. Otherwise we use queue #0 to
  // track alive frames and shift all peer queues by 1.
  if (!enable_receive_own_stream_) {
    return owner_;
  }
  return 0;
}

bool DefaultVideoQualityAnalyzer::FrameInFlight::RemoveFrame() {
  if (!frame_) {
    return false;
@@ -37,6 +37,7 @@
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"
#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
#include "test/pc/e2e/analyzer/video/names_collection.h"
#include "test/testsupport/perf_test.h"
@@ -105,77 +106,6 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
  std::map<std::string, std::vector<uint16_t>> GetStreamFrames() const;

 private:
  // Represents the current state of a video stream.
  class StreamState {
   public:
    StreamState(size_t owner,
                size_t peers_count,
                bool enable_receive_own_stream,
                Timestamp stream_started_time)
        : owner_(owner),
          enable_receive_own_stream_(enable_receive_own_stream),
          stream_started_time_(stream_started_time),
          frame_ids_(enable_receive_own_stream ? peers_count + 1
                                               : peers_count) {}

    size_t owner() const { return owner_; }
    Timestamp stream_started_time() const { return stream_started_time_; }

    void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); }
    // Crashes if the state is empty. Guarantees that there can be no alive
    // frames that are not in the owner's queue.
    uint16_t PopFront(size_t peer);
    bool IsEmpty(size_t peer) const {
      return frame_ids_.IsEmpty(GetPeerQueueIndex(peer));
    }
    // Crashes if the state is empty.
    uint16_t Front(size_t peer) const {
      return frame_ids_.Front(GetPeerQueueIndex(peer)).value();
    }

    // When a new peer is added, all currently alive frames will be sent to it
    // as well, so we need to register them as expected by copying the owner's
    // head to the new head.
    void AddPeer() { frame_ids_.AddHead(GetAliveFramesQueueIndex()); }

    size_t GetAliveFramesCount() const {
      return frame_ids_.size(GetAliveFramesQueueIndex());
    }
    uint16_t MarkNextAliveFrameAsDead();

    void SetLastRenderedFrameTime(size_t peer, Timestamp time);
    absl::optional<Timestamp> last_rendered_frame_time(size_t peer) const;

   private:
    // Returns the index of the `frame_ids_` queue which is used for the
    // specified `peer_index`.
    size_t GetPeerQueueIndex(size_t peer_index) const;

    // Returns the index of the `frame_ids_` queue which is used to track
    // alive frames for this stream. A frame is alive if it still has its
    // VideoFrame payload in `captured_frames_in_flight_`.
    size_t GetAliveFramesQueueIndex() const;

    // Index of the owner. The owner's queue in `frame_ids_` keeps the alive
    // frames.
    const size_t owner_;
    const bool enable_receive_own_stream_;
    const Timestamp stream_started_time_;
    // To correctly determine dropped frames we have to know the sequence of
    // frames in each stream, so we keep a list of frame ids inside the stream.
    // This list is represented by a multi-head queue of frame ids with a
    // separate head for each receiver. When a frame is rendered, we pop ids
    // from the corresponding head until the id matches the rendered one. All
    // ids before the matched one can be considered dropped:
    //
    // | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 |
    //
    // If we receive the frame with id frame_id3, we pop frame_id1 and
    // frame_id2, consider those frames dropped, and then compare the received
    // frame with the one from `captured_frames_in_flight_` with id frame_id3.
    MultiHeadQueue<uint16_t> frame_ids_;
    std::map<size_t, Timestamp> last_rendered_frame_time_;
  };

  enum State { kNew, kActive, kStopped };

  struct ReceiverFrameStats {
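The AddPeer() contract described above (every frame that is still alive becomes expected for a newly added receiver) carries over unchanged to the extracted class added below. A minimal sketch, assuming the extracted header and an illustrative function name (not code from this CL):

    #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"

    void AddPeerSketch(webrtc::StreamState& state) {
      state.PushBack(/*frame_id=*/10);  // Frame 10 is alive and expected by
                                        // all existing receivers.
      state.AddPeer();                  // A new receiver joins mid-stream.
      // AddPeer() copies the alive-frames head, so frame 10 is now registered
      // as expected for the new receiver as well.
    }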
@@ -0,0 +1,104 @@
/*
 * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"

#include <map>

#include "absl/types/optional.h"
#include "api/units/timestamp.h"
#include "rtc_base/checks.h"

namespace webrtc {
namespace {

template <typename T>
absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
  auto it = map.find(key);
  if (it == map.end()) {
    return absl::nullopt;
  }
  return it->second;
}

}  // namespace

uint16_t StreamState::PopFront(size_t peer) {
  size_t peer_queue = GetPeerQueueIndex(peer);
  size_t alive_frames_queue = GetAliveFramesQueueIndex();
  absl::optional<uint16_t> frame_id = frame_ids_.PopFront(peer_queue);
  RTC_DCHECK(frame_id.has_value());

  // If the alive frames queue is longer than all the others, then also pop the
  // frame from it, because that frame has been received by all receivers.
  size_t alive_size = frame_ids_.size(alive_frames_queue);
  size_t other_size = 0;
  for (size_t i = 0; i < frame_ids_.readers_count(); ++i) {
    size_t cur_size = frame_ids_.size(i);
    if (i != alive_frames_queue && cur_size > other_size) {
      other_size = cur_size;
    }
  }
  // Pop the frame from the alive queue if the alive queue is the longest one.
  if (alive_size > other_size) {
    absl::optional<uint16_t> alive_frame_id =
        frame_ids_.PopFront(alive_frames_queue);
    RTC_DCHECK(alive_frame_id.has_value());
    RTC_DCHECK_EQ(frame_id.value(), alive_frame_id.value());
  }

  return frame_id.value();
}

uint16_t StreamState::MarkNextAliveFrameAsDead() {
  absl::optional<uint16_t> frame_id =
      frame_ids_.PopFront(GetAliveFramesQueueIndex());
  RTC_DCHECK(frame_id.has_value());
  return frame_id.value();
}

void StreamState::SetLastRenderedFrameTime(size_t peer, Timestamp time) {
  auto it = last_rendered_frame_time_.find(peer);
  if (it == last_rendered_frame_time_.end()) {
    last_rendered_frame_time_.insert({peer, time});
  } else {
    it->second = time;
  }
}

absl::optional<Timestamp> StreamState::last_rendered_frame_time(
    size_t peer) const {
  return MaybeGetValue(last_rendered_frame_time_, peer);
}

size_t StreamState::GetPeerQueueIndex(size_t peer_index) const {
  // When the sender isn't expected to receive its own stream, we use the
  // sender's own queue to track alive frames. Otherwise we use queue #0 to
  // track alive frames and shift all peer queues by 1. This means that when
  // `enable_receive_own_stream_` is true the peer's queue has index
  // `peer_index` + 1, and when it is false the peer's queue has index
  // `peer_index`.
  if (!enable_receive_own_stream_) {
    return peer_index;
  }
  return peer_index + 1;
}

size_t StreamState::GetAliveFramesQueueIndex() const {
  // When the sender isn't expected to receive its own stream, we use the
  // sender's own queue to track alive frames. Otherwise we use queue #0 to
  // track alive frames and shift all peer queues by 1.
  if (!enable_receive_own_stream_) {
    return owner_;
  }
  return 0;
}

}  // namespace webrtc
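To make the queue layout above concrete, here is a minimal, test-style sketch (not part of this CL; the function name and frame ids are illustrative). With two peers and `enable_receive_own_stream` disabled, the owner's head doubles as the alive-frames head, so a frame leaves the alive queue only once every receiver has popped it.

    #include "api/units/timestamp.h"
    #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"

    void StreamStateLayoutSketch() {
      webrtc::StreamState state(
          /*owner=*/0, /*peers_count=*/2,
          /*enable_receive_own_stream=*/false,
          /*stream_started_time=*/webrtc::Timestamp::Seconds(0));
      state.PushBack(/*frame_id=*/1);
      state.PushBack(/*frame_id=*/2);
      // Nothing has been received yet: both frames are alive.
      // state.GetAliveFramesCount() == 2

      // Peer 1 (the only receiver) renders frame 1. PopFront() removes it from
      // peer 1's head, and since the alive queue (the owner's head, because
      // receive-own-stream is off) is now the longest one, frame 1 is popped
      // from it as well.
      uint16_t popped = state.PopFront(/*peer=*/1);
      // popped == 1, state.GetAliveFramesCount() == 1
    }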
@@ -0,0 +1,95 @@
/*
 * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_
#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_

#include <map>

#include "absl/types/optional.h"
#include "api/units/timestamp.h"
#include "test/pc/e2e/analyzer/video/multi_head_queue.h"

namespace webrtc {

// Represents the current state of a video stream inside
// DefaultVideoQualityAnalyzer.
class StreamState {
 public:
  StreamState(size_t owner,
              size_t peers_count,
              bool enable_receive_own_stream,
              Timestamp stream_started_time)
      : owner_(owner),
        enable_receive_own_stream_(enable_receive_own_stream),
        stream_started_time_(stream_started_time),
        frame_ids_(enable_receive_own_stream ? peers_count + 1 : peers_count) {}

  size_t owner() const { return owner_; }
  Timestamp stream_started_time() const { return stream_started_time_; }

  void PushBack(uint16_t frame_id) { frame_ids_.PushBack(frame_id); }
  // Crashes if the state is empty. Guarantees that there can be no alive
  // frames that are not in the owner's queue.
  uint16_t PopFront(size_t peer);
  bool IsEmpty(size_t peer) const {
    return frame_ids_.IsEmpty(GetPeerQueueIndex(peer));
  }
  // Crashes if the state is empty.
  uint16_t Front(size_t peer) const {
    return frame_ids_.Front(GetPeerQueueIndex(peer)).value();
  }

  // When a new peer is added, all currently alive frames will be sent to it as
  // well, so we need to register them as expected by copying the owner's head
  // to the new head.
  void AddPeer() { frame_ids_.AddHead(GetAliveFramesQueueIndex()); }

  size_t GetAliveFramesCount() const {
    return frame_ids_.size(GetAliveFramesQueueIndex());
  }
  uint16_t MarkNextAliveFrameAsDead();

  void SetLastRenderedFrameTime(size_t peer, Timestamp time);
  absl::optional<Timestamp> last_rendered_frame_time(size_t peer) const;

 private:
  // Returns the index of the `frame_ids_` queue which is used for the
  // specified `peer_index`.
  size_t GetPeerQueueIndex(size_t peer_index) const;

  // Returns the index of the `frame_ids_` queue which is used to track alive
  // frames for this stream. A frame is alive if it still has its VideoFrame
  // payload in `captured_frames_in_flight_`.
  size_t GetAliveFramesQueueIndex() const;

  // Index of the owner. The owner's queue in `frame_ids_` keeps the alive
  // frames.
  const size_t owner_;
  const bool enable_receive_own_stream_;
  const Timestamp stream_started_time_;
  // To correctly determine dropped frames we have to know the sequence of
  // frames in each stream, so we keep a list of frame ids inside the stream.
  // This list is represented by a multi-head queue of frame ids with a
  // separate head for each receiver. When a frame is rendered, we pop ids from
  // the corresponding head until the id matches the rendered one. All ids
  // before the matched one can be considered dropped:
  //
  // | frame_id1 |->| frame_id2 |->| frame_id3 |->| frame_id4 |
  //
  // If we receive the frame with id frame_id3, we pop frame_id1 and frame_id2,
  // consider those frames dropped, and then compare the received frame with
  // the one from `FrameInFlight` with id frame_id3.
  MultiHeadQueue<uint16_t> frame_ids_;
  std::map<size_t, Timestamp> last_rendered_frame_time_;
};

}  // namespace webrtc

#endif  // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_STREAM_STATE_H_
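The class comment above describes the drop-detection pattern the analyzer runs when a frame is rendered. A minimal sketch of that loop, assuming illustrative names (`CountDroppedFramesBefore` is not a function from this CL; `peer` and `rendered_id` stand for values the surrounding analyzer logic would supply):

    #include <cstddef>
    #include <cstdint>

    #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"

    size_t CountDroppedFramesBefore(webrtc::StreamState& state,
                                    size_t peer,
                                    uint16_t rendered_id) {
      size_t dropped = 0;
      // Pop ids from this peer's head until the rendered frame's id is at the
      // front. Everything popped before the match was never rendered for this
      // peer, so it counts as dropped.
      while (!state.IsEmpty(peer) && state.Front(peer) != rendered_id) {
        state.PopFront(peer);
        ++dropped;
      }
      if (!state.IsEmpty(peer)) {
        state.PopFront(peer);  // Pop the rendered frame's id itself.
      }
      return dropped;
    }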