Avoid a potential deadlock due to frame queuing in corruption detection.

In particular, some platforms have a limited pool of frame buffers in the
capturer stack, so we must avoid stashing raw frames in the frame
instrumentation generator when those frames may be dropped. This change
limits the size of the generator's queue and defers adding a frame until we
know it will be sent to the encoder.
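
For illustration, here is a minimal sketch of the drop-oldest bounded queue
that this change introduces. BoundedCaptureQueue, Push(), and max_pending are
illustrative stand-ins, not actual WebRTC API; the real logic lives in
FrameInstrumentationGenerator::OnCapturedFrame() with the cap
kMaxPendingFrames, as the diff below shows.

  #include <cstddef>
  #include <queue>
  #include <utility>

  // Simplified stand-in for the generator's capture queue. Capping it at
  // `max_pending` entries releases the oldest raw frames early, so a
  // platform's limited capturer frame-buffer pool cannot be drained by
  // frames the encoder ends up dropping.
  template <typename Frame>
  class BoundedCaptureQueue {
   public:
    explicit BoundedCaptureQueue(std::size_t max_pending)
        : max_pending_(max_pending) {}

    void Push(Frame frame) {
      // Drop the oldest pending frames instead of growing without bound.
      while (frames_.size() >= max_pending_) {
        frames_.pop();
      }
      frames_.push(std::move(frame));
    }

   private:
    const std::size_t max_pending_;
    std::queue<Frame> frames_;
  };

The second half of the fix is where the queue is fed: VideoStreamEncoder now
calls OnCapturedFrame() from EncodeVideoFrame() instead of OnFrame(), so only
frames that actually reach the encoder are queued.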

Bug: webrtc:358039777
Change-Id: I054ae53dd5e6ac6a22da39c5049f47788561e77a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/368641
Reviewed-by: Åsa Persson <asapersson@webrtc.org>
Commit-Queue: Erik Språng <sprang@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43432}
commit 9aeed0c5f4
parent c181432772
Author: Erik Språng
Date:   2024-11-20 14:01:33 +01:00
Committed by: WebRTC LUCI CQ

4 changed files with 102 additions and 3 deletions


@@ -36,6 +36,10 @@
 namespace webrtc {
 namespace {
+// Avoid holding on to frames that might have been dropped by the encoder, as
+// that can lead to frame buffer pools draining.
+constexpr size_t kMaxPendingFrames = 3;
+
 std::optional<FilterSettings> GetCorruptionFilterSettings(
     const EncodedImage& encoded_image,
     VideoCodecType video_codec_type,
@@ -75,6 +79,9 @@ FrameInstrumentationGenerator::FrameInstrumentationGenerator(
     : video_codec_type_(video_codec_type) {}
 
 void FrameInstrumentationGenerator::OnCapturedFrame(VideoFrame frame) {
+  while (captured_frames_.size() >= kMaxPendingFrames) {
+    captured_frames_.pop();
+  }
   captured_frames_.push(frame);
 }


@@ -11,6 +11,7 @@
 #include "video/corruption_detection/frame_instrumentation_generator.h"
 
 #include <cstdint>
+#include <memory>
 #include <optional>
 #include <vector>
@@ -686,5 +687,40 @@ TEST(FrameInstrumentationGeneratorTest, GetterAndSetterOperatesAsExpected) {
 #endif  // GTEST_HAS_DEATH_TEST
 }
 
+TEST(FrameInstrumentationGeneratorTest, QueuesAtMostThreeInputFrames) {
+  auto generator = std::make_unique<FrameInstrumentationGenerator>(
+      VideoCodecType::kVideoCodecVP8);
+
+  bool frames_destroyed[4] = {};
+  class TestBuffer : public webrtc::I420Buffer {
+   public:
+    TestBuffer(int width, int height, bool* frame_destroyed_indicator)
+        : I420Buffer(width, height),
+          frame_destroyed_indicator_(frame_destroyed_indicator) {}
+
+   private:
+    friend class RefCountedObject<TestBuffer>;
+    ~TestBuffer() override { *frame_destroyed_indicator_ = true; }
+
+    bool* frame_destroyed_indicator_;
+  };
+
+  // Insert four frames; the first one should expire and be released.
+  for (int i = 0; i < 4; ++i) {
+    generator->OnCapturedFrame(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(rtc::make_ref_counted<TestBuffer>(
+                kDefaultScaledWidth, kDefaultScaledHeight,
+                &frames_destroyed[i]))
+            .set_rtp_timestamp(1 + (33 * i))
+            .build());
+  }
+
+  EXPECT_THAT(frames_destroyed, ElementsAre(true, false, false, false));
+
+  generator.reset();
+  EXPECT_THAT(frames_destroyed, ElementsAre(true, true, true, true));
+}
+
 }  // namespace
 }  // namespace webrtc


@@ -1560,9 +1560,6 @@ void VideoStreamEncoder::OnFrame(Timestamp post_time,
   encoder_stats_observer_->OnIncomingFrame(incoming_frame.width(),
                                            incoming_frame.height());
 
-  if (frame_instrumentation_generator_) {
-    frame_instrumentation_generator_->OnCapturedFrame(incoming_frame);
-  }
   ++captured_frame_count_;
   bool cwnd_frame_drop =
       cwnd_frame_drop_interval_ &&
@@ -2032,6 +2029,10 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
   frame_encode_metadata_writer_.OnEncodeStarted(out_frame);
 
+  if (frame_instrumentation_generator_) {
+    frame_instrumentation_generator_->OnCapturedFrame(out_frame);
+  }
+
   const int32_t encode_status = encoder_->Encode(out_frame, &next_frame_types_);
 
   was_encode_called_since_last_initialization_ = true;


@@ -1702,6 +1702,61 @@ TEST_F(VideoStreamEncoderTest, PopulatesFrameInstrumentationDataWhenSetTo) {
   video_stream_encoder_->Stop();
 }
 
+TEST_F(VideoStreamEncoderTest,
+       FrameInstrumentationGeneratorDoesNotStashDroppedFrames) {
+  // Set a low rate but a high resolution, so that the input frame is dropped
+  // and released even with corruption detection enabled.
+  const DataRate kLowRate = DataRate::KilobitsPerSec(300);
+  codec_width_ = 1280;
+  codec_height_ = 720;
+  video_send_config_.encoder_settings.enable_frame_instrumentation_generator =
+      true;
+  ConfigureEncoder(video_encoder_config_.Copy());
+  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+      kLowRate, kLowRate, kLowRate, 0, 0, 0);
+
+  rtc::Event frame_destroyed_event;
+  // Insert two frames, so that the first one isn't stored in the encoder
+  // queue.
+  video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event));
+  video_source_.IncomingCapturedFrame(CreateFrame(34, /*event=*/nullptr));
+  EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeout));
+  EXPECT_FALSE(sink_.GetLastFrameInstrumentationData().has_value());
+
+  video_stream_encoder_->Stop();
+}
+
+TEST_F(VideoStreamEncoderTest,
+       FrameInstrumentationGeneratorHandlesQueuedFrames) {
+  video_send_config_.encoder_settings.enable_frame_instrumentation_generator =
+      true;
+  ConfigureEncoder(video_encoder_config_.Copy());
+
+  // Mark the stream as suspended.
+  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+      DataRate::Zero(), DataRate::Zero(), DataRate::Zero(), 0, 0, 0);
+  video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+  // We need a QP for the encoded frame.
+  fake_encoder_.SetEncodedImageData(EncodedImageBuffer::Create(
+      kCodedFrameVp8Qp25, sizeof(kCodedFrameVp8Qp25)));
+
+  // Insert a frame that should be treated as dropped due to the suspended
+  // state.
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(1, codec_width_, codec_height_));
+  ExpectDroppedFrame();
+
+  // Resume with an increased bitrate budget, which processes stashed frames.
+  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
+      kTargetBitrate, kTargetBitrate, kTargetBitrate, 0, 0, 0);
+  WaitForEncodedFrame(1);
+  EXPECT_TRUE(sink_.GetLastFrameInstrumentationData().has_value());
+
+  video_stream_encoder_->Stop();
+}
+
 TEST_F(VideoStreamEncoderTest,
        DoesNotPopulateFrameInstrumentationDataWhenSetNotTo) {
   video_send_config_.encoder_settings.enable_frame_instrumentation_generator =