From dcc7e88cc79ab4f7aeb87c13f402e007e1320fd8 Mon Sep 17 00:00:00 2001
From: Sebastian Jansson
Date: Fri, 6 Apr 2018 17:31:38 +0200
Subject: [PATCH] Storing frame if encoder is paused.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Adds a pending frame to VideoStreamEncoder that is used to store frames
that are not sent because the encoder is paused. If the encoder is
resumed within 200 ms, the pending frame will be encoded and sent. This
ensures that resuming a stream instantly starts sending frames when
possible.

This also protects against a race between submitting the first frame
and enabling the encoder that caused flakiness in end-to-end tests when
using the task-queue-based congestion controller.

Bug: webrtc:8415
Change-Id: If4bd897187fbfdc4926855f39503230bdad4a93a
Reviewed-on: https://webrtc-review.googlesource.com/67141
Commit-Queue: Sebastian Jansson
Reviewed-by: Erik Språng
Cr-Commit-Position: refs/heads/master@{#22781}
---
 .../end_to_end_tests/call_operation_tests.cc | 16 +-----
 video/video_stream_encoder.cc                | 56 +++++++++++++------
 video/video_stream_encoder.h                 |  8 +++
 video/video_stream_encoder_unittest.cc       |  8 ++-
 4 files changed, 55 insertions(+), 33 deletions(-)

diff --git a/video/end_to_end_tests/call_operation_tests.cc b/video/end_to_end_tests/call_operation_tests.cc
index 495b658bf1..d90b85ef05 100644
--- a/video/end_to_end_tests/call_operation_tests.cc
+++ b/video/end_to_end_tests/call_operation_tests.cc
@@ -154,16 +154,7 @@ TEST_P(CallOperationEndToEndTest, RendersSingleDelayedFrame) {
   });
 }
 
-// TODO(bugs.webrtc.org/9060): Re-enable this test with the TaskQueue when it
-// is no longer flaky.
-class CallOperationEndToEndTestNoTaskQueueCongestionControl
-    : public CallOperationEndToEndTest {};
-INSTANTIATE_TEST_CASE_P(FieldTrials,
-                        CallOperationEndToEndTestNoTaskQueueCongestionControl,
-                        ::testing::Values("WebRTC-RoundRobinPacing/Disabled/",
-                                          "WebRTC-RoundRobinPacing/Enabled/"));
-TEST_P(CallOperationEndToEndTestNoTaskQueueCongestionControl,
-       TransmitsFirstFrame) {
+TEST_P(CallOperationEndToEndTest, TransmitsFirstFrame) {
   class Renderer : public rtc::VideoSinkInterface<VideoFrame> {
    public:
     Renderer() : event_(false, false) {}
@@ -219,10 +210,7 @@ TEST_P(CallOperationEndToEndTestNoTaskQueueCongestionControl,
   });
 }
 
-// TODO(bugs.webrtc.org/9060): Re-enable this test with the TaskQueue when it
-// is no longer flaky.
-TEST_P(CallOperationEndToEndTestNoTaskQueueCongestionControl,
-       ObserversEncodedFrames) {
+TEST_P(CallOperationEndToEndTest, ObserversEncodedFrames) {
   class EncodedFrameTestObserver : public EncodedFrameObserver {
    public:
     EncodedFrameTestObserver()
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 6474fffe74..8c966d8818 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -44,17 +44,6 @@ const int kMaxFramerateFps = 120;
 // to try and achieve desired bitrate.
 const int kMaxInitialFramedrop = 4;
 
-uint32_t MaximumFrameSizeForBitrate(uint32_t kbps) {
-  if (kbps > 0) {
-    if (kbps < 300 /* qvga */) {
-      return 320 * 240;
-    } else if (kbps < 500 /* vga */) {
-      return 640 * 480;
-    }
-  }
-  return std::numeric_limits<uint32_t>::max();
-}
-
 // Initial limits for kBalanced degradation preference.
 int MinFps(int pixels) {
   if (pixels <= 320 * 240) {
@@ -661,7 +650,7 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) {
       posted_frames_waiting_for_encode_.fetch_sub(1);
   RTC_DCHECK_GT(posted_frames_waiting_for_encode, 0);
   if (posted_frames_waiting_for_encode == 1) {
-    EncodeVideoFrame(incoming_frame, post_time_us);
+    MaybeEncodeVideoFrame(incoming_frame, post_time_us);
   } else {
     // There is a newer frame in flight. Do not encode this frame.
     RTC_LOG(LS_VERBOSE)
@@ -712,8 +701,8 @@ void VideoStreamEncoder::TraceFrameDropEnd() {
   encoder_paused_and_dropped_frame_ = false;
 }
 
-void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
-                                          int64_t time_when_posted_us) {
+void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame,
+                                               int64_t time_when_posted_us) {
   RTC_DCHECK_RUN_ON(&encoder_queue_);
 
   if (pre_encode_callback_)
@@ -731,9 +720,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
                      << ", texture=" << last_frame_info_->is_texture << ".";
   }
 
-  if (initial_rampup_ < kMaxInitialFramedrop &&
-      video_frame.size() >
-          MaximumFrameSizeForBitrate(encoder_start_bitrate_bps_ / 1000)) {
+  if (DropDueToSize(video_frame.size())) {
     RTC_LOG(LS_INFO) << "Dropping frame. Too large for target bitrate.";
     int count = GetConstAdaptCounter().ResolutionCount(kQuality);
     AdaptDown(kQuality);
@@ -741,6 +728,8 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
       stats_proxy_->OnInitialQualityResolutionAdaptDown();
     }
     ++initial_rampup_;
+    pending_frame_ = video_frame;
+    pending_frame_post_time_us_ = time_when_posted_us;
     return;
   }
   initial_rampup_ = kMaxInitialFramedrop;
@@ -758,9 +747,20 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
   }
 
   if (EncoderPaused()) {
-    TraceFrameDropStart();
+    if (pending_frame_)
+      TraceFrameDropStart();
+    pending_frame_ = video_frame;
+    pending_frame_post_time_us_ = time_when_posted_us;
     return;
   }
+
+  pending_frame_.reset();
+  EncodeVideoFrame(video_frame, time_when_posted_us);
+}
+
+void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
+                                          int64_t time_when_posted_us) {
+  RTC_DCHECK_RUN_ON(&encoder_queue_);
   TraceFrameDropEnd();
 
   VideoFrame out_frame(video_frame);
@@ -891,6 +891,26 @@ void VideoStreamEncoder::OnBitrateUpdated(uint32_t bitrate_bps,
                      << (video_is_suspended ? "suspended" : "not suspended");
"suspended" : "not suspended"); stats_proxy_->OnSuspendChange(video_is_suspended); } + if (video_suspension_changed && !video_is_suspended && pending_frame_ && + !DropDueToSize(pending_frame_->size())) { + const int64_t kPendingFrameTimeoutUs = 200000; + if (rtc::TimeMicros() - pending_frame_post_time_us_ < + kPendingFrameTimeoutUs) + EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_); + pending_frame_.reset(); + } +} + +bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const { + if (initial_rampup_ < kMaxInitialFramedrop && + encoder_start_bitrate_bps_ > 0) { + if (encoder_start_bitrate_bps_ < 300000 /* qvga */) { + return pixel_count > 320 * 240; + } else if (encoder_start_bitrate_bps_ < 500000 /* vga */) { + return pixel_count > 640 * 480; + } + } + return false; } void VideoStreamEncoder::AdaptDown(AdaptReason reason) { diff --git a/video/video_stream_encoder.h b/video/video_stream_encoder.h index f51654189a..a81160e8c5 100644 --- a/video/video_stream_encoder.h +++ b/video/video_stream_encoder.h @@ -144,8 +144,14 @@ class VideoStreamEncoder : public rtc::VideoSinkInterface, void OnFrame(const VideoFrame& video_frame) override; void OnDiscardedFrame() override; + void MaybeEncodeVideoFrame(const VideoFrame& frame, + int64_t time_when_posted_in_ms); + void EncodeVideoFrame(const VideoFrame& frame, int64_t time_when_posted_in_ms); + // Indicates wether frame should be dropped because the pixel count is too + // large for the current bitrate configuration. + bool DropDueToSize(uint32_t pixel_count) const RTC_RUN_ON(&encoder_queue_); // Implements EncodedImageCallback. EncodedImageCallback::Result OnEncodedImage( @@ -281,6 +287,8 @@ class VideoStreamEncoder : public rtc::VideoSinkInterface, int64_t last_frame_log_ms_ RTC_GUARDED_BY(incoming_frame_race_checker_); int captured_frame_count_ RTC_GUARDED_BY(&encoder_queue_); int dropped_frame_count_ RTC_GUARDED_BY(&encoder_queue_); + rtc::Optional pending_frame_ RTC_GUARDED_BY(&encoder_queue_); + int64_t pending_frame_post_time_us_ RTC_GUARDED_BY(&encoder_queue_); VideoBitrateAllocationObserver* bitrate_observer_ RTC_GUARDED_BY(&encoder_queue_); diff --git a/video/video_stream_encoder_unittest.cc b/video/video_stream_encoder_unittest.cc index 09d259abf5..3d117a79bb 100644 --- a/video/video_stream_encoder_unittest.cc +++ b/video/video_stream_encoder_unittest.cc @@ -703,12 +703,18 @@ TEST_F(VideoStreamEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) { // Dropped since no target bitrate has been set. rtc::Event frame_destroyed_event(false, false); video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); + // Fill the pending frame cache with a new frame so the previous frame is + // dropped. + video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs)); video_stream_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); - video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); + // The pending frame should be received. WaitForEncodedFrame(2); + video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr)); + + WaitForEncodedFrame(3); video_stream_encoder_->Stop(); }