From 5390c4814d7880ea79edcd55596ea25e0d9b97ad Mon Sep 17 00:00:00 2001 From: sprang Date: Wed, 7 Jun 2017 06:17:49 -0700 Subject: [PATCH] Revert of Periodically update codec bit/frame rate settings. (patchset #8 id:140001 of https://codereview.webrtc.org/2883963002/ ) Reason for revert: Breaks some Call perf tests that are not run by the try bots.... Original issue's description: > Fix bug in vie_encoder.cc which caused channel parameters not to be updated at regular intervals, as it was intended. > > That however exposes a bunch of failed test, so this CL also fixed a few other things: > * FakeEncoder should trust the configured FPS value rather than guesstimating itself based on the realtime clock, so as not to completely undershoot targets in offline mode. Also, compensate for key-frame overshoots when outputting delta frames. > * FrameDropper should not assuming incoming frame rate is 0 if no frames have been seen. > * Fix a bunch of test cases that started failing because they were relying on the fake encoder undershooting. > * Fix test > > BUG=7664 > > Review-Url: https://codereview.webrtc.org/2883963002 > Cr-Commit-Position: refs/heads/master@{#18473} > Committed: https://chromium.googlesource.com/external/webrtc/+/6431e21da672a5f3bbf166d3d4d98b171d015706 TBR=stefan@webrtc.org,holmer@google.com # Skipping CQ checks because original CL landed less than 1 days ago. NOPRESUBMIT=true NOTREECHECKS=true NOTRY=true BUG=7664 Review-Url: https://codereview.webrtc.org/2923993002 Cr-Commit-Position: refs/heads/master@{#18475} --- webrtc/media/engine/simulcast.cc | 102 +++-- .../video_coding/media_optimization.cc | 8 +- webrtc/modules/video_coding/video_sender.cc | 5 - webrtc/test/fake_encoder.cc | 87 ++-- webrtc/test/fake_encoder.h | 8 +- webrtc/video/video_send_stream_tests.cc | 37 +- webrtc/video/vie_encoder.cc | 8 +- webrtc/video/vie_encoder_unittest.cc | 380 +++++++----------- 8 files changed, 259 insertions(+), 376 deletions(-) diff --git a/webrtc/media/engine/simulcast.cc b/webrtc/media/engine/simulcast.cc index f1cd2ced45..4dd8c3147b 100644 --- a/webrtc/media/engine/simulcast.cc +++ b/webrtc/media/engine/simulcast.cc @@ -49,7 +49,7 @@ const SimulcastFormat kSimulcastFormats[] = { {0, 0, 1, 200, 150, 30} }; -const int kDefaultScreenshareSimulcastStreams = 2; +const int kMaxScreenshareSimulcastStreams = 2; // Multiway: Number of temporal layers for each simulcast stream, for maximum // possible number of simulcast streams |kMaxSimulcastStreams|. The array @@ -176,8 +176,12 @@ std::vector GetSimulcastConfig(size_t max_streams, bool is_screencast) { size_t num_simulcast_layers; if (is_screencast) { - num_simulcast_layers = - UseSimulcastScreenshare() ? kDefaultScreenshareSimulcastStreams : 1; + if (UseSimulcastScreenshare()) { + num_simulcast_layers = + std::min(max_streams, kMaxScreenshareSimulcastStreams); + } else { + num_simulcast_layers = 1; + } } else { num_simulcast_layers = FindSimulcastMaxLayers(width, height); } @@ -194,33 +198,60 @@ std::vector GetSimulcastConfig(size_t max_streams, std::vector streams; streams.resize(num_simulcast_layers); - if (!is_screencast) { + if (is_screencast) { + ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault(); + // For legacy screenshare in conference mode, tl0 and tl1 bitrates are + // piggybacked on the VideoCodec struct as target and max bitrates, + // respectively. See eg. webrtc::VP8EncoderImpl::SetRates(). 
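+    // The single temporal_layer_thresholds_bps entry pushed below (tl0's
+    // bitrate) implies a two-temporal-layer stream, and max_framerate is
+    // capped at 5 fps, matching the legacy screenshare behavior.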
+ streams[0].width = width; + streams[0].height = height; + streams[0].max_qp = max_qp; + streams[0].max_framerate = 5; + streams[0].min_bitrate_bps = kMinVideoBitrateKbps * 1000; + streams[0].target_bitrate_bps = config.tl0_bitrate_kbps * 1000; + streams[0].max_bitrate_bps = config.tl1_bitrate_kbps * 1000; + streams[0].temporal_layer_thresholds_bps.clear(); + streams[0].temporal_layer_thresholds_bps.push_back(config.tl0_bitrate_kbps * + 1000); + + // With simulcast enabled, add another spatial layer. This one will have a + // more normal layout, with the regular 3 temporal layer pattern and no fps + // restrictions. The base simulcast stream will still use legacy setup. + if (num_simulcast_layers == kMaxScreenshareSimulcastStreams) { + // Add optional upper simulcast layer. + // Lowest temporal layers of a 3 layer setup will have 40% of the total + // bitrate allocation for that stream. Make sure the gap between the + // target of the lower stream and first temporal layer of the higher one + // is at most 2x the bitrate, so that upswitching is not hampered by + // stalled bitrate estimates. + int max_bitrate_bps = 2 * ((streams[0].target_bitrate_bps * 10) / 4); + // Cap max bitrate so it isn't overly high for the given resolution. + max_bitrate_bps = std::min( + max_bitrate_bps, FindSimulcastMaxBitrateBps(width, height)); + + streams[1].width = width; + streams[1].height = height; + streams[1].max_qp = max_qp; + streams[1].max_framerate = max_framerate; + // Three temporal layers means two thresholds. + streams[1].temporal_layer_thresholds_bps.resize(2); + streams[1].min_bitrate_bps = streams[0].target_bitrate_bps * 2; + streams[1].target_bitrate_bps = max_bitrate_bps; + streams[1].max_bitrate_bps = max_bitrate_bps; + } + } else { // Format width and height has to be divisible by |2 ^ number_streams - 1|. width = NormalizeSimulcastSize(width, num_simulcast_layers); height = NormalizeSimulcastSize(height, num_simulcast_layers); - } - // Add simulcast sub-streams from lower resolution to higher resolutions. - // Add simulcast streams, from highest resolution (|s| = number_streams -1) - // to lowest resolution at |s| = 0. - for (size_t s = num_simulcast_layers - 1;; --s) { - streams[s].width = width; - streams[s].height = height; - // TODO(pbos): Fill actual temporal-layer bitrate thresholds. - streams[s].max_qp = max_qp; - if (is_screencast && s == 0) { - ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault(); - // For legacy screenshare in conference mode, tl0 and tl1 bitrates are - // piggybacked on the VideoCodec struct as target and max bitrates, - // respectively. See eg. webrtc::VP8EncoderImpl::SetRates(). - streams[s].min_bitrate_bps = kMinVideoBitrateKbps * 1000; - streams[s].target_bitrate_bps = config.tl0_bitrate_kbps * 1000; - streams[s].max_bitrate_bps = config.tl1_bitrate_kbps * 1000; - streams[s].temporal_layer_thresholds_bps.clear(); - streams[s].temporal_layer_thresholds_bps.push_back( - config.tl0_bitrate_kbps * 1000); - streams[s].max_framerate = 5; - } else { + // Add simulcast sub-streams from lower resolution to higher resolutions. + // Add simulcast streams, from highest resolution (|s| = number_streams -1) + // to lowest resolution at |s| = 0. + for (size_t s = num_simulcast_layers - 1;; --s) { + streams[s].width = width; + streams[s].height = height; + // TODO(pbos): Fill actual temporal-layer bitrate thresholds. 
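+      // Each stream gets (default number of temporal layers - 1) threshold
+      // slots and resolution-dependent min/target/max bitrates; width and
+      // height are then halved for the next lower stream.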
+ streams[s].max_qp = max_qp; streams[s].temporal_layer_thresholds_bps.resize( kDefaultConferenceNumberOfTemporalLayers[s] - 1); streams[s].max_bitrate_bps = FindSimulcastMaxBitrateBps(width, height); @@ -228,20 +259,19 @@ std::vector GetSimulcastConfig(size_t max_streams, FindSimulcastTargetBitrateBps(width, height); streams[s].min_bitrate_bps = FindSimulcastMinBitrateBps(width, height); streams[s].max_framerate = max_framerate; - } - if (!is_screencast) { width /= 2; height /= 2; - } - if (s == 0) - break; - } - // Spend additional bits to boost the max stream. - int bitrate_left_bps = max_bitrate_bps - GetTotalMaxBitrateBps(streams); - if (bitrate_left_bps > 0) { - streams.back().max_bitrate_bps += bitrate_left_bps; + if (s == 0) + break; + } + + // Spend additional bits to boost the max stream. + int bitrate_left_bps = max_bitrate_bps - GetTotalMaxBitrateBps(streams); + if (bitrate_left_bps > 0) { + streams.back().max_bitrate_bps += bitrate_left_bps; + } } return streams; diff --git a/webrtc/modules/video_coding/media_optimization.cc b/webrtc/modules/video_coding/media_optimization.cc index f0cbd62c9b..ccd23f6ae4 100644 --- a/webrtc/modules/video_coding/media_optimization.cc +++ b/webrtc/modules/video_coding/media_optimization.cc @@ -118,13 +118,7 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate) { // Update encoding rates following protection settings. float target_video_bitrate_kbps = static_cast(video_target_bitrate_) / 1000.0f; - float framerate = incoming_frame_rate_; - if (framerate == 0.0) { - // No framerate estimate available, use configured max framerate instead. - framerate = user_frame_rate_; - } - - frame_dropper_->SetRates(target_video_bitrate_kbps, framerate); + frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_); return video_target_bitrate_; } diff --git a/webrtc/modules/video_coding/video_sender.cc b/webrtc/modules/video_coding/video_sender.cc index 50d56206bc..0b54d13b29 100644 --- a/webrtc/modules/video_coding/video_sender.cc +++ b/webrtc/modules/video_coding/video_sender.cc @@ -103,11 +103,6 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec, numLayers = sendCodec->VP8().numberOfTemporalLayers; } else if (sendCodec->codecType == kVideoCodecVP9) { numLayers = sendCodec->VP9().numberOfTemporalLayers; - } else if (sendCodec->codecType == kVideoCodecGeneric && - sendCodec->numberOfSimulcastStreams > 0) { - // This is mainly for unit testing, disabling frame dropping. - // TODO(sprang): Add a better way to disable frame dropping. 
- numLayers = sendCodec->simulcastStream[0].numberOfTemporalLayers; } else { numLayers = 1; } diff --git a/webrtc/test/fake_encoder.cc b/webrtc/test/fake_encoder.cc index 962cd657a1..fce12c61a8 100644 --- a/webrtc/test/fake_encoder.cc +++ b/webrtc/test/fake_encoder.cc @@ -24,15 +24,11 @@ namespace webrtc { namespace test { -const int kKeyframeSizeFactor = 10; - FakeEncoder::FakeEncoder(Clock* clock) : clock_(clock), callback_(nullptr), - configured_input_framerate_(-1), max_target_bitrate_kbps_(-1), - pending_keyframe_(true), - debt_bytes_(0) { + last_encode_time_ms_(0) { // Generate some arbitrary not-all-zero data for (size_t i = 0; i < sizeof(encoded_buffer_); ++i) { encoded_buffer_[i] = static_cast(i); @@ -51,8 +47,6 @@ int32_t FakeEncoder::InitEncode(const VideoCodec* config, rtc::CritScope cs(&crit_sect_); config_ = *config; target_bitrate_.SetBitrate(0, 0, config_.startBitrate * 1000); - configured_input_framerate_ = config_.maxFramerate; - pending_keyframe_ = true; return 0; } @@ -65,10 +59,9 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, EncodedImageCallback* callback; uint32_t target_bitrate_sum_kbps; int max_target_bitrate_kbps; + int64_t last_encode_time_ms; size_t num_encoded_bytes; - int framerate; VideoCodecMode mode; - bool keyframe; { rtc::CritScope cs(&crit_sect_); max_framerate = config_.maxFramerate; @@ -79,33 +72,42 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, callback = callback_; target_bitrate_sum_kbps = target_bitrate_.get_sum_kbps(); max_target_bitrate_kbps = max_target_bitrate_kbps_; + last_encode_time_ms = last_encode_time_ms_; num_encoded_bytes = sizeof(encoded_buffer_); mode = config_.mode; - if (configured_input_framerate_ > 0) { - framerate = configured_input_framerate_; - } else { - framerate = max_framerate; - } - keyframe = pending_keyframe_; - pending_keyframe_ = false; - } - - for (FrameType frame_type : *frame_types) { - if (frame_type == kVideoFrameKey) { - keyframe = true; - break; - } } + int64_t time_now_ms = clock_->TimeInMilliseconds(); + const bool first_encode = (last_encode_time_ms == 0); RTC_DCHECK_GT(max_framerate, 0); + int64_t time_since_last_encode_ms = 1000 / max_framerate; + if (!first_encode) { + // For all frames but the first we can estimate the display time by looking + // at the display time of the previous frame. + time_since_last_encode_ms = time_now_ms - last_encode_time_ms; + } + if (time_since_last_encode_ms > 3 * 1000 / max_framerate) { + // Rudimentary check to make sure we don't widely overshoot bitrate target + // when resuming encoding after a suspension. 
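+    // E.g. with max_framerate = 30, the budgeting window is clamped to
+    // 3 * 1000 / 30 = 100 ms, so at most three frame intervals' worth of
+    // bits are granted to the frame that resumes encoding.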
+ time_since_last_encode_ms = 3 * 1000 / max_framerate; + } - size_t bitrate = target_bitrate_sum_kbps; - bitrate = - std::max(bitrate, static_cast(simulcast_streams[0].minBitrate)); - if (max_target_bitrate_kbps > 0) - bitrate = std::min(bitrate, static_cast(max_target_bitrate_kbps)); + size_t bits_available = + static_cast(target_bitrate_sum_kbps * time_since_last_encode_ms); + size_t min_bits = static_cast(simulcast_streams[0].minBitrate * + time_since_last_encode_ms); - size_t bits_available = target_bitrate_sum_kbps * 1000 / framerate; + if (bits_available < min_bits) + bits_available = min_bits; + size_t max_bits = + static_cast(max_target_bitrate_kbps * time_since_last_encode_ms); + if (max_bits > 0 && max_bits < bits_available) + bits_available = max_bits; + + { + rtc::CritScope cs(&crit_sect_); + last_encode_time_ms_ = time_now_ms; + } RTC_DCHECK_GT(num_simulcast_streams, 0); for (unsigned char i = 0; i < num_simulcast_streams; ++i) { @@ -114,27 +116,18 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image, specifics.codecType = kVideoCodecGeneric; specifics.codecSpecific.generic.simulcast_idx = i; size_t min_stream_bits = static_cast( - (simulcast_streams[i].minBitrate * 1000) / framerate); + simulcast_streams[i].minBitrate * time_since_last_encode_ms); size_t max_stream_bits = static_cast( - (simulcast_streams[i].maxBitrate * 1000) / framerate); + simulcast_streams[i].maxBitrate * time_since_last_encode_ms); size_t stream_bits = (bits_available > max_stream_bits) ? max_stream_bits : bits_available; size_t stream_bytes = (stream_bits + 7) / 8; - if (keyframe) { + if (first_encode) { // The first frame is a key frame and should be larger. - // Store the overshoot bytes and distribute them over the coming frames, - // so that we on average meet the bitrate target. - debt_bytes_ += (kKeyframeSizeFactor - 1) * stream_bytes; - stream_bytes *= kKeyframeSizeFactor; - } else { - if (debt_bytes_ > 0) { - // Pay at most half of the frame size for old debts. - size_t payment_size = std::min(stream_bytes / 2, debt_bytes_); - debt_bytes_ -= payment_size; - stream_bytes -= payment_size; - } + // TODO(holmer): The FakeEncoder should store the bits_available between + // encodes so that it can compensate for oversized frames. 
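+      // The key frame is simply made 10x the nominal size here; unlike the
+      // debt-based scheme removed by this revert, the overshoot is not paid
+      // back on later delta frames, so short-term bitrate may exceed the
+      // target around key frames.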
+ stream_bytes *= 10; } - if (stream_bytes > num_encoded_bytes) stream_bytes = num_encoded_bytes; @@ -182,7 +175,6 @@ int32_t FakeEncoder::SetRateAllocation(const BitrateAllocation& rate_allocation, uint32_t framerate) { rtc::CritScope cs(&crit_sect_); target_bitrate_ = rate_allocation; - configured_input_framerate_ = framerate; return 0; } @@ -191,11 +183,6 @@ const char* FakeEncoder::ImplementationName() const { return kImplementationName; } -int FakeEncoder::GetConfiguredInputFramerate() const { - rtc::CritScope cs(&crit_sect_); - return configured_input_framerate_; -} - FakeH264Encoder::FakeH264Encoder(Clock* clock) : FakeEncoder(clock), callback_(nullptr), idr_counter_(0) { FakeEncoder::RegisterEncodeCompleteCallback(this); diff --git a/webrtc/test/fake_encoder.h b/webrtc/test/fake_encoder.h index 4487c52e1e..e3878ecd4a 100644 --- a/webrtc/test/fake_encoder.h +++ b/webrtc/test/fake_encoder.h @@ -45,7 +45,6 @@ class FakeEncoder : public VideoEncoder { int32_t SetRateAllocation(const BitrateAllocation& rate_allocation, uint32_t framerate) override; const char* ImplementationName() const override; - int GetConfiguredInputFramerate() const; static const char* kImplementationName; @@ -54,16 +53,11 @@ class FakeEncoder : public VideoEncoder { VideoCodec config_ GUARDED_BY(crit_sect_); EncodedImageCallback* callback_ GUARDED_BY(crit_sect_); BitrateAllocation target_bitrate_ GUARDED_BY(crit_sect_); - int configured_input_framerate_ GUARDED_BY(crit_sect_); int max_target_bitrate_kbps_ GUARDED_BY(crit_sect_); - bool pending_keyframe_ GUARDED_BY(crit_sect_); + int64_t last_encode_time_ms_ GUARDED_BY(crit_sect_); rtc::CriticalSection crit_sect_; uint8_t encoded_buffer_[100000]; - - // Current byte debt to be payed over a number of frames. - // The debt is acquired by keyframes overshooting the bitrate target. - size_t debt_bytes_; }; class FakeH264Encoder : public FakeEncoder, public EncodedImageCallback { diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc index 97b2f6acb3..de3da25385 100644 --- a/webrtc/video/video_send_stream_tests.cc +++ b/webrtc/video/video_send_stream_tests.cc @@ -937,12 +937,10 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, void TriggerLossReport(const RTPHeader& header) { // Send lossy receive reports to trigger FEC enabling. - const int kLossPercent = 5; - if (packet_count_++ % (100 / kLossPercent) != 0) { + if (packet_count_++ % 2 != 0) { + // Receive statistics reporting having lost 50% of the packets. FakeReceiveStatistics lossy_receive_stats( - kVideoSendSsrcs[0], header.sequenceNumber, - (packet_count_ * (100 - kLossPercent)) / 100, // Cumulative lost. - static_cast((255 * kLossPercent) / 100)); // Loss percent. + kVideoSendSsrcs[0], header.sequenceNumber, packet_count_ / 2, 127); RTCPSender rtcp_sender(false, Clock::GetRealTimeClock(), &lossy_receive_stats, nullptr, nullptr, transport_adapter_.get()); @@ -995,35 +993,6 @@ void VideoSendStreamTest::TestPacketFragmentationSize(VideoFormat format, // Make sure there is at least one extension header, to make the RTP // header larger than the base length of 12 bytes. EXPECT_FALSE(send_config->rtp.extensions.empty()); - - // Setup screen content disables frame dropping which makes this easier. 
- class VideoStreamFactory - : public VideoEncoderConfig::VideoStreamFactoryInterface { - public: - explicit VideoStreamFactory(size_t num_temporal_layers) - : num_temporal_layers_(num_temporal_layers) { - EXPECT_GT(num_temporal_layers, 0u); - } - - private: - std::vector CreateEncoderStreams( - int width, - int height, - const VideoEncoderConfig& encoder_config) override { - std::vector streams = - test::CreateVideoStreams(width, height, encoder_config); - for (VideoStream& stream : streams) { - stream.temporal_layer_thresholds_bps.resize(num_temporal_layers_ - - 1); - } - return streams; - } - const size_t num_temporal_layers_; - }; - - encoder_config->video_stream_factory = - new rtc::RefCountedObject(2); - encoder_config->content_type = VideoEncoderConfig::ContentType::kScreen; } void PerformTest() override { diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc index 6e48f97e66..203d1c405a 100644 --- a/webrtc/video/vie_encoder.cc +++ b/webrtc/video/vie_encoder.cc @@ -211,6 +211,7 @@ class ViEEncoder::VideoSourceProxy { // the used degradation_preference. switch (degradation_preference_) { case VideoSendStream::DegradationPreference::kBalanced: + FALLTHROUGH(); case VideoSendStream::DegradationPreference::kMaintainFramerate: wants.max_framerate_fps = std::numeric_limits::max(); break; @@ -676,14 +677,13 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame, int64_t now_ms = clock_->TimeInMilliseconds(); if (pending_encoder_reconfiguration_) { ReconfigureEncoder(); - last_parameters_update_ms_.emplace(now_ms); } else if (!last_parameters_update_ms_ || now_ms - *last_parameters_update_ms_ >= vcm::VCMProcessTimer::kDefaultProcessIntervalMs) { video_sender_.UpdateChannelParemeters(rate_allocator_.get(), bitrate_observer_); - last_parameters_update_ms_.emplace(now_ms); } + last_parameters_update_ms_.emplace(now_ms); if (EncoderPaused()) { TraceFrameDropStart(); @@ -806,6 +806,7 @@ void ViEEncoder::AdaptDown(AdaptReason reason) { int max_downgrades = 0; switch (degradation_preference_) { case VideoSendStream::DegradationPreference::kBalanced: + FALLTHROUGH(); case VideoSendStream::DegradationPreference::kMaintainFramerate: max_downgrades = kMaxCpuResolutionDowngrades; if (downgrade_requested && @@ -841,6 +842,7 @@ void ViEEncoder::AdaptDown(AdaptReason reason) { switch (degradation_preference_) { case VideoSendStream::DegradationPreference::kBalanced: + FALLTHROUGH(); case VideoSendStream::DegradationPreference::kMaintainFramerate: // Scale down resolution. if (!source_proxy_->RequestResolutionLowerThan( @@ -888,6 +890,7 @@ void ViEEncoder::AdaptUp(AdaptReason reason) { switch (degradation_preference_) { case VideoSendStream::DegradationPreference::kBalanced: + FALLTHROUGH(); case VideoSendStream::DegradationPreference::kMaintainFramerate: if (adapt_up_requested && adaptation_request.input_pixel_count_ <= @@ -907,6 +910,7 @@ void ViEEncoder::AdaptUp(AdaptReason reason) { switch (degradation_preference_) { case VideoSendStream::DegradationPreference::kBalanced: + FALLTHROUGH(); case VideoSendStream::DegradationPreference::kMaintainFramerate: { // Scale up resolution. 
int pixel_count = adaptation_request.input_pixel_count_; diff --git a/webrtc/video/vie_encoder_unittest.cc b/webrtc/video/vie_encoder_unittest.cc index 4d72a826f2..7a54fbe925 100644 --- a/webrtc/video/vie_encoder_unittest.cc +++ b/webrtc/video/vie_encoder_unittest.cc @@ -218,7 +218,6 @@ class ViEEncoderTest : public ::testing::Test { : video_send_config_(VideoSendStream::Config(nullptr)), codec_width_(320), codec_height_(240), - max_framerate_(30), fake_encoder_(), stats_proxy_(new MockableSendStatisticsProxy( Clock::GetRealTimeClock(), @@ -235,17 +234,7 @@ class ViEEncoderTest : public ::testing::Test { VideoEncoderConfig video_encoder_config; test::FillEncoderConfiguration(1, &video_encoder_config); - video_encoder_config.video_stream_factory = - new rtc::RefCountedObject(1); video_encoder_config_ = video_encoder_config.Copy(); - - // Framerate limit is specified by the VideoStreamFactory. - std::vector streams = - video_encoder_config.video_stream_factory->CreateEncoderStreams( - codec_width_, codec_height_, video_encoder_config); - max_framerate_ = streams[0].max_framerate; - fake_clock_.SetTimeMicros(1234); - ConfigureEncoder(std::move(video_encoder_config), true /* nack_enabled */); } @@ -268,8 +257,7 @@ class ViEEncoderTest : public ::testing::Test { void ResetEncoder(const std::string& payload_name, size_t num_streams, size_t num_temporal_layers, - bool nack_enabled, - bool screenshare) { + bool nack_enabled) { video_send_config_.encoder_settings.payload_name = payload_name; VideoEncoderConfig video_encoder_config; @@ -277,9 +265,6 @@ class ViEEncoderTest : public ::testing::Test { video_encoder_config.max_bitrate_bps = kTargetBitrateBps; video_encoder_config.video_stream_factory = new rtc::RefCountedObject(num_temporal_layers); - video_encoder_config.content_type = - screenshare ? 
VideoEncoderConfig::ContentType::kScreen - : VideoEncoderConfig::ContentType::kRealtimeVideo; ConfigureEncoder(std::move(video_encoder_config), nack_enabled); } @@ -339,33 +324,6 @@ class ViEEncoderTest : public ::testing::Test { EXPECT_FALSE(wants.target_pixel_count); } - void WaitForEncodedFrame(int64_t expected_ntp_time) { - sink_.WaitForEncodedFrame(expected_ntp_time); - fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_); - } - - bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, int64_t timeout_ms) { - bool ok = sink_.TimedWaitForEncodedFrame(expected_ntp_time, timeout_ms); - fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_); - return ok; - } - - void WaitForEncodedFrame(uint32_t expected_width, uint32_t expected_height) { - sink_.WaitForEncodedFrame(expected_width, expected_height); - fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_); - } - - void ExpectDroppedFrame() { - sink_.ExpectDroppedFrame(); - fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_); - } - - bool WaitForFrame(int64_t timeout_ms) { - bool ok = sink_.WaitForFrame(timeout_ms); - fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerSec / max_framerate_); - return ok; - } - class TestEncoder : public test::FakeEncoder { public: TestEncoder() @@ -474,21 +432,13 @@ class ViEEncoderTest : public ::testing::Test { : test_encoder_(test_encoder), encoded_frame_event_(false, false) {} void WaitForEncodedFrame(int64_t expected_ntp_time) { - EXPECT_TRUE( - TimedWaitForEncodedFrame(expected_ntp_time, kDefaultTimeoutMs)); - } - - bool TimedWaitForEncodedFrame(int64_t expected_ntp_time, - int64_t timeout_ms) { uint32_t timestamp = 0; - if (!encoded_frame_event_.Wait(timeout_ms)) - return false; + EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs)); { rtc::CritScope lock(&crit_); timestamp = last_timestamp_; } test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp); - return true; } void WaitForEncodedFrame(uint32_t expected_width, @@ -567,20 +517,18 @@ class ViEEncoderTest : public ::testing::Test { VideoEncoderConfig video_encoder_config_; int codec_width_; int codec_height_; - int max_framerate_; TestEncoder fake_encoder_; std::unique_ptr stats_proxy_; TestSink sink_; AdaptingFrameForwarder video_source_; std::unique_ptr vie_encoder_; - rtc::ScopedFakeClock fake_clock_; }; TEST_F(ViEEncoderTest, EncodeOneFrame) { vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); rtc::Event frame_destroyed_event(false, false); video_source_.IncomingCapturedFrame(CreateFrame(1, &frame_destroyed_event)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); EXPECT_TRUE(frame_destroyed_event.Wait(kDefaultTimeoutMs)); vie_encoder_->Stop(); } @@ -594,14 +542,14 @@ TEST_F(ViEEncoderTest, DropsFramesBeforeFirstOnBitrateUpdated) { vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); vie_encoder_->Stop(); } TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) { vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); vie_encoder_->OnBitrateUpdated(0, 0, 0); // Dropped since bitrate is zero. 
@@ -609,20 +557,20 @@ TEST_F(ViEEncoderTest, DropsFramesWhenRateSetToZero) { vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); vie_encoder_->Stop(); } TEST_F(ViEEncoderTest, DropsFramesWithSameOrOldNtpTimestamp) { vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // This frame will be dropped since it has the same ntp timestamp. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); vie_encoder_->Stop(); } @@ -630,7 +578,7 @@ TEST_F(ViEEncoderTest, DropsFrameAfterStop) { vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); vie_encoder_->Stop(); sink_.SetExpectNoFrames(); @@ -644,13 +592,13 @@ TEST_F(ViEEncoderTest, DropsPendingFramesOnSlowEncode) { fake_encoder_.BlockNextEncode(); video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // Here, the encoder thread will be blocked in the TestEncoder waiting for a // call to ContinueEncode. video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); video_source_.IncomingCapturedFrame(CreateFrame(3, nullptr)); fake_encoder_.ContinueEncode(); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); vie_encoder_->Stop(); } @@ -661,7 +609,7 @@ TEST_F(ViEEncoderTest, ConfigureEncoderTriggersOnEncoderConfigurationChanged) { // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // The encoder will have been configured once when the first frame is // received. EXPECT_EQ(1, sink_.number_of_reconfigurations()); @@ -674,7 +622,7 @@ TEST_F(ViEEncoderTest, ConfigureEncoderTriggersOnEncoderConfigurationChanged) { // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); EXPECT_EQ(2, sink_.number_of_reconfigurations()); EXPECT_EQ(9999, sink_.last_min_transmit_bitrate()); @@ -686,7 +634,7 @@ TEST_F(ViEEncoderTest, FrameResolutionChangeReconfigureEncoder) { // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // The encoder will have been configured once. EXPECT_EQ(1, sink_.number_of_reconfigurations()); EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width); @@ -697,7 +645,7 @@ TEST_F(ViEEncoderTest, FrameResolutionChangeReconfigureEncoder) { // Capture a frame with a higher resolution and wait for it to synchronize // with the encoder thread. 
video_source_.IncomingCapturedFrame(CreateFrame(2, nullptr)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); EXPECT_EQ(codec_width_, fake_encoder_.codec_config().width); EXPECT_EQ(codec_height_, fake_encoder_.codec_config().height); EXPECT_EQ(2, sink_.number_of_reconfigurations()); @@ -709,12 +657,12 @@ TEST_F(ViEEncoderTest, Vp8ResilienceIsOffFor1S1TLWithNackEnabled) { const bool kNackEnabled = true; const size_t kNumStreams = 1; const size_t kNumTl = 1; - ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false); + ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled); vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // The encoder have been configured once when the first frame is received. EXPECT_EQ(1, sink_.number_of_reconfigurations()); EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType); @@ -729,12 +677,12 @@ TEST_F(ViEEncoderTest, Vp8ResilienceIsOffFor2S1TlWithNackEnabled) { const bool kNackEnabled = true; const size_t kNumStreams = 2; const size_t kNumTl = 1; - ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false); + ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled); vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // The encoder have been configured once when the first frame is received. EXPECT_EQ(1, sink_.number_of_reconfigurations()); EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType); @@ -749,12 +697,12 @@ TEST_F(ViEEncoderTest, Vp8ResilienceIsOnFor1S1TLWithNackDisabled) { const bool kNackEnabled = false; const size_t kNumStreams = 1; const size_t kNumTl = 1; - ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false); + ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled); vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // The encoder have been configured once when the first frame is received. EXPECT_EQ(1, sink_.number_of_reconfigurations()); EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType); @@ -769,12 +717,12 @@ TEST_F(ViEEncoderTest, Vp8ResilienceIsOnFor1S2TlWithNackEnabled) { const bool kNackEnabled = true; const size_t kNumStreams = 1; const size_t kNumTl = 2; - ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled, false); + ResetEncoder("VP8", kNumStreams, kNumTl, kNackEnabled); vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0); // Capture a frame and wait for it to synchronize with the encoder thread. video_source_.IncomingCapturedFrame(CreateFrame(1, nullptr)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // The encoder have been configured once when the first frame is received. 
EXPECT_EQ(1, sink_.number_of_reconfigurations()); EXPECT_EQ(kVideoCodecVP8, fake_encoder_.codec_config().codecType); @@ -818,7 +766,7 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) { for (int i = 1; i <= kMaxDowngrades; ++i) { video_source_.IncomingCapturedFrame( CreateFrame(i, frame_width, frame_height)); - WaitForEncodedFrame(i); + sink_.WaitForEncodedFrame(i); vie_encoder_->TriggerCpuOveruse(); @@ -837,7 +785,7 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) { rtc::VideoSinkWants current_wants = video_source_.sink_wants(); video_source_.IncomingCapturedFrame( CreateFrame(kMaxDowngrades + 1, frame_width, frame_height)); - WaitForEncodedFrame(kMaxDowngrades + 1); + sink_.WaitForEncodedFrame(kMaxDowngrades + 1); vie_encoder_->TriggerCpuOveruse(); EXPECT_EQ(video_source_.sink_wants().target_pixel_count, current_wants.target_pixel_count); @@ -872,14 +820,14 @@ TEST_F(ViEEncoderTest, SinkWantsStoredByDegradationPreference) { video_source_.IncomingCapturedFrame( CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight)); - WaitForEncodedFrame(frame_timestamp); + sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; // Trigger CPU overuse. vie_encoder_->TriggerCpuOveruse(); video_source_.IncomingCapturedFrame( CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight)); - WaitForEncodedFrame(frame_timestamp); + sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; // Default degradation preference is maintain-framerate, so will lower max @@ -909,7 +857,7 @@ TEST_F(ViEEncoderTest, SinkWantsStoredByDegradationPreference) { vie_encoder_->TriggerCpuOveruse(); new_video_source.IncomingCapturedFrame( CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight)); - WaitForEncodedFrame(frame_timestamp); + sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; // Some framerate constraint should be set. @@ -927,7 +875,7 @@ TEST_F(ViEEncoderTest, SinkWantsStoredByDegradationPreference) { vie_encoder_->TriggerCpuOveruse(); new_video_source.IncomingCapturedFrame( CreateFrame(frame_timestamp, kFrameWidth, kFrameHeight)); - WaitForEncodedFrame(frame_timestamp); + sink_.WaitForEncodedFrame(frame_timestamp); frame_timestamp += kFrameIntervalMs; // Still no degradation. @@ -961,7 +909,7 @@ TEST_F(ViEEncoderTest, StatsTracksQualityAdaptationStats) { const int kWidth = 1280; const int kHeight = 720; video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); VideoSendStream::Stats stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_EQ(0, stats.number_of_quality_adapt_changes); @@ -969,7 +917,7 @@ TEST_F(ViEEncoderTest, StatsTracksQualityAdaptationStats) { // Trigger adapt down. vie_encoder_->TriggerQualityLow(); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); stats = stats_proxy_->GetStats(); EXPECT_TRUE(stats.bw_limited_resolution); @@ -978,7 +926,7 @@ TEST_F(ViEEncoderTest, StatsTracksQualityAdaptationStats) { // Trigger adapt up. 
vie_encoder_->TriggerQualityHigh(); video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); @@ -994,7 +942,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStats) { const int kWidth = 1280; const int kHeight = 720; video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); VideoSendStream::Stats stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.cpu_limited_resolution); EXPECT_EQ(0, stats.number_of_cpu_adapt_changes); @@ -1002,7 +950,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStats) { // Trigger CPU overuse. vie_encoder_->TriggerCpuOveruse(); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); stats = stats_proxy_->GetStats(); EXPECT_TRUE(stats.cpu_limited_resolution); @@ -1011,7 +959,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStats) { // Trigger CPU normal use. vie_encoder_->TriggerCpuNormalUsage(); video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.cpu_limited_resolution); @@ -1027,7 +975,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) { const int kWidth = 1280; const int kHeight = 720; video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); VideoSendStream::Stats stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_FALSE(stats.cpu_limited_resolution); @@ -1036,7 +984,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) { // Trigger CPU overuse. vie_encoder_->TriggerCpuOveruse(); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_TRUE(stats.cpu_limited_resolution); @@ -1049,7 +997,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) { VideoSendStream::DegradationPreference::kMaintainFramerate); new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_TRUE(stats.cpu_limited_resolution); @@ -1061,7 +1009,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) { VideoSendStream::DegradationPreference::kDegradationDisabled); new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight)); - WaitForEncodedFrame(4); + sink_.WaitForEncodedFrame(4); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_FALSE(stats.cpu_limited_resolution); @@ -1073,7 +1021,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) { VideoSendStream::DegradationPreference::kMaintainFramerate); new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight)); - WaitForEncodedFrame(5); + sink_.WaitForEncodedFrame(5); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_TRUE(stats.cpu_limited_resolution); @@ -1082,7 +1030,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsCpuAdaptation) { // Trigger CPU normal use. 
vie_encoder_->TriggerCpuNormalUsage(); new_video_source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight)); - WaitForEncodedFrame(6); + sink_.WaitForEncodedFrame(6); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_FALSE(stats.cpu_limited_resolution); @@ -1098,7 +1046,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsQualityAdaptation) { const int kWidth = 1280; const int kHeight = 720; video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); VideoSendStream::Stats stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_FALSE(stats.bw_limited_framerate); @@ -1110,7 +1058,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsQualityAdaptation) { VideoSendStream::DegradationPreference::kBalanced); new_video_source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_FALSE(stats.bw_limited_framerate); @@ -1119,7 +1067,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsQualityAdaptation) { // Trigger adapt down. vie_encoder_->TriggerQualityLow(); new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); stats = stats_proxy_->GetStats(); EXPECT_TRUE(stats.bw_limited_resolution); EXPECT_FALSE(stats.bw_limited_framerate); @@ -1130,7 +1078,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsQualityAdaptation) { VideoSendStream::DegradationPreference::kBalanced); new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight)); - WaitForEncodedFrame(4); + sink_.WaitForEncodedFrame(4); stats = stats_proxy_->GetStats(); EXPECT_TRUE(stats.bw_limited_resolution); EXPECT_FALSE(stats.bw_limited_framerate); @@ -1142,7 +1090,7 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsQualityAdaptation) { VideoSendStream::DegradationPreference::kMaintainResolution); new_video_source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight)); - WaitForEncodedFrame(5); + sink_.WaitForEncodedFrame(5); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.bw_limited_resolution); EXPECT_FALSE(stats.bw_limited_framerate); @@ -1159,7 +1107,7 @@ TEST_F(ViEEncoderTest, QualityAdaptationStatsAreResetWhenScalerIsDisabled) { const int kHeight = 720; video_source_.set_adaptation_enabled(true); video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1167,7 +1115,7 @@ TEST_F(ViEEncoderTest, QualityAdaptationStatsAreResetWhenScalerIsDisabled) { // Trigger adapt down. vie_encoder_->TriggerQualityLow(); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1175,7 +1123,7 @@ TEST_F(ViEEncoderTest, QualityAdaptationStatsAreResetWhenScalerIsDisabled) { // Trigger overuse. 
vie_encoder_->TriggerCpuOveruse(); video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1187,7 +1135,7 @@ TEST_F(ViEEncoderTest, QualityAdaptationStatsAreResetWhenScalerIsDisabled) { VideoSendStream::DegradationPreference::kMaintainFramerate); video_source_.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight)); - WaitForEncodedFrame(4); + sink_.WaitForEncodedFrame(4); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1203,7 +1151,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { int sequence = 1; video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); VideoSendStream::Stats stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.cpu_limited_resolution); EXPECT_FALSE(stats.cpu_limited_framerate); @@ -1212,7 +1160,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { // Trigger CPU overuse, should now adapt down. vie_encoder_->TriggerCpuOveruse(); video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); EXPECT_TRUE(stats.cpu_limited_resolution); EXPECT_FALSE(stats.cpu_limited_framerate); @@ -1226,7 +1174,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { new_video_source.IncomingCapturedFrame( CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); EXPECT_TRUE(stats.cpu_limited_resolution); EXPECT_FALSE(stats.cpu_limited_framerate); @@ -1238,7 +1186,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { VideoSendStream::DegradationPreference::kMaintainResolution); new_video_source.IncomingCapturedFrame( CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); // Not adapted at first. EXPECT_FALSE(stats.cpu_limited_resolution); @@ -1255,7 +1203,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { new_video_source.IncomingCapturedFrame( CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); // Framerate now adapted. 
stats = stats_proxy_->GetStats(); @@ -1269,7 +1217,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { VideoSendStream::DegradationPreference::kDegradationDisabled); new_video_source.IncomingCapturedFrame( CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.cpu_limited_resolution); @@ -1291,7 +1239,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { &video_source_, VideoSendStream::DegradationPreference::kMaintainFramerate); video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); EXPECT_TRUE(stats.cpu_limited_resolution); EXPECT_FALSE(stats.cpu_limited_framerate); @@ -1300,7 +1248,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { // Trigger CPU normal usage. vie_encoder_->TriggerCpuNormalUsage(); video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.cpu_limited_resolution); EXPECT_FALSE(stats.cpu_limited_framerate); @@ -1312,7 +1260,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { VideoSendStream::DegradationPreference::kMaintainResolution); new_video_source.IncomingCapturedFrame( CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); // Disabled, since we previously switched the source to disabled. EXPECT_FALSE(stats.cpu_limited_resolution); @@ -1323,7 +1271,7 @@ TEST_F(ViEEncoderTest, StatsTracksCpuAdaptationStatsWhenSwitchingSource) { vie_encoder_->TriggerCpuNormalUsage(); new_video_source.IncomingCapturedFrame( CreateFrame(sequence, kWidth, kHeight)); - WaitForEncodedFrame(sequence++); + sink_.WaitForEncodedFrame(sequence++); stats = stats_proxy_->GetStats(); EXPECT_FALSE(stats.cpu_limited_resolution); EXPECT_FALSE(stats.cpu_limited_framerate); @@ -1339,7 +1287,7 @@ TEST_F(ViEEncoderTest, StatsTracksPreferredBitrate) { const int kWidth = 1280; const int kHeight = 720; video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); VideoSendStream::Stats stats = stats_proxy_->GetStats(); EXPECT_EQ(video_encoder_config_.max_bitrate_bps, @@ -1357,13 +1305,13 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) { VerifyNoLimitation(video_source_.sink_wants()); video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); // Trigger scale down. vie_encoder_->TriggerQualityLow(); video_source_.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); // Expect a scale down. EXPECT_TRUE(video_source_.sink_wants().max_pixel_count); @@ -1378,7 +1326,7 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) { // Trigger scale down. vie_encoder_->TriggerQualityLow(); new_video_source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); // Expect no scaling. 
EXPECT_EQ(std::numeric_limits::max(), @@ -1387,7 +1335,7 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) { // Trigger scale up. vie_encoder_->TriggerQualityHigh(); new_video_source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight)); - WaitForEncodedFrame(4); + sink_.WaitForEncodedFrame(4); // Expect nothing to change, still no scaling. EXPECT_EQ(std::numeric_limits::max(), @@ -1407,7 +1355,7 @@ TEST_F(ViEEncoderTest, SkipsSameAdaptDownRequest_MaintainFramerateMode) { &source, VideoSendStream::DegradationPreference::kMaintainFramerate); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); VerifyNoLimitation(source.sink_wants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1439,7 +1387,7 @@ TEST_F(ViEEncoderTest, NoChangeForInitialNormalUsage_MaintainFramerateMode) { &source, VideoSendStream::DegradationPreference::kMaintainFramerate); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(kWidth, kHeight); + sink_.WaitForEncodedFrame(kWidth, kHeight); VerifyNoLimitation(source.sink_wants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1464,7 +1412,7 @@ TEST_F(ViEEncoderTest, NoChangeForInitialNormalUsage_MaintainResolutionMode) { &source, VideoSendStream::DegradationPreference::kMaintainResolution); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(kWidth, kHeight); + sink_.WaitForEncodedFrame(kWidth, kHeight); VerifyNoLimitation(source.sink_wants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1517,7 +1465,7 @@ TEST_F(ViEEncoderTest, AdaptsResolutionForLowQuality_MaintainFramerateMode) { &source, VideoSendStream::DegradationPreference::kMaintainFramerate); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(1); + sink_.WaitForEncodedFrame(1); VerifyNoLimitation(source.sink_wants()); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -1525,7 +1473,7 @@ TEST_F(ViEEncoderTest, AdaptsResolutionForLowQuality_MaintainFramerateMode) { // Trigger adapt down, expect scaled down resolution. vie_encoder_->TriggerQualityLow(); source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes); @@ -1598,7 +1546,7 @@ TEST_F(ViEEncoderTest, DoesNotScaleBelowSetResolutionLimit) { int downscales = 0; for (size_t i = 1; i <= kNumFrames; i++) { video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight)); - WaitForEncodedFrame(i); + sink_.WaitForEncodedFrame(i); // Trigger scale down. 
rtc::VideoSinkWants last_wants = video_source_.sink_wants(); @@ -1629,7 +1577,7 @@ TEST_F(ViEEncoderTest, &source, VideoSendStream::DegradationPreference::kMaintainFramerate); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(kWidth, kHeight); + sink_.WaitForEncodedFrame(kWidth, kHeight); VerifyNoLimitation(source.sink_wants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1637,7 +1585,7 @@ TEST_F(ViEEncoderTest, // Trigger adapt down, expect scaled down resolution. vie_encoder_->TriggerCpuOveruse(); source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1645,7 +1593,7 @@ TEST_F(ViEEncoderTest, // Trigger adapt up, expect no restriction. vie_encoder_->TriggerCpuNormalUsage(); source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(kWidth, kHeight); + sink_.WaitForEncodedFrame(kWidth, kHeight); VerifyNoLimitation(source.sink_wants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(2, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1653,7 +1601,7 @@ TEST_F(ViEEncoderTest, // Trigger adapt down, expect scaled down resolution. vie_encoder_->TriggerCpuOveruse(); source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight)); - WaitForEncodedFrame(4); + sink_.WaitForEncodedFrame(4); VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes); @@ -1682,7 +1630,7 @@ TEST_F(ViEEncoderTest, &source, VideoSendStream::DegradationPreference::kMaintainFramerate); source.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight)); - WaitForEncodedFrame(kWidth, kHeight); + sink_.WaitForEncodedFrame(kWidth, kHeight); VerifyNoLimitation(source.sink_wants()); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); @@ -1692,7 +1640,7 @@ TEST_F(ViEEncoderTest, // Trigger cpu adapt down, expect scaled down resolution (960x540). vie_encoder_->TriggerCpuOveruse(); source.IncomingCapturedFrame(CreateFrame(2, kWidth, kHeight)); - WaitForEncodedFrame(2); + sink_.WaitForEncodedFrame(2); VerifyFpsMaxResolutionLt(source.sink_wants(), kWidth * kHeight); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); @@ -1702,7 +1650,7 @@ TEST_F(ViEEncoderTest, // Trigger cpu adapt down, expect scaled down resolution (640x360). vie_encoder_->TriggerCpuOveruse(); source.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight)); - WaitForEncodedFrame(3); + sink_.WaitForEncodedFrame(3); VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants()); rtc::VideoSinkWants last_wants = source.sink_wants(); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); @@ -1713,7 +1661,7 @@ TEST_F(ViEEncoderTest, // Trigger cpu adapt down, max cpu downgrades reached, expect no change. 
vie_encoder_->TriggerCpuOveruse(); source.IncomingCapturedFrame(CreateFrame(4, kWidth, kHeight)); - WaitForEncodedFrame(4); + sink_.WaitForEncodedFrame(4); VerifyFpsEqResolutionEq(source.sink_wants(), last_wants); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_FALSE(stats_proxy_->GetStats().bw_limited_resolution); @@ -1723,7 +1671,7 @@ TEST_F(ViEEncoderTest, // Trigger quality adapt down, expect scaled down resolution (480x270). vie_encoder_->TriggerQualityLow(); source.IncomingCapturedFrame(CreateFrame(5, kWidth, kHeight)); - WaitForEncodedFrame(5); + sink_.WaitForEncodedFrame(5); VerifyFpsMaxResolutionLt(source.sink_wants(), source.last_wants()); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); @@ -1733,7 +1681,7 @@ TEST_F(ViEEncoderTest, // Trigger cpu adapt up, expect upscaled resolution (640x360). vie_encoder_->TriggerCpuNormalUsage(); source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight)); - WaitForEncodedFrame(6); + sink_.WaitForEncodedFrame(6); VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); EXPECT_TRUE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); @@ -1743,7 +1691,7 @@ TEST_F(ViEEncoderTest, // Trigger cpu adapt up, expect upscaled resolution (960x540). vie_encoder_->TriggerCpuNormalUsage(); source.IncomingCapturedFrame(CreateFrame(7, kWidth, kHeight)); - WaitForEncodedFrame(7); + sink_.WaitForEncodedFrame(7); VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants()); last_wants = source.sink_wants(); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); @@ -1754,7 +1702,7 @@ TEST_F(ViEEncoderTest, // Trigger cpu adapt up, no cpu downgrades, expect no change (960x540). vie_encoder_->TriggerCpuNormalUsage(); source.IncomingCapturedFrame(CreateFrame(8, kWidth, kHeight)); - WaitForEncodedFrame(8); + sink_.WaitForEncodedFrame(8); VerifyFpsEqResolutionEq(source.sink_wants(), last_wants); EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution); EXPECT_TRUE(stats_proxy_->GetStats().bw_limited_resolution); @@ -1764,7 +1712,7 @@ TEST_F(ViEEncoderTest, // Trigger quality adapt up, expect no restriction (1280x720). 
   vie_encoder_->TriggerQualityHigh();
   source.IncomingCapturedFrame(CreateFrame(9, kWidth, kHeight));
-  WaitForEncodedFrame(kWidth, kHeight);
+  sink_.WaitForEncodedFrame(kWidth, kHeight);
   VerifyFpsMaxResolutionGt(source.sink_wants(), source.last_wants());
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1776,21 +1724,21 @@ TEST_F(ViEEncoderTest,
 }

 TEST_F(ViEEncoderTest, CpuLimitedHistogramIsReported) {
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   const int kWidth = 640;
   const int kHeight = 360;

-  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
-
   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-    WaitForEncodedFrame(i);
+    sink_.WaitForEncodedFrame(i);
   }

   vie_encoder_->TriggerCpuOveruse();
   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(
         SendStatisticsProxy::kMinRequiredMetricsSamples + i, kWidth, kHeight));
-    WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples + i);
+    sink_.WaitForEncodedFrame(SendStatisticsProxy::kMinRequiredMetricsSamples +
+                              i);
   }

   vie_encoder_->Stop();
@@ -1814,7 +1762,7 @@ TEST_F(ViEEncoderTest, CpuLimitedHistogramIsNotReportedForDisabledDegradation) {
   for (int i = 1; i <= SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-    WaitForEncodedFrame(i);
+    sink_.WaitForEncodedFrame(i);
   }

   vie_encoder_->Stop();
@@ -1845,25 +1793,26 @@ TEST_F(ViEEncoderTest, CallsBitrateObserver) {
   const int64_t kStartTimeMs = 1;
   video_source_.IncomingCapturedFrame(
       CreateFrame(kStartTimeMs, codec_width_, codec_height_));
-  WaitForEncodedFrame(kStartTimeMs);
+  sink_.WaitForEncodedFrame(kStartTimeMs);

   // Not called on second frame.
   EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
       .Times(0);
   video_source_.IncomingCapturedFrame(
       CreateFrame(kStartTimeMs + 1, codec_width_, codec_height_));
-  WaitForEncodedFrame(kStartTimeMs + 1);
+  sink_.WaitForEncodedFrame(kStartTimeMs + 1);

   // Called after a process interval.
   const int64_t kProcessIntervalMs =
       vcm::VCMProcessTimer::kDefaultProcessIntervalMs;
-  fake_clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerMillisec *
-                                (kProcessIntervalMs + (1000 / kDefaultFps)));
+  // TODO(sprang): ViEEncoder should die and/or get injectable clock.
+  // Sleep for one processing interval plus one frame to avoid flakiness.
+  SleepMs(kProcessIntervalMs + 1000 / kDefaultFps);
   EXPECT_CALL(bitrate_observer, OnBitrateAllocationUpdated(expected_bitrate))
       .Times(1);
   video_source_.IncomingCapturedFrame(CreateFrame(
       kStartTimeMs + kProcessIntervalMs, codec_width_, codec_height_));
-  WaitForEncodedFrame(kStartTimeMs + kProcessIntervalMs);
+  sink_.WaitForEncodedFrame(kStartTimeMs + kProcessIntervalMs);

   vie_encoder_->Stop();
 }
@@ -1877,7 +1826,7 @@ TEST_F(ViEEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));

   // Expect to drop this frame, the wait should time out.
-  ExpectDroppedFrame();
+  sink_.ExpectDroppedFrame();

   // Expect the sink_wants to specify a scaled frame.
   EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
@@ -1889,7 +1838,7 @@ TEST_F(ViEEncoderTest, DropsFramesAndScalesWhenBitrateIsTooLow) {
       CreateFrame(2, kWidth * 3 / 4, kHeight * 3 / 4));

   // Expect to drop this frame, the wait should time out.
-  ExpectDroppedFrame();
+  sink_.ExpectDroppedFrame();

   EXPECT_LT(video_source_.sink_wants().max_pixel_count, last_pixel_count);

@@ -1906,11 +1855,11 @@ TEST_F(ViEEncoderTest, NumberOfDroppedFramesLimitedWhenBitrateIsTooLow) {
   int i;
   for (i = 1; i <= kMaxInitialFramedrop; ++i) {
     video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-    ExpectDroppedFrame();
+    sink_.ExpectDroppedFrame();
   }
   // The n+1th frame should not be dropped, even though it's size is too large.
   video_source_.IncomingCapturedFrame(CreateFrame(i, kWidth, kHeight));
-  WaitForEncodedFrame(i);
+  sink_.WaitForEncodedFrame(i);

   // Expect the sink_wants to specify a scaled frame.
   EXPECT_LT(video_source_.sink_wants().max_pixel_count, kWidth * kHeight);
@@ -1930,7 +1879,7 @@ TEST_F(ViEEncoderTest, InitialFrameDropOffWithMaintainResolutionPreference) {
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));

   // Frame should not be dropped, even if it's too large.
-  WaitForEncodedFrame(1);
+  sink_.WaitForEncodedFrame(1);

   vie_encoder_->Stop();
 }
@@ -1947,7 +1896,7 @@ TEST_F(ViEEncoderTest, InitialFrameDropOffWhenEncoderDisabledScaling) {
   video_source_.IncomingCapturedFrame(CreateFrame(1, kWidth, kHeight));

   // Frame should not be dropped, even if it's too large.
-  WaitForEncodedFrame(1);
+  sink_.WaitForEncodedFrame(1);

   vie_encoder_->Stop();
   fake_encoder_.SetQualityScaling(true);
@@ -1968,7 +1917,7 @@ TEST_F(ViEEncoderTest,
   // Trigger adapt down, too small frame, expect no change.
   source.IncomingCapturedFrame(
       CreateFrame(1, kTooSmallWidth, kTooSmallHeight));
-  WaitForEncodedFrame(1);
+  sink_.WaitForEncodedFrame(1);
   vie_encoder_->TriggerCpuOveruse();
   VerifyNoLimitation(source.sink_wants());
   EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
@@ -1980,12 +1929,12 @@ TEST_F(ViEEncoderTest,
 TEST_F(ViEEncoderTest, FailingInitEncodeDoesntCauseCrash) {
   fake_encoder_.ForceInitEncodeFailure(true);
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
-  ResetEncoder("VP8", 2, 1, true, false);
+  ResetEncoder("VP8", 2, 1, true);
   const int kFrameWidth = 1280;
   const int kFrameHeight = 720;
   video_source_.IncomingCapturedFrame(
       CreateFrame(1, kFrameWidth, kFrameHeight));
-  ExpectDroppedFrame();
+  sink_.ExpectDroppedFrame();
   vie_encoder_->Stop();
 }

@@ -2001,28 +1950,29 @@ TEST_F(ViEEncoderTest, AdaptsResolutionOnOveruse_MaintainFramerateMode) {
   video_source_.IncomingCapturedFrame(
       CreateFrame(1, kFrameWidth, kFrameHeight));
-  WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+  sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);

   // Trigger CPU overuse, downscale by 3/4.
   vie_encoder_->TriggerCpuOveruse();
   video_source_.IncomingCapturedFrame(
       CreateFrame(2, kFrameWidth, kFrameHeight));
-  WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
+  sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);

   // Trigger CPU normal use, return to original resolution.
   vie_encoder_->TriggerCpuNormalUsage();
   video_source_.IncomingCapturedFrame(
       CreateFrame(3, kFrameWidth, kFrameHeight));
-  WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+  sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);

   vie_encoder_->Stop();
 }

 TEST_F(ViEEncoderTest, AdaptsFramerateOnOveruse_MaintainResolutionMode) {
-  // const int kDefaultFramerateFps = 30;
+  const int kDefaultFramerateFps = 30;
+  const int kFrameIntervalMs = rtc::kNumMillisecsPerSec / kDefaultFramerateFps;
   const int kFrameWidth = 1280;
   const int kFrameHeight = 720;
-  int kFrameIntervalMs = rtc::kNumMillisecsPerSec / max_framerate_;
+  rtc::ScopedFakeClock fake_clock;

   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   vie_encoder_->SetSource(
@@ -2030,82 +1980,93 @@ TEST_F(ViEEncoderTest, AdaptsFramerateOnOveruse_MaintainResolutionMode) {
       VideoSendStream::DegradationPreference::kMaintainResolution);
   video_source_.set_adaptation_enabled(true);

-  int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
+  fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
+  int64_t timestamp_ms = kFrameIntervalMs;

   video_source_.IncomingCapturedFrame(
       CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-  WaitForEncodedFrame(timestamp_ms);
+  sink_.WaitForEncodedFrame(timestamp_ms);

   // Try to trigger overuse. No fps estimate available => no effect.
   vie_encoder_->TriggerCpuOveruse();

   // Insert frames for one second to get a stable estimate.
-  for (int i = 0; i < max_framerate_; ++i) {
+  for (int i = 0; i < kDefaultFramerateFps; ++i) {
     timestamp_ms += kFrameIntervalMs;
+    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    WaitForEncodedFrame(timestamp_ms);
+    sink_.WaitForEncodedFrame(timestamp_ms);
   }

   // Trigger CPU overuse, reduce framerate by 2/3.
   vie_encoder_->TriggerCpuOveruse();
   int num_frames_dropped = 0;
-  for (int i = 0; i < max_framerate_; ++i) {
+  for (int i = 0; i < kDefaultFramerateFps; ++i) {
     timestamp_ms += kFrameIntervalMs;
+    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!WaitForFrame(kFrameTimeoutMs)) {
+    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
     }
   }

-  // Add some slack to account for frames dropped by the frame dropper.
-  const int kErrorMargin = 1;
-  EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
+  // TODO(sprang): Find where there's rounding errors or stuff causing the
+  // margin here to be a little larger than we'd like (input fps estimate is
+  // off) and the frame dropping is a little too aggressive.
+  const int kErrorMargin = 5;
+  EXPECT_NEAR(num_frames_dropped,
+              kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
               kErrorMargin);

   // Trigger CPU overuse, reduce framerate by 2/3 again.
   vie_encoder_->TriggerCpuOveruse();
   num_frames_dropped = 0;
-  for (int i = 0; i < max_framerate_; ++i) {
+  for (int i = 0; i < kDefaultFramerateFps; ++i) {
     timestamp_ms += kFrameIntervalMs;
+    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!WaitForFrame(kFrameTimeoutMs)) {
+    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
     }
   }
-  EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 4 / 9),
+  EXPECT_NEAR(num_frames_dropped,
+              kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9),
               kErrorMargin);

   // Go back up one step.
   vie_encoder_->TriggerCpuNormalUsage();
   num_frames_dropped = 0;
-  for (int i = 0; i < max_framerate_; ++i) {
+  for (int i = 0; i < kDefaultFramerateFps; ++i) {
     timestamp_ms += kFrameIntervalMs;
+    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!WaitForFrame(kFrameTimeoutMs)) {
+    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
     }
   }
-  EXPECT_NEAR(num_frames_dropped, max_framerate_ - (max_framerate_ * 2 / 3),
+  EXPECT_NEAR(num_frames_dropped,
+              kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3),
               kErrorMargin);

   // Go back up to original mode.
   vie_encoder_->TriggerCpuNormalUsage();
   num_frames_dropped = 0;
-  for (int i = 0; i < max_framerate_; ++i) {
+  for (int i = 0; i < kDefaultFramerateFps; ++i) {
     timestamp_ms += kFrameIntervalMs;
+    fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    if (!WaitForFrame(kFrameTimeoutMs)) {
+    if (!sink_.WaitForFrame(kFrameTimeoutMs)) {
       ++num_frames_dropped;
     } else {
       sink_.CheckLastFrameSizeMathces(kFrameWidth, kFrameHeight);
@@ -2123,17 +2084,15 @@ TEST_F(ViEEncoderTest, DoesntAdaptDownPastMinFramerate) {
   const int kFrameWidth = 1280;
   const int kFrameHeight = 720;

-  // Reconfigure encoder with two temporal layers and screensharing, which will
-  // disable frame dropping and make testing easier.
-  ResetEncoder("VP8", 1, 2, true, true);
-
+  rtc::ScopedFakeClock fake_clock;
   vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
   vie_encoder_->SetSource(
       &video_source_,
       VideoSendStream::DegradationPreference::kMaintainResolution);
   video_source_.set_adaptation_enabled(true);

-  int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
+  fake_clock.SetTimeMicros(kFrameIntervalMs * 1000);
+  int64_t timestamp_ms = kFrameIntervalMs;

   // Trigger overuse as much as we can.
   for (int i = 0; i < ViEEncoder::kMaxCpuResolutionDowngrades; ++i) {
@@ -2142,72 +2101,23 @@ TEST_F(ViEEncoderTest, DoesntAdaptDownPastMinFramerate) {
       video_source_.IncomingCapturedFrame(
           CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
       timestamp_ms += kFrameIntervalMs;
+      fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
     }
     // ...and then try to adapt again.
     vie_encoder_->TriggerCpuOveruse();
   }

   // Drain any frame in the pipeline.
-  WaitForFrame(kDefaultTimeoutMs);
+  sink_.WaitForFrame(kDefaultTimeoutMs);

   // Insert frames at min fps, all should go through.
   for (int i = 0; i < 10; ++i) {
     timestamp_ms += kMinFpsFrameInterval;
+    fake_clock.AdvanceTimeMicros(kMinFpsFrameInterval * 1000);
     video_source_.IncomingCapturedFrame(
         CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    WaitForEncodedFrame(timestamp_ms);
+    sink_.WaitForEncodedFrame(timestamp_ms);
   }

   vie_encoder_->Stop();
 }
-
-TEST_F(ViEEncoderTest, PriodicallyUpdatesChannelParameters) {
-  const int kFrameWidth = 1280;
-  const int kFrameHeight = 720;
-  const int kLowFps = 2;
-  const int kHighFps = 30;
-
-  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
-
-  int64_t timestamp_ms = fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
-  max_framerate_ = kLowFps;
-
-  // Insert 2 seconds of 2fps video.
-  for (int i = 0; i < kLowFps * 2; ++i) {
-    video_source_.IncomingCapturedFrame(
-        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    WaitForEncodedFrame(timestamp_ms);
-    timestamp_ms += 1000 / kLowFps;
-  }
-
-  // Make sure encoder is updated with new target.
-  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
-  video_source_.IncomingCapturedFrame(
-      CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-  WaitForEncodedFrame(timestamp_ms);
-  timestamp_ms += 1000 / kLowFps;
-
-  EXPECT_EQ(kLowFps, fake_encoder_.GetConfiguredInputFramerate());
-
-  // Insert 30fps frames for just a little more than the forced update period.
-  const int kVcmTimerIntervalFrames =
-      (vcm::VCMProcessTimer::kDefaultProcessIntervalMs * kHighFps) / 1000;
-  const int kFrameIntervalMs = 1000 / kHighFps;
-  max_framerate_ = kHighFps;
-  for (int i = 0; i < kVcmTimerIntervalFrames + 2; ++i) {
-    video_source_.IncomingCapturedFrame(
-        CreateFrame(timestamp_ms, kFrameWidth, kFrameHeight));
-    // Wait for encoded frame, but skip ahead if it doesn't arrive as it might
-    // be dropped if the encoder hans't been updated with the new higher target
-    // framerate yet, causing it to overshoot the target bitrate and then
-    // suffering the wrath of the media optimizer.
-    TimedWaitForEncodedFrame(timestamp_ms, 2 * kFrameIntervalMs);
-    timestamp_ms += kFrameIntervalMs;
-  }
-
-  // Don expect correct measurement just yet, but it should be higher than
-  // before.
-  EXPECT_GT(fake_encoder_.GetConfiguredInputFramerate(), kLowFps);
-
-  vie_encoder_->Stop();
-}
 }  // namespace webrtc
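
Illustrative sketch (not part of the patch above): the EXPECT_NEAR checks restored in AdaptsFramerateOnOveruse_MaintainResolutionMode compare the number of dropped frames against the framerate-reduction steps. The following self-contained C++ snippet mirrors that integer arithmetic, assuming the test's 30 fps input (kDefaultFramerateFps) and the 2/3-per-step reduction the test relies on; kErrorMargin = 5 in the test then allows for estimator slack.

// Sketch only; mirrors the integer arithmetic in the EXPECT_NEAR checks above.
#include <cassert>

int main() {
  const int kDefaultFramerateFps = 30;
  // One overuse step limits output to 2/3 of the input rate, so roughly a
  // third of the 30 frames inserted in one second should be dropped.
  const int kDroppedAfterOneStep =
      kDefaultFramerateFps - (kDefaultFramerateFps * 2 / 3);  // 30 - 20 = 10
  // A second step limits output to 2/3 * 2/3 = 4/9 of the input rate.
  const int kDroppedAfterTwoSteps =
      kDefaultFramerateFps - (kDefaultFramerateFps * 4 / 9);  // 30 - 13 = 17
  assert(kDroppedAfterOneStep == 10);
  assert(kDroppedAfterTwoSteps == 17);
  return 0;
}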
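Illustrative sketch (not part of the patch above): several restored hunks advance a fake clock in lock-step with the capture timestamps (fake_clock.AdvanceTimeMicros(kFrameIntervalMs * 1000) next to timestamp_ms += kFrameIntervalMs), so any fps estimator that reads the clock sees the intended input rate. The SimulatedClock below is a hypothetical stand-in for rtc::ScopedFakeClock so the sketch compiles on its own; only SetTimeMicros()/AdvanceTimeMicros() are taken from the hunks above.

// Sketch of the clock/timestamp pacing pattern; SimulatedClock is hypothetical.
#include <cstdint>
#include <cstdio>

struct SimulatedClock {
  int64_t now_us = 0;
  void SetTimeMicros(int64_t us) { now_us = us; }
  void AdvanceTimeMicros(int64_t us) { now_us += us; }
};

int main() {
  const int kFps = 30;
  const int kFrameIntervalMs = 1000 / kFps;  // 33 ms
  SimulatedClock clock;
  clock.SetTimeMicros(kFrameIntervalMs * 1000);
  int64_t timestamp_ms = kFrameIntervalMs;

  // Advance the clock by exactly one frame interval per captured frame, as the
  // restored tests do, keeping elapsed "wall" time and frame timestamps in sync.
  for (int i = 0; i < kFps; ++i) {
    timestamp_ms += kFrameIntervalMs;
    clock.AdvanceTimeMicros(kFrameIntervalMs * 1000);
  }

  // After ~one second of frames, timestamps and clock agree (both 1023 ms here).
  std::printf("last timestamp: %lld ms, clock: %lld ms\n",
              static_cast<long long>(timestamp_ms),
              static_cast<long long>(clock.now_us / 1000));
  return 0;
}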