diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 32abc33171..5d8a173656 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -2698,22 +2698,26 @@ std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams( ((absl::EqualsIgnoreCase(codec_name_, kVp8CodecName) || absl::EqualsIgnoreCase(codec_name_, kH264CodecName)) && is_screenshare_ && screenshare_config_explicitly_enabled_)) { - bool temporal_layers_supported = + const bool temporal_layers_supported = absl::EqualsIgnoreCase(codec_name_, kVp8CodecName); layers = GetSimulcastConfig(encoder_config.number_of_streams, width, height, 0 /*not used*/, encoder_config.bitrate_priority, max_qp_, 0 /*not_used*/, is_screenshare_, temporal_layers_supported); // The maximum |max_framerate| is currently used for video. - int max_framerate = GetMaxFramerate(encoder_config, layers.size()); + const int max_framerate = GetMaxFramerate(encoder_config, layers.size()); // Update the active simulcast layers and configured bitrates. 
bool is_highest_layer_max_bitrate_configured = false; - bool has_scale_resolution_down_by = + const bool has_scale_resolution_down_by = std::any_of(encoder_config.simulcast_layers.begin(), encoder_config.simulcast_layers.end(), [](const webrtc::VideoStream& layer) { return layer.scale_resolution_down_by != -1.; }); + const int normalized_width = + NormalizeSimulcastSize(width, encoder_config.number_of_streams); + const int normalized_height = + NormalizeSimulcastSize(height, encoder_config.number_of_streams); for (size_t i = 0; i < layers.size(); ++i) { layers[i].active = encoder_config.simulcast_layers[i].active; if (!is_screenshare_) { @@ -2727,16 +2731,14 @@ std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams( *encoder_config.simulcast_layers[i].num_temporal_layers; } if (has_scale_resolution_down_by) { - double scale_resolution_down_by = std::max( + const double scale_resolution_down_by = std::max( encoder_config.simulcast_layers[i].scale_resolution_down_by, 1.0); - layers[i].width = - std::max(NormalizeSimulcastSize(width / scale_resolution_down_by, - encoder_config.number_of_streams), - kMinLayerSize); - layers[i].height = - std::max(NormalizeSimulcastSize(height / scale_resolution_down_by, - encoder_config.number_of_streams), - kMinLayerSize); + layers[i].width = std::max( + static_cast<int>(normalized_width / scale_resolution_down_by), + kMinLayerSize); + layers[i].height = std::max( + static_cast<int>(normalized_height / scale_resolution_down_by), + kMinLayerSize); } // Update simulcast bitrates with configured min and max bitrate. 
if (encoder_config.simulcast_layers[i].min_bitrate_bps > 0) { diff --git a/media/engine/webrtc_video_engine_unittest.cc b/media/engine/webrtc_video_engine_unittest.cc index ac2bf4b6b3..ef45779c27 100644 --- a/media/engine/webrtc_video_engine_unittest.cc +++ b/media/engine/webrtc_video_engine_unittest.cc @@ -5549,14 +5549,13 @@ TEST_F(WebRtcVideoChannelTest, SetRtpSendParametersInvalidNetworkPriority) { TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersScaleResolutionDownByVP8) { - cricket::VideoSendParameters parameters; - parameters.codecs.push_back(cricket::VideoCodec("VP8")); + VideoSendParameters parameters; + parameters.codecs.push_back(VideoCodec(kVp8CodecName)); ASSERT_TRUE(channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, false); webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); VideoOptions options; EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -5653,17 +5652,66 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } +TEST_F(WebRtcVideoChannelTest, + GetAndSetRtpSendParametersScaleResolutionDownByVP8WithOddResolution) { + // Ensure that the top layer has width and height divisible by 2^3, + // so that the bottom layer has width and height divisible by 2. + // TODO(bugs.webrtc.org/8785): Remove this field trial when we fully trust + // the number of simulcast layers set by the app. + webrtc::test::ScopedFieldTrials field_trial( + "WebRTC-NormalizeSimulcastResolution/Enabled-3/"); + + // Set up WebRtcVideoChannel for 3-layer VP8 simulcast. 
+ VideoSendParameters parameters; + parameters.codecs.push_back(VideoCodec(kVp8CodecName)); + ASSERT_TRUE(channel_->SetSendParameters(parameters)); + FakeVideoSendStream* stream = SetUpSimulcast(true, false); + webrtc::test::FrameForwarder frame_forwarder; + EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, + &frame_forwarder)); + channel_->SetSend(true); + + // Set |scale_resolution_down_by|'s. + auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + ASSERT_EQ(rtp_parameters.encodings.size(), 3u); + rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; + rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; + rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; + const auto result = + channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + ASSERT_TRUE(result.ok()); + + // Use a capture resolution whose width and height are not divisible by 2^3. + // (See field trial set at the top of the test.) + FakeFrameSource frame_source(2007, 1207, rtc::kNumMicrosecsPerSec / 30); + frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); + + // Ensure the scaling is correct. + const auto video_streams = stream->GetVideoStreams(); + ASSERT_EQ(video_streams.size(), 3u); + // Ensure that we round the capture resolution down for the top layer... + EXPECT_EQ(video_streams[0].width, 2000u); + EXPECT_EQ(video_streams[0].height, 1200u); + EXPECT_EQ(video_streams[1].width, 1000u); + EXPECT_EQ(video_streams[1].height, 600u); + // ...and that the bottom layer has a width/height divisible by 2. + EXPECT_EQ(video_streams[2].width, 500u); + EXPECT_EQ(video_streams[2].height, 300u); + + // Tear down. 
+ EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} + TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersScaleResolutionDownByH264) { - encoder_factory_->AddSupportedVideoCodecType("H264"); - cricket::VideoSendParameters parameters; - parameters.codecs.push_back(cricket::VideoCodec("H264")); + encoder_factory_->AddSupportedVideoCodecType(kH264CodecName); + VideoSendParameters parameters; + parameters.codecs.push_back(VideoCodec(kH264CodecName)); ASSERT_TRUE(channel_->SetSendParameters(parameters)); FakeVideoSendStream* stream = SetUpSimulcast(true, false); webrtc::test::FrameForwarder frame_forwarder; - cricket::FakeFrameSource frame_source(1280, 720, - rtc::kNumMicrosecsPerSec / 30); + FakeFrameSource frame_source(1280, 720, rtc::kNumMicrosecsPerSec / 30); VideoOptions options; EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, &options, &frame_forwarder)); @@ -5759,6 +5807,57 @@ TEST_F(WebRtcVideoChannelTest, EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); } +TEST_F(WebRtcVideoChannelTest, + GetAndSetRtpSendParametersScaleResolutionDownByH264WithOddResolution) { + // Ensure that the top layer has width and height divisible by 2^3, + // so that the bottom layer has width and height divisible by 2. + // TODO(bugs.webrtc.org/8785): Remove this field trial when we fully trust + // the number of simulcast layers set by the app. + webrtc::test::ScopedFieldTrials field_trial( + "WebRTC-NormalizeSimulcastResolution/Enabled-3/"); + + // Set up WebRtcVideoChannel for 3-layer H264 simulcast. 
+ encoder_factory_->AddSupportedVideoCodecType(kH264CodecName); + VideoSendParameters parameters; + parameters.codecs.push_back(VideoCodec(kH264CodecName)); + ASSERT_TRUE(channel_->SetSendParameters(parameters)); + FakeVideoSendStream* stream = SetUpSimulcast(true, false); + webrtc::test::FrameForwarder frame_forwarder; + EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, /*options=*/nullptr, + &frame_forwarder)); + channel_->SetSend(true); + + // Set |scale_resolution_down_by|'s. + auto rtp_parameters = channel_->GetRtpSendParameters(last_ssrc_); + ASSERT_EQ(rtp_parameters.encodings.size(), 3u); + rtp_parameters.encodings[0].scale_resolution_down_by = 1.0; + rtp_parameters.encodings[1].scale_resolution_down_by = 2.0; + rtp_parameters.encodings[2].scale_resolution_down_by = 4.0; + const auto result = + channel_->SetRtpSendParameters(last_ssrc_, rtp_parameters); + ASSERT_TRUE(result.ok()); + + // Use a capture resolution whose width and height are not divisible by 2^3. + // (See field trial set at the top of the test.) + FakeFrameSource frame_source(2007, 1207, rtc::kNumMicrosecsPerSec / 30); + frame_forwarder.IncomingCapturedFrame(frame_source.GetFrame()); + + // Ensure the scaling is correct. + const auto video_streams = stream->GetVideoStreams(); + ASSERT_EQ(video_streams.size(), 3u); + // Ensure that we round the capture resolution down for the top layer... + EXPECT_EQ(video_streams[0].width, 2000u); + EXPECT_EQ(video_streams[0].height, 1200u); + EXPECT_EQ(video_streams[1].width, 1000u); + EXPECT_EQ(video_streams[1].height, 600u); + // ...and that the bottom layer has a width/height divisible by 2. + EXPECT_EQ(video_streams[2].width, 500u); + EXPECT_EQ(video_streams[2].height, 300u); + + // Tear down. + EXPECT_TRUE(channel_->SetVideoSend(last_ssrc_, nullptr, nullptr)); +} + TEST_F(WebRtcVideoChannelTest, GetAndSetRtpSendParametersMaxFramerate) { const size_t kNumSimulcastStreams = 3; SetUpSimulcast(true, false);