Use pixels from single active stream if set in CanDecreaseResolutionTo

Simulcast with one active stream:
Use the pixel count from the single active stream, if set (instead of the input stream, which could be larger), to avoid going below the min_pixels_per_frame limit when downgrading resolution.

Bug: none
Change-Id: I65acb12cc53e46f726ccb5bfab8ce08ff0c4cf78
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/208101
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#33309}
This commit is contained in:
Åsa Persson 2021-02-18 14:20:03 +01:00 committed by Commit Bot
parent 42dd9bc077
commit 1dd94a023a
11 changed files with 108 additions and 36 deletions

View File

@ -62,13 +62,14 @@ int GetIncreasedMaxPixelsWanted(int target_pixels) {
} }
bool CanDecreaseResolutionTo(int target_pixels, bool CanDecreaseResolutionTo(int target_pixels,
int target_pixels_min,
const VideoStreamInputState& input_state, const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions) { const VideoSourceRestrictions& restrictions) {
int max_pixels_per_frame = int max_pixels_per_frame =
rtc::dchecked_cast<int>(restrictions.max_pixels_per_frame().value_or( rtc::dchecked_cast<int>(restrictions.max_pixels_per_frame().value_or(
std::numeric_limits<int>::max())); std::numeric_limits<int>::max()));
return target_pixels < max_pixels_per_frame && return target_pixels < max_pixels_per_frame &&
target_pixels >= input_state.min_pixels_per_frame(); target_pixels_min >= input_state.min_pixels_per_frame();
} }
bool CanIncreaseResolutionTo(int target_pixels, bool CanIncreaseResolutionTo(int target_pixels,
@ -96,6 +97,11 @@ bool CanIncreaseFrameRateTo(int max_frame_rate,
} }
bool MinPixelLimitReached(const VideoStreamInputState& input_state) { bool MinPixelLimitReached(const VideoStreamInputState& input_state) {
if (input_state.single_active_stream_pixels().has_value()) {
return GetLowerResolutionThan(
input_state.single_active_stream_pixels().value()) <
input_state.min_pixels_per_frame();
}
return input_state.frame_size_pixels().has_value() && return input_state.frame_size_pixels().has_value() &&
GetLowerResolutionThan(input_state.frame_size_pixels().value()) < GetLowerResolutionThan(input_state.frame_size_pixels().value()) <
input_state.min_pixels_per_frame(); input_state.min_pixels_per_frame();
@ -470,7 +476,11 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution(
const RestrictionsWithCounters& current_restrictions) { const RestrictionsWithCounters& current_restrictions) {
int target_pixels = int target_pixels =
GetLowerResolutionThan(input_state.frame_size_pixels().value()); GetLowerResolutionThan(input_state.frame_size_pixels().value());
if (!CanDecreaseResolutionTo(target_pixels, input_state, // Use single active stream if set, this stream could be lower than the input.
int target_pixels_min =
GetLowerResolutionThan(input_state.single_active_stream_pixels().value_or(
input_state.frame_size_pixels().value()));
if (!CanDecreaseResolutionTo(target_pixels, target_pixels_min, input_state,
current_restrictions.restrictions)) { current_restrictions.restrictions)) {
return Adaptation::Status::kLimitReached; return Adaptation::Status::kLimitReached;
} }
@ -693,4 +703,27 @@ VideoStreamAdapter::AwaitingFrameSizeChange::AwaitingFrameSizeChange(
: pixels_increased(pixels_increased), : pixels_increased(pixels_increased),
frame_size_pixels(frame_size_pixels) {} frame_size_pixels(frame_size_pixels) {}
// Returns the pixel count (width * height) of the single active spatial /
// simulcast layer, or absl::nullopt when no layer or more than one layer is
// active. For VP9 the spatial-layer configuration is inspected; for all other
// codecs the simulcast-stream configuration is used.
absl::optional<uint32_t> VideoStreamAdapter::GetSingleActiveLayerPixels(
    const VideoCodec& codec) {
  int active_count = 0;
  absl::optional<uint32_t> active_pixels;
  if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
    for (int idx = 0; idx < codec.VP9().numberOfSpatialLayers; ++idx) {
      const auto& layer = codec.spatialLayers[idx];
      if (layer.active) {
        ++active_count;
        active_pixels = layer.width * layer.height;
      }
    }
  } else {
    for (int idx = 0; idx < codec.numberOfSimulcastStreams; ++idx) {
      const auto& stream = codec.simulcastStream[idx];
      if (stream.active) {
        ++active_count;
        active_pixels = stream.width * stream.height;
      }
    }
  }
  // With multiple active layers there is no single answer; report "unset".
  if (active_count > 1) {
    return absl::nullopt;
  }
  return active_pixels;
}
} // namespace webrtc } // namespace webrtc

View File

@ -163,6 +163,9 @@ class VideoStreamAdapter {
VideoAdaptationCounters counters; VideoAdaptationCounters counters;
}; };
static absl::optional<uint32_t> GetSingleActiveLayerPixels(
const VideoCodec& codec);
private: private:
void BroadcastVideoRestrictionsUpdate( void BroadcastVideoRestrictionsUpdate(
const VideoStreamInputState& input_state, const VideoStreamInputState& input_state,

View File

@ -19,7 +19,8 @@ VideoStreamInputState::VideoStreamInputState()
frame_size_pixels_(absl::nullopt), frame_size_pixels_(absl::nullopt),
frames_per_second_(0), frames_per_second_(0),
video_codec_type_(VideoCodecType::kVideoCodecGeneric), video_codec_type_(VideoCodecType::kVideoCodecGeneric),
min_pixels_per_frame_(kDefaultMinPixelsPerFrame) {} min_pixels_per_frame_(kDefaultMinPixelsPerFrame),
single_active_stream_pixels_(absl::nullopt) {}
void VideoStreamInputState::set_has_input(bool has_input) { void VideoStreamInputState::set_has_input(bool has_input) {
has_input_ = has_input; has_input_ = has_input;
@ -43,6 +44,11 @@ void VideoStreamInputState::set_min_pixels_per_frame(int min_pixels_per_frame) {
min_pixels_per_frame_ = min_pixels_per_frame; min_pixels_per_frame_ = min_pixels_per_frame;
} }
void VideoStreamInputState::set_single_active_stream_pixels(
absl::optional<int> single_active_stream_pixels) {
single_active_stream_pixels_ = single_active_stream_pixels;
}
bool VideoStreamInputState::has_input() const { bool VideoStreamInputState::has_input() const {
return has_input_; return has_input_;
} }
@ -63,6 +69,10 @@ int VideoStreamInputState::min_pixels_per_frame() const {
return min_pixels_per_frame_; return min_pixels_per_frame_;
} }
absl::optional<int> VideoStreamInputState::single_active_stream_pixels() const {
return single_active_stream_pixels_;
}
bool VideoStreamInputState::HasInputFrameSizeAndFramesPerSecond() const { bool VideoStreamInputState::HasInputFrameSizeAndFramesPerSecond() const {
return has_input_ && frame_size_pixels_.has_value(); return has_input_ && frame_size_pixels_.has_value();
} }

View File

@ -27,12 +27,15 @@ class VideoStreamInputState {
void set_frames_per_second(int frames_per_second); void set_frames_per_second(int frames_per_second);
void set_video_codec_type(VideoCodecType video_codec_type); void set_video_codec_type(VideoCodecType video_codec_type);
void set_min_pixels_per_frame(int min_pixels_per_frame); void set_min_pixels_per_frame(int min_pixels_per_frame);
void set_single_active_stream_pixels(
absl::optional<int> single_active_stream_pixels);
bool has_input() const; bool has_input() const;
absl::optional<int> frame_size_pixels() const; absl::optional<int> frame_size_pixels() const;
int frames_per_second() const; int frames_per_second() const;
VideoCodecType video_codec_type() const; VideoCodecType video_codec_type() const;
int min_pixels_per_frame() const; int min_pixels_per_frame() const;
absl::optional<int> single_active_stream_pixels() const;
bool HasInputFrameSizeAndFramesPerSecond() const; bool HasInputFrameSizeAndFramesPerSecond() const;
@ -42,6 +45,7 @@ class VideoStreamInputState {
int frames_per_second_; int frames_per_second_;
VideoCodecType video_codec_type_; VideoCodecType video_codec_type_;
int min_pixels_per_frame_; int min_pixels_per_frame_;
absl::optional<int> single_active_stream_pixels_;
}; };
} // namespace webrtc } // namespace webrtc

View File

@ -10,6 +10,8 @@
#include "call/adaptation/video_stream_input_state_provider.h" #include "call/adaptation/video_stream_input_state_provider.h"
#include "call/adaptation/video_stream_adapter.h"
namespace webrtc { namespace webrtc {
VideoStreamInputStateProvider::VideoStreamInputStateProvider( VideoStreamInputStateProvider::VideoStreamInputStateProvider(
@ -36,6 +38,9 @@ void VideoStreamInputStateProvider::OnEncoderSettingsChanged(
encoder_settings.encoder_config().codec_type); encoder_settings.encoder_config().codec_type);
input_state_.set_min_pixels_per_frame( input_state_.set_min_pixels_per_frame(
encoder_settings.encoder_info().scaling_settings.min_pixels_per_frame); encoder_settings.encoder_info().scaling_settings.min_pixels_per_frame);
input_state_.set_single_active_stream_pixels(
VideoStreamAdapter::GetSingleActiveLayerPixels(
encoder_settings.video_codec()));
} }
VideoStreamInputState VideoStreamInputStateProvider::InputState() { VideoStreamInputState VideoStreamInputStateProvider::InputState() {

View File

@ -28,6 +28,7 @@ TEST(VideoStreamInputStateProviderTest, DefaultValues) {
EXPECT_EQ(0, input_state.frames_per_second()); EXPECT_EQ(0, input_state.frames_per_second());
EXPECT_EQ(VideoCodecType::kVideoCodecGeneric, input_state.video_codec_type()); EXPECT_EQ(VideoCodecType::kVideoCodecGeneric, input_state.video_codec_type());
EXPECT_EQ(kDefaultMinPixelsPerFrame, input_state.min_pixels_per_frame()); EXPECT_EQ(kDefaultMinPixelsPerFrame, input_state.min_pixels_per_frame());
EXPECT_EQ(absl::nullopt, input_state.single_active_stream_pixels());
} }
TEST(VideoStreamInputStateProviderTest, ValuesSet) { TEST(VideoStreamInputStateProviderTest, ValuesSet) {
@ -40,14 +41,22 @@ TEST(VideoStreamInputStateProviderTest, ValuesSet) {
encoder_info.scaling_settings.min_pixels_per_frame = 1337; encoder_info.scaling_settings.min_pixels_per_frame = 1337;
VideoEncoderConfig encoder_config; VideoEncoderConfig encoder_config;
encoder_config.codec_type = VideoCodecType::kVideoCodecVP9; encoder_config.codec_type = VideoCodecType::kVideoCodecVP9;
VideoCodec video_codec;
video_codec.codecType = VideoCodecType::kVideoCodecVP8;
video_codec.numberOfSimulcastStreams = 2;
video_codec.simulcastStream[0].active = false;
video_codec.simulcastStream[1].active = true;
video_codec.simulcastStream[1].width = 111;
video_codec.simulcastStream[1].height = 222;
input_state_provider.OnEncoderSettingsChanged(EncoderSettings( input_state_provider.OnEncoderSettingsChanged(EncoderSettings(
std::move(encoder_info), std::move(encoder_config), VideoCodec())); std::move(encoder_info), std::move(encoder_config), video_codec));
VideoStreamInputState input_state = input_state_provider.InputState(); VideoStreamInputState input_state = input_state_provider.InputState();
EXPECT_EQ(true, input_state.has_input()); EXPECT_EQ(true, input_state.has_input());
EXPECT_EQ(42, input_state.frame_size_pixels()); EXPECT_EQ(42, input_state.frame_size_pixels());
EXPECT_EQ(123, input_state.frames_per_second()); EXPECT_EQ(123, input_state.frames_per_second());
EXPECT_EQ(VideoCodecType::kVideoCodecVP9, input_state.video_codec_type()); EXPECT_EQ(VideoCodecType::kVideoCodecVP9, input_state.video_codec_type());
EXPECT_EQ(1337, input_state.min_pixels_per_frame()); EXPECT_EQ(1337, input_state.min_pixels_per_frame());
EXPECT_EQ(111 * 222, input_state.single_active_stream_pixels());
} }
} // namespace webrtc } // namespace webrtc

View File

@ -61,7 +61,7 @@ bool BitrateConstraint::IsAdaptationUpAllowed(
} }
absl::optional<uint32_t> current_frame_size_px = absl::optional<uint32_t> current_frame_size_px =
VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels( VideoStreamAdapter::GetSingleActiveLayerPixels(
encoder_settings_->video_codec()); encoder_settings_->video_codec());
if (!current_frame_size_px.has_value()) { if (!current_frame_size_px.has_value()) {
return true; return true;

View File

@ -173,7 +173,8 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper {
"stream parameters"; "stream parameters";
initial_framedrop_ = 0; initial_framedrop_ = 0;
if (single_active_stream_pixels_ && if (single_active_stream_pixels_ &&
GetSingleActiveLayerPixels(codec) > *single_active_stream_pixels_) { VideoStreamAdapter::GetSingleActiveLayerPixels(codec) >
*single_active_stream_pixels_) {
// Resolution increased. // Resolution increased.
use_bandwidth_allocation_ = true; use_bandwidth_allocation_ = true;
} }
@ -183,7 +184,8 @@ class VideoStreamEncoderResourceManager::InitialFrameDropper {
last_active_flags_ = active_flags; last_active_flags_ = active_flags;
last_input_width_ = codec.width; last_input_width_ = codec.width;
last_input_height_ = codec.height; last_input_height_ = codec.height;
single_active_stream_pixels_ = GetSingleActiveLayerPixels(codec); single_active_stream_pixels_ =
VideoStreamAdapter::GetSingleActiveLayerPixels(codec);
} }
void OnFrameDroppedDueToSize() { ++initial_framedrop_; } void OnFrameDroppedDueToSize() { ++initial_framedrop_; }
@ -712,32 +714,6 @@ void VideoStreamEncoderResourceManager::OnQualityRampUp() {
quality_rampup_experiment_.reset(); quality_rampup_experiment_.reset();
} }
// Returns the pixel count (width * height) of the single active spatial /
// simulcast layer, or absl::nullopt when zero or more than one layer is
// active. VP9 uses the spatial-layer config; other codecs the simulcast
// config. NOTE(review): this diff removes this copy in favor of the identical
// VideoStreamAdapter::GetSingleActiveLayerPixels.
absl::optional<uint32_t>
VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels(
    const VideoCodec& codec) {
  int num_active = 0;
  absl::optional<uint32_t> pixels;
  if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
    for (int i = 0; i < codec.VP9().numberOfSpatialLayers; ++i) {
      if (codec.spatialLayers[i].active) {
        ++num_active;
        // Remembers the last active layer's pixels; only meaningful when
        // num_active ends up == 1.
        pixels = codec.spatialLayers[i].width * codec.spatialLayers[i].height;
      }
    }
  } else {
    for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) {
      if (codec.simulcastStream[i].active) {
        ++num_active;
        pixels =
            codec.simulcastStream[i].width * codec.simulcastStream[i].height;
      }
    }
  }
  if (num_active > 1)
    return absl::nullopt;
  return pixels;
}
bool VideoStreamEncoderResourceManager::IsSimulcast( bool VideoStreamEncoderResourceManager::IsSimulcast(
const VideoEncoderConfig& encoder_config) { const VideoEncoderConfig& encoder_config) {
const std::vector<VideoStream>& simulcast_layers = const std::vector<VideoStream>& simulcast_layers =

View File

@ -147,8 +147,6 @@ class VideoStreamEncoderResourceManager
// QualityRampUpExperimentListener implementation. // QualityRampUpExperimentListener implementation.
void OnQualityRampUp() override; void OnQualityRampUp() override;
static absl::optional<uint32_t> GetSingleActiveLayerPixels(
const VideoCodec& codec);
static bool IsSimulcast(const VideoEncoderConfig& encoder_config); static bool IsSimulcast(const VideoEncoderConfig& encoder_config);
private: private:

View File

@ -197,6 +197,14 @@ TEST_F(QualityScalingTest, NoAdaptDownForLowStartBitrate_Simulcast) {
/*automatic_resize=*/false, /*expect_adaptation=*/false); /*automatic_resize=*/false, /*expect_adaptation=*/false);
} }
// VP8 simulcast with only the highest stream active: a forced-high QP should
// still trigger a downward resolution adaptation, since the active stream is
// large enough to be reduced.
TEST_F(QualityScalingTest, AdaptsDownForHighQp_HighestStreamActive_Vp8) {
  // qp_low:1, qp_high:1 -> kHighQp
  test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
  RunTest("VP8", {false, false, true}, kHighStartBps,
          /*automatic_resize=*/true, /*expect_adaptation=*/true);
}
TEST_F(QualityScalingTest, TEST_F(QualityScalingTest,
AdaptsDownForLowStartBitrate_HighestStreamActive_Vp8) { AdaptsDownForLowStartBitrate_HighestStreamActive_Vp8) {
// qp_low:1, qp_high:127 -> kNormalQp // qp_low:1, qp_high:127 -> kNormalQp
@ -206,6 +214,14 @@ TEST_F(QualityScalingTest,
/*automatic_resize=*/true, /*expect_adaptation=*/true); /*automatic_resize=*/true, /*expect_adaptation=*/true);
} }
// VP8 simulcast with only the lowest stream active: even with a forced-high
// QP no downward adaptation is expected — presumably because stepping down
// from the lowest stream would fall below the minimum-pixels limit
// (the single-active-stream pixels are now used for that check).
TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp8) {
  // qp_low:1, qp_high:1 -> kHighQp
  test::ScopedFieldTrials field_trials(kPrefix + "1,1,0,0,0,0" + kEnd);
  RunTest("VP8", {true, false, false}, kHighStartBps,
          /*automatic_resize=*/true, /*expect_adaptation=*/false);
}
TEST_F(QualityScalingTest, TEST_F(QualityScalingTest,
NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp8) { NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp8) {
// qp_low:1, qp_high:127 -> kNormalQp // qp_low:1, qp_high:127 -> kNormalQp
@ -250,6 +266,15 @@ TEST_F(QualityScalingTest, AdaptsDownForLowStartBitrate_Vp9) {
/*automatic_resize=*/true, /*expect_adaptation=*/true); /*automatic_resize=*/true, /*expect_adaptation=*/true);
} }
// VP9 with only the lowest spatial layer active: a forced-high QP must not
// cause a downward adaptation — presumably the lowest layer cannot be
// reduced without violating the minimum-pixels limit.
TEST_F(QualityScalingTest, NoAdaptDownForHighQp_LowestStreamActive_Vp9) {
  // qp_low:1, qp_high:1 -> kHighQp
  test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
                                       "WebRTC-VP9QualityScaler/Enabled/");
  RunTest("VP9", {true, false, false}, kHighStartBps,
          /*automatic_resize=*/true, /*expect_adaptation=*/false);
}
TEST_F(QualityScalingTest, TEST_F(QualityScalingTest,
NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp9) { NoAdaptDownForLowStartBitrate_LowestStreamActive_Vp9) {
// qp_low:1, qp_high:255 -> kNormalQp // qp_low:1, qp_high:255 -> kNormalQp
@ -260,6 +285,15 @@ TEST_F(QualityScalingTest,
/*automatic_resize=*/true, /*expect_adaptation=*/false); /*automatic_resize=*/true, /*expect_adaptation=*/false);
} }
// VP9 with only the middle spatial layer active: a forced-high QP should
// trigger a downward adaptation, as the middle layer still has room to
// shrink above the minimum-pixels limit.
TEST_F(QualityScalingTest, AdaptsDownForHighQp_MiddleStreamActive_Vp9) {
  // qp_low:1, qp_high:1 -> kHighQp
  test::ScopedFieldTrials field_trials(kPrefix + "0,0,1,1,0,0" + kEnd +
                                       "WebRTC-VP9QualityScaler/Enabled/");
  RunTest("VP9", {false, true, false}, kHighStartBps,
          /*automatic_resize=*/true, /*expect_adaptation=*/true);
}
TEST_F(QualityScalingTest, TEST_F(QualityScalingTest,
AdaptsDownForLowStartBitrate_MiddleStreamActive_Vp9) { AdaptsDownForLowStartBitrate_MiddleStreamActive_Vp9) {
// qp_low:1, qp_high:255 -> kNormalQp // qp_low:1, qp_high:255 -> kNormalQp

View File

@ -359,7 +359,7 @@ void ApplyVp9BitrateLimits(const VideoEncoder::EncoderInfo& encoder_info,
// Get bitrate limits for active stream. // Get bitrate limits for active stream.
absl::optional<uint32_t> pixels = absl::optional<uint32_t> pixels =
VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels(*codec); VideoStreamAdapter::GetSingleActiveLayerPixels(*codec);
if (!pixels.has_value()) { if (!pixels.has_value()) {
return; return;
} }