Wire up trial for alternative EncoderBitrateAdjuster behavior.

Behind a flag, the new behavior changes how the "media rate" utilization
is calculated:

* Instead of per spatial & temporal layer, it's per spatial layer only.
* Overshoot is compared against the real (unadjusted) target rather than the adjusted target.
* Window takes quiet periods/frame drops more into consideration.

This should lead to less push-back when the stream is not network
constrained and complex content causes bursty encoder output.

Bug: b/349561566
Change-Id: I402e6531183493c963fec48ae363ce0b859b396a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/356480
Commit-Queue: Erik Språng <sprang@webrtc.org>
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#42593}
This commit is contained in:
Erik Språng 2024-07-05 11:41:29 +02:00 committed by WebRTC LUCI CQ
parent 6ed0a3c3c6
commit db65fda82f
6 changed files with 134 additions and 58 deletions

View File

@ -59,6 +59,9 @@ ACTIVE_FIELD_TRIALS: FrozenSet[FieldTrial] = frozenset([
FieldTrial('WebRTC-Av1-GetEncoderInfoOverride', FieldTrial('WebRTC-Av1-GetEncoderInfoOverride',
42225234, 42225234,
date(2024, 4, 1)), date(2024, 4, 1)),
FieldTrial('WebRTC-BitrateAdjusterUseNewfangledHeadroomAdjustment',
349561566,
date(2025, 8, 26)),
FieldTrial('WebRTC-Bwe-LimitPacingFactorByUpperLinkCapacityEstimate', FieldTrial('WebRTC-Bwe-LimitPacingFactorByUpperLinkCapacityEstimate',
42220543, 42220543,
date(2025, 1, 1)), date(2025, 1, 1)),

View File

@ -41,19 +41,23 @@ struct LayerRateInfo {
} }
}; };
} // namespace } // namespace
constexpr int64_t EncoderBitrateAdjuster::kWindowSizeMs; constexpr TimeDelta EncoderBitrateAdjuster::kWindowSize;
constexpr size_t EncoderBitrateAdjuster::kMinFramesSinceLayoutChange; constexpr size_t EncoderBitrateAdjuster::kMinFramesSinceLayoutChange;
constexpr double EncoderBitrateAdjuster::kDefaultUtilizationFactor; constexpr double EncoderBitrateAdjuster::kDefaultUtilizationFactor;
EncoderBitrateAdjuster::EncoderBitrateAdjuster( EncoderBitrateAdjuster::EncoderBitrateAdjuster(
const VideoCodec& codec_settings, const VideoCodec& codec_settings,
const FieldTrialsView& field_trials) const FieldTrialsView& field_trials,
Clock& clock)
: utilize_bandwidth_headroom_(RateControlSettings(field_trials) : utilize_bandwidth_headroom_(RateControlSettings(field_trials)
.BitrateAdjusterCanUseNetworkHeadroom()), .BitrateAdjusterCanUseNetworkHeadroom()),
use_newfangled_headroom_adjustment_(field_trials.IsEnabled(
"WebRTC-BitrateAdjusterUseNewfangledHeadroomAdjustment")),
frames_since_layout_change_(0), frames_since_layout_change_(0),
min_bitrates_bps_{}, min_bitrates_bps_{},
codec_(codec_settings.codecType), codec_(codec_settings.codecType),
codec_mode_(codec_settings.mode) { codec_mode_(codec_settings.mode),
clock_(clock) {
// TODO(https://crbug.com/webrtc/14891): If we want to support simulcast of // TODO(https://crbug.com/webrtc/14891): If we want to support simulcast of
// SVC streams, EncoderBitrateAdjuster needs to be updated to care about both // SVC streams, EncoderBitrateAdjuster needs to be updated to care about both
// `simulcastStream` and `spatialLayers` at the same time. // `simulcastStream` and `spatialLayers` at the same time.
@ -94,6 +98,7 @@ EncoderBitrateAdjuster::~EncoderBitrateAdjuster() = default;
VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation( VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation(
const VideoEncoder::RateControlParameters& rates) { const VideoEncoder::RateControlParameters& rates) {
current_rate_control_parameters_ = rates; current_rate_control_parameters_ = rates;
const Timestamp now = clock_.CurrentTime();
// First check that overshoot detectors exist, and store per simulcast/spatial // First check that overshoot detectors exist, and store per simulcast/spatial
// layer how many active temporal layers we have. // layer how many active temporal layers we have.
@ -109,7 +114,7 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation(
if (!overshoot_detectors_[si][ti]) { if (!overshoot_detectors_[si][ti]) {
overshoot_detectors_[si][ti] = overshoot_detectors_[si][ti] =
std::make_unique<EncoderOvershootDetector>( std::make_unique<EncoderOvershootDetector>(
kWindowSizeMs, codec_, kWindowSize.ms(), codec_,
codec_mode_ == VideoCodecMode::kScreensharing); codec_mode_ == VideoCodecMode::kScreensharing);
frames_since_layout_change_ = 0; frames_since_layout_change_ = 0;
} }
@ -119,10 +124,26 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation(
frames_since_layout_change_ = 0; frames_since_layout_change_ = 0;
} }
} }
if (use_newfangled_headroom_adjustment_) {
// Instantiate average media rate trackers, one per active spatial layer.
DataRate spatial_layer_rate =
DataRate::BitsPerSec(rates.bitrate.GetSpatialLayerSum(si));
if (spatial_layer_rate.IsZero()) {
media_rate_trackers_[si].reset();
} else {
if (media_rate_trackers_[si] == nullptr) {
constexpr int kMaxDataPointsInUtilizationTrackers = 100;
media_rate_trackers_[si] = std::make_unique<RateUtilizationTracker>(
kMaxDataPointsInUtilizationTrackers, kWindowSize);
}
// Media rate trackers use the unadjusted target rate.
media_rate_trackers_[si]->OnDataRateChanged(spatial_layer_rate, now);
}
}
} }
// Next poll the overshoot detectors and populate the adjusted allocation. // Next poll the overshoot detectors and populate the adjusted allocation.
const int64_t now_ms = rtc::TimeMillis();
VideoBitrateAllocation adjusted_allocation; VideoBitrateAllocation adjusted_allocation;
std::vector<LayerRateInfo> layer_infos; std::vector<LayerRateInfo> layer_infos;
DataRate wanted_overshoot_sum = DataRate::Zero(); DataRate wanted_overshoot_sum = DataRate::Zero();
@ -153,12 +174,16 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation(
RTC_DCHECK(overshoot_detectors_[si][0]); RTC_DCHECK(overshoot_detectors_[si][0]);
layer_info.link_utilization_factor = layer_info.link_utilization_factor =
overshoot_detectors_[si][0] overshoot_detectors_[si][0]
->GetNetworkRateUtilizationFactor(now_ms) ->GetNetworkRateUtilizationFactor(now.ms())
.value_or(kDefaultUtilizationFactor); .value_or(kDefaultUtilizationFactor);
layer_info.media_utilization_factor = layer_info.media_utilization_factor =
overshoot_detectors_[si][0] use_newfangled_headroom_adjustment_
->GetMediaRateUtilizationFactor(now_ms) ? media_rate_trackers_[si]
.value_or(kDefaultUtilizationFactor); ->GetRateUtilizationFactor(now)
.value_or(kDefaultUtilizationFactor)
: overshoot_detectors_[si][0]
->GetMediaRateUtilizationFactor(now.ms())
.value_or(kDefaultUtilizationFactor);
} else if (layer_info.target_rate > DataRate::Zero()) { } else if (layer_info.target_rate > DataRate::Zero()) {
// Multiple temporal layers enabled for this simulcast/spatial layer. // Multiple temporal layers enabled for this simulcast/spatial layer.
// Update rate for each of them and make a weighted average of utilization // Update rate for each of them and make a weighted average of utilization
@ -170,9 +195,11 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation(
RTC_DCHECK(overshoot_detectors_[si][ti]); RTC_DCHECK(overshoot_detectors_[si][ti]);
const absl::optional<double> ti_link_utilization_factor = const absl::optional<double> ti_link_utilization_factor =
overshoot_detectors_[si][ti]->GetNetworkRateUtilizationFactor( overshoot_detectors_[si][ti]->GetNetworkRateUtilizationFactor(
now_ms); now.ms());
const absl::optional<double> ti_media_utilization_factor = const absl::optional<double> ti_media_utilization_factor =
overshoot_detectors_[si][ti]->GetMediaRateUtilizationFactor(now_ms); overshoot_detectors_[si][ti]->GetMediaRateUtilizationFactor(
now.ms());
if (!ti_link_utilization_factor || !ti_media_utilization_factor) { if (!ti_link_utilization_factor || !ti_media_utilization_factor) {
layer_info.link_utilization_factor = kDefaultUtilizationFactor; layer_info.link_utilization_factor = kDefaultUtilizationFactor;
layer_info.media_utilization_factor = kDefaultUtilizationFactor; layer_info.media_utilization_factor = kDefaultUtilizationFactor;
@ -186,14 +213,17 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation(
layer_info.media_utilization_factor += layer_info.media_utilization_factor +=
weight * ti_media_utilization_factor.value(); weight * ti_media_utilization_factor.value();
} }
if (use_newfangled_headroom_adjustment_) {
layer_info.media_utilization_factor =
media_rate_trackers_[si]->GetRateUtilizationFactor(now).value_or(
kDefaultUtilizationFactor);
}
} else { } else {
RTC_DCHECK_NOTREACHED(); RTC_DCHECK_NOTREACHED();
} }
if (layer_info.link_utilization_factor < 1.0) { if (layer_info.link_utilization_factor < 1.0) {
// TODO(sprang): Consider checking underuse and allowing it to cancel some
// potential overuse by other streams.
// Don't boost target bitrate if encoder is under-using. // Don't boost target bitrate if encoder is under-using.
layer_info.link_utilization_factor = 1.0; layer_info.link_utilization_factor = 1.0;
} else { } else {
@ -313,7 +343,7 @@ VideoBitrateAllocation EncoderBitrateAdjuster::AdjustRateAllocation(
overshoot_detectors_[si][ti]->SetTargetRate( overshoot_detectors_[si][ti]->SetTargetRate(
DataRate::BitsPerSec(layer_bitrate_bps), DataRate::BitsPerSec(layer_bitrate_bps),
fps_fraction * rates.framerate_fps, now_ms); fps_fraction * rates.framerate_fps, now.ms());
} }
} }
} }
@ -345,6 +375,10 @@ void EncoderBitrateAdjuster::OnEncodedFrame(DataSize size,
if (detector) { if (detector) {
detector->OnEncodedFrame(size.bytes(), rtc::TimeMillis()); detector->OnEncodedFrame(size.bytes(), rtc::TimeMillis());
} }
if (media_rate_trackers_[stream_index]) {
media_rate_trackers_[stream_index]->OnDataProduced(size,
clock_.CurrentTime());
}
} }
void EncoderBitrateAdjuster::Reset() { void EncoderBitrateAdjuster::Reset() {
@ -352,6 +386,7 @@ void EncoderBitrateAdjuster::Reset() {
for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) { for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
overshoot_detectors_[si][ti].reset(); overshoot_detectors_[si][ti].reset();
} }
media_rate_trackers_[si].reset();
} }
// Call AdjustRateAllocation() with the last know bitrate allocation, so that // Call AdjustRateAllocation() with the last know bitrate allocation, so that
// the appropriate overuse detectors are immediately re-created. // the appropriate overuse detectors are immediately re-created.

View File

@ -14,17 +14,20 @@
#include <memory> #include <memory>
#include "api/field_trials_view.h" #include "api/field_trials_view.h"
#include "api/units/time_delta.h"
#include "api/video/encoded_image.h" #include "api/video/encoded_image.h"
#include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocation.h"
#include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder.h"
#include "system_wrappers/include/clock.h"
#include "video/encoder_overshoot_detector.h" #include "video/encoder_overshoot_detector.h"
#include "video/rate_utilization_tracker.h"
namespace webrtc { namespace webrtc {
class EncoderBitrateAdjuster { class EncoderBitrateAdjuster {
public: public:
// Size of sliding window used to track overshoot rate. // Size of sliding window used to track overshoot rate.
static constexpr int64_t kWindowSizeMs = 3000; static constexpr TimeDelta kWindowSize = TimeDelta::Seconds(3);
// Minimum number of frames since last layout change required to trust the // Minimum number of frames since last layout change required to trust the
// overshoot statistics. Otherwise falls back to default utilization. // overshoot statistics. Otherwise falls back to default utilization.
// By layout change, we mean any simulcast/spatial/temporal layer being either // By layout change, we mean any simulcast/spatial/temporal layer being either
@ -36,7 +39,8 @@ class EncoderBitrateAdjuster {
static constexpr double kDefaultUtilizationFactor = 1.2; static constexpr double kDefaultUtilizationFactor = 1.2;
EncoderBitrateAdjuster(const VideoCodec& codec_settings, EncoderBitrateAdjuster(const VideoCodec& codec_settings,
const FieldTrialsView& field_trials); const FieldTrialsView& field_trials,
Clock& clock);
~EncoderBitrateAdjuster(); ~EncoderBitrateAdjuster();
// Adjusts the given rate allocation to make it paceable within the target // Adjusts the given rate allocation to make it paceable within the target
@ -59,6 +63,7 @@ class EncoderBitrateAdjuster {
private: private:
const bool utilize_bandwidth_headroom_; const bool utilize_bandwidth_headroom_;
const bool use_newfangled_headroom_adjustment_;
VideoEncoder::RateControlParameters current_rate_control_parameters_; VideoEncoder::RateControlParameters current_rate_control_parameters_;
// FPS allocation of temporal layers, per simulcast/spatial layer. Represented // FPS allocation of temporal layers, per simulcast/spatial layer. Represented
@ -73,6 +78,10 @@ class EncoderBitrateAdjuster {
std::unique_ptr<EncoderOvershootDetector> std::unique_ptr<EncoderOvershootDetector>
overshoot_detectors_[kMaxSpatialLayers][kMaxTemporalStreams]; overshoot_detectors_[kMaxSpatialLayers][kMaxTemporalStreams];
// Per spatial layer track of average media utilization.
std::unique_ptr<RateUtilizationTracker>
media_rate_trackers_[kMaxSpatialLayers];
// Minimum bitrates allowed, per spatial layer. // Minimum bitrates allowed, per spatial layer.
uint32_t min_bitrates_bps_[kMaxSpatialLayers]; uint32_t min_bitrates_bps_[kMaxSpatialLayers];
@ -81,6 +90,8 @@ class EncoderBitrateAdjuster {
// Codec mode: { kRealtimeVideo, kScreensharing }. // Codec mode: { kRealtimeVideo, kScreensharing }.
VideoCodecMode codec_mode_; VideoCodecMode codec_mode_;
Clock& clock_;
}; };
} // namespace webrtc } // namespace webrtc

View File

@ -11,19 +11,26 @@
#include "video/encoder_bitrate_adjuster.h" #include "video/encoder_bitrate_adjuster.h"
#include <memory> #include <memory>
#include <string>
#include <vector> #include <vector>
#include "api/field_trials_view.h" #include "api/field_trials_view.h"
#include "api/units/data_rate.h" #include "api/units/data_rate.h"
#include "rtc_base/fake_clock.h" #include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/safe_conversions.h"
#include "test/gtest.h" #include "test/gtest.h"
#include "test/scoped_key_value_config.h" #include "test/scoped_key_value_config.h"
#include "test/time_controller/simulated_time_controller.h"
namespace webrtc { namespace webrtc {
namespace test { namespace test {
class EncoderBitrateAdjusterTest : public ::testing::Test { using ::testing::Test;
using ::testing::Values;
using ::testing::WithParamInterface;
class EncoderBitrateAdjusterTest : public Test,
public WithParamInterface<std::string> {
public: public:
static constexpr int64_t kWindowSizeMs = 3000; static constexpr int64_t kWindowSizeMs = 3000;
static constexpr int kDefaultBitrateBps = 300000; static constexpr int kDefaultBitrateBps = 300000;
@ -35,10 +42,12 @@ class EncoderBitrateAdjusterTest : public ::testing::Test {
static_assert(kSequenceLength % 2 == 0, "Sequence length must be even."); static_assert(kSequenceLength % 2 == 0, "Sequence length must be even.");
EncoderBitrateAdjusterTest() EncoderBitrateAdjusterTest()
: target_bitrate_(DataRate::BitsPerSec(kDefaultBitrateBps)), : time_controller_(/*start_time=*/Timestamp::Millis(123)),
target_bitrate_(DataRate::BitsPerSec(kDefaultBitrateBps)),
target_framerate_fps_(kDefaultFrameRateFps), target_framerate_fps_(kDefaultFrameRateFps),
tl_pattern_idx_{}, tl_pattern_idx_{},
sequence_idx_{} {} sequence_idx_{},
scoped_field_trial_(GetParam()) {}
protected: protected:
void SetUpAdjusterWithCodec(size_t num_spatial_layers, void SetUpAdjusterWithCodec(size_t num_spatial_layers,
@ -55,8 +64,8 @@ class EncoderBitrateAdjusterTest : public ::testing::Test {
} }
} }
adjuster_ = adjuster_ = std::make_unique<EncoderBitrateAdjuster>(
std::make_unique<EncoderBitrateAdjuster>(codec_, scoped_field_trial_); codec_, scoped_field_trial_, *time_controller_.GetClock());
adjuster_->OnEncoderInfo(encoder_info_); adjuster_->OnEncoderInfo(encoder_info_);
current_adjusted_allocation_ = current_adjusted_allocation_ =
adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters(
@ -112,7 +121,8 @@ class EncoderBitrateAdjusterTest : public ::testing::Test {
const int64_t start_us = rtc::TimeMicros(); const int64_t start_us = rtc::TimeMicros();
while (rtc::TimeMicros() < while (rtc::TimeMicros() <
start_us + (duration_ms * rtc::kNumMicrosecsPerMillisec)) { start_us + (duration_ms * rtc::kNumMicrosecsPerMillisec)) {
clock_.AdvanceTime(TimeDelta::Seconds(1) / target_framerate_fps_); time_controller_.AdvanceTime(TimeDelta::Seconds(1) /
target_framerate_fps_);
for (size_t si = 0; si < NumSpatialLayers(); ++si) { for (size_t si = 0; si < NumSpatialLayers(); ++si) {
const std::vector<int>& tl_pattern = const std::vector<int>& tl_pattern =
kTlPatterns[NumTemporalLayers(si) - 1]; kTlPatterns[NumTemporalLayers(si) - 1];
@ -231,12 +241,14 @@ class EncoderBitrateAdjusterTest : public ::testing::Test {
return multiplied_allocation; return multiplied_allocation;
} }
GlobalSimulatedTimeController time_controller_;
VideoCodec codec_; VideoCodec codec_;
VideoEncoder::EncoderInfo encoder_info_; VideoEncoder::EncoderInfo encoder_info_;
std::unique_ptr<EncoderBitrateAdjuster> adjuster_; std::unique_ptr<EncoderBitrateAdjuster> adjuster_;
VideoBitrateAllocation current_input_allocation_; VideoBitrateAllocation current_input_allocation_;
VideoBitrateAllocation current_adjusted_allocation_; VideoBitrateAllocation current_adjusted_allocation_;
rtc::ScopedFakeClock clock_;
DataRate target_bitrate_; DataRate target_bitrate_;
double target_framerate_fps_; double target_framerate_fps_;
int tl_pattern_idx_[kMaxSpatialLayers]; int tl_pattern_idx_[kMaxSpatialLayers];
@ -250,7 +262,7 @@ class EncoderBitrateAdjusterTest : public ::testing::Test {
{0, 3, 2, 3, 1, 3, 2, 3}}; {0, 3, 2, 3, 1, 3, 2, 3}};
}; };
TEST_F(EncoderBitrateAdjusterTest, SingleLayerOptimal) { TEST_P(EncoderBitrateAdjusterTest, SingleLayerOptimal) {
// Single layer, well behaved encoder. // Single layer, well behaved encoder.
current_input_allocation_.SetBitrate(0, 0, 300000); current_input_allocation_.SetBitrate(0, 0, 300000);
target_framerate_fps_ = 30; target_framerate_fps_ = 30;
@ -264,7 +276,7 @@ TEST_F(EncoderBitrateAdjusterTest, SingleLayerOptimal) {
ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.01); ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.01);
} }
TEST_F(EncoderBitrateAdjusterTest, SingleLayerOveruse) { TEST_P(EncoderBitrateAdjusterTest, SingleLayerOveruse) {
// Single layer, well behaved encoder. // Single layer, well behaved encoder.
current_input_allocation_.SetBitrate(0, 0, 300000); current_input_allocation_.SetBitrate(0, 0, 300000);
target_framerate_fps_ = 30; target_framerate_fps_ = 30;
@ -278,7 +290,7 @@ TEST_F(EncoderBitrateAdjusterTest, SingleLayerOveruse) {
current_adjusted_allocation_, 0.01); current_adjusted_allocation_, 0.01);
} }
TEST_F(EncoderBitrateAdjusterTest, SingleLayerUnderuse) { TEST_P(EncoderBitrateAdjusterTest, SingleLayerUnderuse) {
// Single layer, well behaved encoder. // Single layer, well behaved encoder.
current_input_allocation_.SetBitrate(0, 0, 300000); current_input_allocation_.SetBitrate(0, 0, 300000);
target_framerate_fps_ = 30; target_framerate_fps_ = 30;
@ -291,7 +303,7 @@ TEST_F(EncoderBitrateAdjusterTest, SingleLayerUnderuse) {
ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.00); ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.00);
} }
TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOptimalSize) { TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersOptimalSize) {
// Three temporal layers, 60%/20%/20% bps distro, well behaved encoder. // Three temporal layers, 60%/20%/20% bps distro, well behaved encoder.
current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 0, 180000);
current_input_allocation_.SetBitrate(0, 1, 60000); current_input_allocation_.SetBitrate(0, 1, 60000);
@ -305,7 +317,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOptimalSize) {
ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.01); ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.01);
} }
TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOvershoot) { TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersOvershoot) {
// Three temporal layers, 60%/20%/20% bps distro. // Three temporal layers, 60%/20%/20% bps distro.
// 10% overshoot on all layers. // 10% overshoot on all layers.
current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 0, 180000);
@ -322,7 +334,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersOvershoot) {
current_adjusted_allocation_, 0.01); current_adjusted_allocation_, 0.01);
} }
TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersUndershoot) { TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersUndershoot) {
// Three temporal layers, 60%/20%/20% bps distro, undershoot all layers. // Three temporal layers, 60%/20%/20% bps distro, undershoot all layers.
current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 0, 180000);
current_input_allocation_.SetBitrate(0, 1, 60000); current_input_allocation_.SetBitrate(0, 1, 60000);
@ -337,7 +349,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersUndershoot) {
ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.0); ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.0);
} }
TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersSkewedOvershoot) { TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersSkewedOvershoot) {
// Three temporal layers, 60%/20%/20% bps distro. // Three temporal layers, 60%/20%/20% bps distro.
// 10% overshoot on base layer, 20% on higher layers. // 10% overshoot on base layer, 20% on higher layers.
current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 0, 180000);
@ -355,7 +367,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersSkewedOvershoot) {
current_adjusted_allocation_, 0.01); current_adjusted_allocation_, 0.01);
} }
TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersNonLayeredEncoder) { TEST_P(EncoderBitrateAdjusterTest, ThreeTemporalLayersNonLayeredEncoder) {
// Three temporal layers, 60%/20%/20% bps allocation, 10% overshoot, // Three temporal layers, 60%/20%/20% bps allocation, 10% overshoot,
// encoder does not actually support temporal layers. // encoder does not actually support temporal layers.
current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 0, 180000);
@ -376,7 +388,7 @@ TEST_F(EncoderBitrateAdjusterTest, ThreeTemporalLayersNonLayeredEncoder) {
ExpectNear(expected_allocation, current_adjusted_allocation_, 0.01); ExpectNear(expected_allocation, current_adjusted_allocation_, 0.01);
} }
TEST_F(EncoderBitrateAdjusterTest, IgnoredStream) { TEST_P(EncoderBitrateAdjusterTest, IgnoredStream) {
// Encoder with three temporal layers, but in a mode that does not support // Encoder with three temporal layers, but in a mode that does not support
// deterministic frame rate. Those are ignored, even if bitrate overshoots. // deterministic frame rate. Those are ignored, even if bitrate overshoots.
current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 0, 180000);
@ -395,7 +407,7 @@ TEST_F(EncoderBitrateAdjusterTest, IgnoredStream) {
ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.00); ExpectNear(current_input_allocation_, current_adjusted_allocation_, 0.00);
} }
TEST_F(EncoderBitrateAdjusterTest, DifferentSpatialOvershoots) { TEST_P(EncoderBitrateAdjusterTest, DifferentSpatialOvershoots) {
// Two streams, both with three temporal layers. // Two streams, both with three temporal layers.
// S0 has 5% overshoot, S1 has 25% overshoot. // S0 has 5% overshoot, S1 has 25% overshoot.
current_input_allocation_.SetBitrate(0, 0, 180000); current_input_allocation_.SetBitrate(0, 0, 180000);
@ -427,13 +439,14 @@ TEST_F(EncoderBitrateAdjusterTest, DifferentSpatialOvershoots) {
} }
} }
TEST_F(EncoderBitrateAdjusterTest, HeadroomAllowsOvershootToMediaRate) { TEST_P(EncoderBitrateAdjusterTest, HeadroomAllowsOvershootToMediaRate) {
if (GetParam() == "WebRTC-VideoRateControl/adjuster_use_headroom:false/") {
// This test does not make sense without headroom adjustment.
GTEST_SKIP();
}
// Two streams, both with three temporal layers. // Two streams, both with three temporal layers.
// Media rate is 1.0, but network rate is higher. // Media rate is 1.0, but network rate is higher.
test::ScopedKeyValueConfig field_trial(
scoped_field_trial_,
"WebRTC-VideoRateControl/adjuster_use_headroom:true/");
const uint32_t kS0Bitrate = 300000; const uint32_t kS0Bitrate = 300000;
const uint32_t kS1Bitrate = 900000; const uint32_t kS1Bitrate = 900000;
current_input_allocation_.SetBitrate(0, 0, kS0Bitrate / 3); current_input_allocation_.SetBitrate(0, 0, kS0Bitrate / 3);
@ -470,12 +483,14 @@ TEST_F(EncoderBitrateAdjusterTest, HeadroomAllowsOvershootToMediaRate) {
} }
} }
TEST_F(EncoderBitrateAdjusterTest, DontExceedMediaRateEvenWithHeadroom) { TEST_P(EncoderBitrateAdjusterTest, DontExceedMediaRateEvenWithHeadroom) {
if (GetParam() == "WebRTC-VideoRateControl/adjuster_use_headroom:false/") {
// This test does not make sense without headroom adjustment.
GTEST_SKIP();
}
// Two streams, both with three temporal layers. // Two streams, both with three temporal layers.
// Media rate is 1.1, but network rate is higher. // Media rate is 1.1, but network rate is higher.
test::ScopedKeyValueConfig field_trial(
scoped_field_trial_,
"WebRTC-VideoRateControl/adjuster_use_headroom:true/");
const uint32_t kS0Bitrate = 300000; const uint32_t kS0Bitrate = 300000;
const uint32_t kS1Bitrate = 900000; const uint32_t kS1Bitrate = 900000;
@ -489,32 +504,37 @@ TEST_F(EncoderBitrateAdjusterTest, DontExceedMediaRateEvenWithHeadroom) {
target_framerate_fps_ = 30; target_framerate_fps_ = 30;
// Run twice, once configured as simulcast and once as VP9 SVC. // Run twice, once configured as simulcast and once as VP9 SVC.
for (int i = 0; i < 2; ++i) { for (const bool is_svc : {false, true}) {
SetUpAdjuster(2, 3, i == 0); SetUpAdjuster(/*num_spatial_layers=*/2,
// Network rate has 30% overshoot, media rate has 10% overshoot. /*num_temporal_layers=*/3, is_svc);
InsertFrames({{1.1, 1.1, 1.1}, {1.1, 1.1, 1.1}},
{{1.3, 1.3, 1.3}, {1.3, 1.3, 1.3}},
kWindowSizeMs * kSequenceLength);
// Push back by 30%. // First insert frames with no overshoot.
InsertFrames({{1.0, 1.0, 1.0}}, kWindowSizeMs * kSequenceLength);
// Verify encoder is not pushed backed.
current_adjusted_allocation_ = current_adjusted_allocation_ =
adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters(
current_input_allocation_, target_framerate_fps_)); current_input_allocation_, target_framerate_fps_));
// The up-down causes a bit more noise, allow slightly more error margin. // The up-down causes a bit more noise, allow slightly more error margin.
ExpectNear(MultiplyAllocation(current_input_allocation_, 1 / 1.3), ExpectNear(MultiplyAllocation(current_input_allocation_, 1.0),
current_adjusted_allocation_, 0.015); current_adjusted_allocation_, 0.015);
// Change network rate to 30% overshoot, media rate has 10% overshoot.
InsertFrames({{1.1, 1.1, 1.1}, {1.1, 1.1, 1.1}},
{{1.3, 1.3, 1.3}, {1.3, 1.3, 1.3}},
kWindowSizeMs * kSequenceLength);
// Add 100% link headroom, overshoot from network to media rate is allowed. // Add 100% link headroom, overshoot from network to media rate is allowed.
current_adjusted_allocation_ = current_adjusted_allocation_ =
adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters( adjuster_->AdjustRateAllocation(VideoEncoder::RateControlParameters(
current_input_allocation_, target_framerate_fps_, current_input_allocation_, target_framerate_fps_,
DataRate::BitsPerSec(current_input_allocation_.get_sum_bps() * 2))); DataRate::BitsPerSec(current_input_allocation_.get_sum_bps() * 2)));
ExpectNear(MultiplyAllocation(current_input_allocation_, 1 / 1.1), ExpectNear(MultiplyAllocation(current_input_allocation_, 1 / 1.1),
current_adjusted_allocation_, 0.015); current_adjusted_allocation_, 0.02);
} }
} }
TEST_F(EncoderBitrateAdjusterTest, HonorsMinBitrateWithAv1) { TEST_P(EncoderBitrateAdjusterTest, HonorsMinBitrateWithAv1) {
// Single layer, well behaved encoder. // Single layer, well behaved encoder.
const DataRate kHighBitrate = DataRate::KilobitsPerSec(20); const DataRate kHighBitrate = DataRate::KilobitsPerSec(20);
const DataRate kALowerMinBitrate = DataRate::KilobitsPerSec(15); const DataRate kALowerMinBitrate = DataRate::KilobitsPerSec(15);
@ -549,5 +569,13 @@ TEST_F(EncoderBitrateAdjusterTest, HonorsMinBitrateWithAv1) {
ExpectNear(expected_input_allocation, current_adjusted_allocation_, 0.01); ExpectNear(expected_input_allocation, current_adjusted_allocation_, 0.01);
} }
INSTANTIATE_TEST_SUITE_P(
AdjustWithHeadroomVariations,
EncoderBitrateAdjusterTest,
Values("WebRTC-VideoRateControl/adjuster_use_headroom:false/",
"WebRTC-VideoRateControl/adjuster_use_headroom:true/",
"WebRTC-VideoRateControl/adjuster_use_headroom:true/"
"WebRTC-BitrateAdjusterUseNewfangledHeadroomAdjustment/Enabled/"));
} // namespace test } // namespace test
} // namespace webrtc } // namespace webrtc

View File

@ -1345,8 +1345,8 @@ void VideoStreamEncoder::ReconfigureEncoder() {
const VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo(); const VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo();
if (rate_control_settings_.UseEncoderBitrateAdjuster()) { if (rate_control_settings_.UseEncoderBitrateAdjuster()) {
bitrate_adjuster_ = bitrate_adjuster_ = std::make_unique<EncoderBitrateAdjuster>(
std::make_unique<EncoderBitrateAdjuster>(codec, env_.field_trials()); codec, env_.field_trials(), env_.clock());
bitrate_adjuster_->OnEncoderInfo(info); bitrate_adjuster_->OnEncoderInfo(info);
} }

View File

@ -2611,8 +2611,7 @@ TEST_F(VideoStreamEncoderTest, CorrectlyAdjustsAv1Bitrate) {
allowed_error_bps); allowed_error_bps);
// Insert frames until bitrate adjuster is saturated. // Insert frames until bitrate adjuster is saturated.
const TimeDelta runtime = const TimeDelta runtime = EncoderBitrateAdjuster::kWindowSize;
TimeDelta::Millis(EncoderBitrateAdjuster::kWindowSizeMs);
const Timestamp start_time = clock()->CurrentTime(); const Timestamp start_time = clock()->CurrentTime();
while (clock()->CurrentTime() - start_time < runtime) { while (clock()->CurrentTime() - start_time < runtime) {
video_source_.IncomingCapturedFrame( video_source_.IncomingCapturedFrame(