Configure and use max bitrate to limit the AIMD controller estimates.
Bug: webrtc:9275
Change-Id: I9625cd473e1cb198abe08020f5462f1bd64bf2a5
Reviewed-on: https://webrtc-review.googlesource.com/77081
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Sebastian Jansson <srte@webrtc.org>
Commit-Queue: Björn Terelius <terelius@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23287}
parent 09ec3cf7c6
commit 18d7c7ea7e
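The core behaviour added by this change is that the AIMD controller now clamps its current estimate into a configured [min, max] window, and callers that only know a minimum pass a `kMaxBitrateUnchanged` (-1) sentinel to leave the configured maximum alone. The standalone sketch below is not part of the commit; it only illustrates the intended clamping semantics, and the names `ToyRateControl` and `kMaxUnchanged` are illustrative and do not exist in WebRTC.

```cpp
// Illustration of the clamping behaviour introduced by
// AimdRateControl::SetBitrateConstraints(). All names here are toy names.
#include <algorithm>
#include <cassert>

namespace {

constexpr int kMaxUnchanged = -1;  // Mirrors the kMaxBitrateUnchanged sentinel.

class ToyRateControl {
 public:
  void SetBitrateConstraints(int min_bitrate_bps, int max_bitrate_bps) {
    if (min_bitrate_bps > 0)
      min_bps_ = min_bitrate_bps;
    if (max_bitrate_bps > 0)
      max_bps_ = std::max(min_bps_, max_bitrate_bps);  // Max never below min.
    // Pull the current estimate back into the configured window.
    current_bps_ = std::clamp(current_bps_, min_bps_, max_bps_);
  }

  void SetEstimate(int bitrate_bps) {
    current_bps_ = std::clamp(bitrate_bps, min_bps_, max_bps_);
  }

  int current_bps() const { return current_bps_; }

 private:
  int min_bps_ = 10'000;
  int max_bps_ = 30'000'000;   // Mirrors kDefaultMaxBandwidthBps.
  int current_bps_ = 300'000;  // Mirrors kDefaultStartBandwidthBps.
};

}  // namespace

int main() {
  ToyRateControl rc;
  rc.SetEstimate(1'000'000);
  rc.SetBitrateConstraints(50'000, 200'000);        // New max caps the estimate.
  assert(rc.current_bps() == 200'000);
  rc.SetBitrateConstraints(50'000, kMaxUnchanged);  // Max is left unchanged.
  assert(rc.current_bps() == 200'000);
  return 0;
}
```

In the actual change the same pattern is expressed with rtc::SafeMax and rtc::SafeClamp, and call sites that previously called SetMinBitrate() now call SetBitrateConstraints() with kMaxBitrateUnchanged when no maximum is known.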
@@ -314,10 +314,11 @@ void DelayBasedBwe::SetStartBitrate(int start_bitrate_bps) {
   rate_control_.SetStartBitrate(start_bitrate_bps);
 }
 
-void DelayBasedBwe::SetMinBitrate(int min_bitrate_bps) {
+void DelayBasedBwe::SetBitrateConstraints(int min_bitrate_bps,
+                                          int max_bitrate_bps) {
   // Called from both the configuration thread and the network thread. Shouldn't
   // be called from the network thread in the future.
-  rate_control_.SetMinBitrate(min_bitrate_bps);
+  rate_control_.SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
 }
 
 int64_t DelayBasedBwe::GetExpectedBwePeriodMs() const {

@@ -52,7 +52,7 @@ class DelayBasedBwe {
   bool LatestEstimate(std::vector<uint32_t>* ssrcs,
                       uint32_t* bitrate_bps) const;
   void SetStartBitrate(int start_bitrate_bps);
-  void SetMinBitrate(int min_bitrate_bps);
+  void SetBitrateConstraints(int min_bitrate_bps, int max_bitrate_bps);
   int64_t GetExpectedBwePeriodMs() const;
 
  private:

@@ -318,10 +318,11 @@ void DelayBasedBwe::SetStartBitrate(int start_bitrate_bps) {
   rate_control_.SetStartBitrate(start_bitrate_bps);
 }
 
-void DelayBasedBwe::SetMinBitrate(int min_bitrate_bps) {
+void DelayBasedBwe::SetBitrateConstraints(int min_bitrate_bps,
+                                          int max_bitrate_bps) {
   // Called from both the configuration thread and the network thread. Shouldn't
   // be called from the network thread in the future.
-  rate_control_.SetMinBitrate(min_bitrate_bps);
+  rate_control_.SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
 }
 
 int64_t DelayBasedBwe::GetExpectedBwePeriodMs() const {

@@ -53,8 +53,9 @@ class DelayBasedBwe {
   void OnRttUpdate(int64_t avg_rtt_ms);
   bool LatestEstimate(std::vector<uint32_t>* ssrcs,
                       uint32_t* bitrate_bps) const;
+
   void SetStartBitrate(int start_bitrate_bps);
-  void SetMinBitrate(int min_bitrate_bps);
+  void SetBitrateConstraints(int min_bitrate_bps, int max_bitrate_bps);
   int64_t GetExpectedBwePeriodMs() const;
 
  private:
@@ -129,7 +129,9 @@ GoogCcNetworkController::GoogCcNetworkController(RtcEventLog* event_log,
       max_total_allocated_bitrate_(DataRate::Zero()),
       in_cwnd_experiment_(CwndExperimentEnabled()),
       accepted_queue_ms_(kDefaultAcceptedQueueMs) {
-  delay_based_bwe_->SetMinBitrate(congestion_controller::GetMinBitrateBps());
+  constexpr int kMaxBitrateUnchanged = -1;
+  delay_based_bwe_->SetBitrateConstraints(
+      congestion_controller::GetMinBitrateBps(), kMaxBitrateUnchanged);
   UpdateBitrateConstraints(config.constraints, config.starting_bandwidth);
   OnStreamsConfig(config.stream_based_config);
   if (in_cwnd_experiment_ &&

@@ -163,7 +165,7 @@ NetworkControlUpdate GoogCcNetworkController::OnNetworkRouteChange(
   delay_based_bwe_.reset(new DelayBasedBwe(event_log_));
   acknowledged_bitrate_estimator_.reset(new AcknowledgedBitrateEstimator());
   delay_based_bwe_->SetStartBitrate(start_bitrate_bps);
-  delay_based_bwe_->SetMinBitrate(min_bitrate_bps);
+  delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
 
   probe_controller_->Reset(msg.at_time.ms());
   probe_controller_->SetBitrates(min_bitrate_bps, start_bitrate_bps,
@@ -262,9 +264,10 @@ void GoogCcNetworkController::UpdateBitrateConstraints(
 
   bandwidth_estimation_->SetBitrates(start_bitrate_bps, min_bitrate_bps,
                                      max_bitrate_bps);
+
   if (start_bitrate_bps > 0)
     delay_based_bwe_->SetStartBitrate(start_bitrate_bps);
-  delay_based_bwe_->SetMinBitrate(min_bitrate_bps);
+  delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
 }
 
 NetworkControlUpdate GoogCcNetworkController::OnTransportLossReport(
@@ -137,7 +137,9 @@ SendSideCongestionController::SendSideCongestionController(
           webrtc::field_trial::IsEnabled("WebRTC-SendSideBwe-WithOverhead")),
       transport_overhead_bytes_per_packet_(0),
       pacer_pushback_experiment_(IsPacerPushbackExperimentEnabled()) {
-  delay_based_bwe_->SetMinBitrate(min_bitrate_bps_);
+  constexpr int kMaxBitrateUnchanged = -1;
+  delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps_,
+                                          kMaxBitrateUnchanged);
   if (in_cwnd_experiment_ &&
       !ReadCwndExperimentParameter(&accepted_queue_ms_)) {
     RTC_LOG(LS_WARNING) << "Failed to parse parameters for CwndExperiment "

@@ -186,7 +188,7 @@ void SendSideCongestionController::SetBweBitrates(int min_bitrate_bps,
     if (start_bitrate_bps > 0)
       delay_based_bwe_->SetStartBitrate(start_bitrate_bps);
     min_bitrate_bps_ = min_bitrate_bps;
-    delay_based_bwe_->SetMinBitrate(min_bitrate_bps_);
+    delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
   }
   MaybeTriggerOnNetworkChanged();
 }

@@ -220,8 +222,8 @@ void SendSideCongestionController::OnNetworkRouteChanged(
     min_bitrate_bps_ = min_bitrate_bps;
     delay_based_bwe_.reset(new DelayBasedBwe(event_log_, clock_));
     acknowledged_bitrate_estimator_.reset(new AcknowledgedBitrateEstimator());
+    delay_based_bwe_->SetBitrateConstraints(min_bitrate_bps, max_bitrate_bps);
     delay_based_bwe_->SetStartBitrate(bitrate_bps);
-    delay_based_bwe_->SetMinBitrate(min_bitrate_bps);
   }
 
   probe_controller_->Reset();
@@ -28,9 +28,14 @@
 namespace webrtc {
 
-static const int64_t kDefaultRttMs = 200;
-static const int64_t kMaxFeedbackIntervalMs = 1000;
-static const float kDefaultBackoffFactor = 0.85f;
+namespace {
+constexpr int64_t kDefaultRttMs = 200;
+constexpr int64_t kMinFeedbackIntervalMs = 200;
+constexpr int64_t kMaxFeedbackIntervalMs = 1000;
+constexpr float kDefaultBackoffFactor = 0.85f;
+constexpr int kDefaultMaxBandwidthBps = 30000000;
+constexpr int kDefaultStartBandwidthBps = 300000;
+}  // namespace
 
 const char kBweBackOffFactorExperiment[] = "WebRTC-BweBackOffFactor";
@@ -56,8 +61,8 @@ float ReadTrendlineFilterWindowSize() {
 
 AimdRateControl::AimdRateControl()
     : min_configured_bitrate_bps_(congestion_controller::GetMinBitrateBps()),
-      max_configured_bitrate_bps_(30000000),
-      current_bitrate_bps_(max_configured_bitrate_bps_),
+      max_configured_bitrate_bps_(kDefaultMaxBandwidthBps),
+      current_bitrate_bps_(kDefaultStartBandwidthBps),
       avg_max_bitrate_kbps_(-1.0f),
       var_max_bitrate_kbps_(0.4f),
       rate_control_state_(kRcHold),
@@ -78,13 +83,30 @@ AimdRateControl::AimdRateControl()
 AimdRateControl::~AimdRateControl() {}
 
 void AimdRateControl::SetStartBitrate(int start_bitrate_bps) {
-  current_bitrate_bps_ = start_bitrate_bps;
-  bitrate_is_initialized_ = true;
+  // TODO(terelius): It would make sense to not change the estimate if we have
+  // already initialized a bitrate. However, some code recreates and resets
+  // parameters multiple times and those tests break when changing this.
+  if (start_bitrate_bps > 0) {
+    // TODO(terelius): I think this should clamp the bitrate to the configured
+    // max and min, but that would be a breaking change because SetStartBitrate
+    // and SetBitrateConstraints are separate calls that can occur in either
+    // order. Consider merging the calls in the future.
+    current_bitrate_bps_ = start_bitrate_bps;
+    bitrate_is_initialized_ = true;
+  }
 }
 
-void AimdRateControl::SetMinBitrate(int min_bitrate_bps) {
-  min_configured_bitrate_bps_ = min_bitrate_bps;
-  current_bitrate_bps_ = std::max<int>(min_bitrate_bps, current_bitrate_bps_);
+void AimdRateControl::SetBitrateConstraints(int min_bitrate_bps,
+                                            int max_bitrate_bps) {
+  if (min_bitrate_bps > 0)
+    min_configured_bitrate_bps_ = min_bitrate_bps;
+  if (max_bitrate_bps > 0) {
+    max_configured_bitrate_bps_ =
+        rtc::SafeMax(min_configured_bitrate_bps_, max_bitrate_bps);
+  }
+  current_bitrate_bps_ = rtc::SafeClamp<uint32_t>(current_bitrate_bps_,
+                                                  min_configured_bitrate_bps_,
+                                                  max_configured_bitrate_bps_);
 }
 
 bool AimdRateControl::ValidEstimate() const {
@@ -97,7 +119,6 @@ int64_t AimdRateControl::GetFeedbackInterval() const {
   static const int kRtcpSize = 80;
   const int64_t interval = static_cast<int64_t>(
       kRtcpSize * 8.0 * 1000.0 / (0.05 * current_bitrate_bps_) + 0.5);
-  const int64_t kMinFeedbackIntervalMs = 200;
   return rtc::SafeClamp(interval, kMinFeedbackIntervalMs,
                         kMaxFeedbackIntervalMs);
 }
@@ -177,10 +198,9 @@ int AimdRateControl::GetExpectedBandwidthPeriodMs() const {
   if (!last_decrease_)
     return smoothing_experiment_ ? kMinPeriodMs : kDefaultPeriodMs;
 
-  return std::min(kMaxPeriodMs,
-                  std::max<int>(1000 * static_cast<int64_t>(*last_decrease_) /
-                                    increase_rate,
-                                kMinPeriodMs));
+  return rtc::SafeClamp<int>(
+      1000 * static_cast<int64_t>(*last_decrease_) / increase_rate,
+      kMinPeriodMs, kMaxPeriodMs);
 }
 
 uint32_t AimdRateControl::ChangeBitrate(uint32_t new_bitrate_bps,
@@ -280,7 +300,8 @@ uint32_t AimdRateControl::ClampBitrate(uint32_t new_bitrate_bps,
   // We allow a bit more lag at very low rates to not too easily get stuck if
   // the encoder produces uneven outputs.
   const uint32_t max_bitrate_bps =
-      static_cast<uint32_t>(1.5f * incoming_bitrate_bps) + 10000;
+      rtc::SafeMin(static_cast<uint32_t>(1.5f * incoming_bitrate_bps) + 10000,
+                   max_configured_bitrate_bps_);
   if (new_bitrate_bps > current_bitrate_bps_ &&
       new_bitrate_bps > max_bitrate_bps) {
     new_bitrate_bps = std::max(current_bitrate_bps_, max_bitrate_bps);
@@ -30,7 +30,7 @@ class AimdRateControl {
   // otherwise.
   bool ValidEstimate() const;
   void SetStartBitrate(int start_bitrate_bps);
-  void SetMinBitrate(int min_bitrate_bps);
+  void SetBitrateConstraints(int min_bitrate_bps, int max_bitrate_bps);
   int64_t GetFeedbackInterval() const;
   // Returns true if the bitrate estimate hasn't been changed for more than
   // an RTT, or if the incoming_bitrate is less than half of the current
@@ -105,6 +105,50 @@ TEST(AimdRateControlTest, GetIncreaseRateAndBandwidthPeriodSmoothingExp) {
             states.aimd_rate_control->GetExpectedBandwidthPeriodMs());
 }
 
+TEST(AimdRateControlTest, BweLimitedByConfiguredMin) {
+  auto states = CreateAimdRateControlStates();
+  constexpr int kMinBitrate = 50000;
+  constexpr int kMaxBitrate = 200000;
+  constexpr int kStartBitrate = kMinBitrate + 1;
+  states.aimd_rate_control->SetBitrateConstraints(kMinBitrate, kMaxBitrate);
+  states.aimd_rate_control->SetStartBitrate(kStartBitrate);
+  states.aimd_rate_control->SetEstimate(
+      kStartBitrate, states.simulated_clock->TimeInMilliseconds());
+  while (states.simulated_clock->TimeInMilliseconds() - kClockInitialTime <
+         20000) {
+    // Don't decrease below minimum even if the actual throughput is lower.
+    constexpr int kAckedBitrate = kMinBitrate - 1000;
+    UpdateRateControl(states, BandwidthUsage::kBwOverusing, kAckedBitrate,
+                      states.simulated_clock->TimeInMilliseconds());
+    states.simulated_clock->AdvanceTimeMilliseconds(100);
+  }
+  ASSERT_TRUE(states.aimd_rate_control->ValidEstimate());
+  EXPECT_EQ(static_cast<uint32_t>(kMinBitrate),
+            states.aimd_rate_control->LatestEstimate());
+}
+
+TEST(AimdRateControlTest, BweLimitedByConfiguredMax) {
+  auto states = CreateAimdRateControlStates();
+  constexpr int kMinBitrate = 50000;
+  constexpr int kMaxBitrate = 200000;
+  constexpr int kStartBitrate = kMaxBitrate - 1;
+  states.aimd_rate_control->SetBitrateConstraints(kMinBitrate, kMaxBitrate);
+  states.aimd_rate_control->SetStartBitrate(kStartBitrate);
+  states.aimd_rate_control->SetEstimate(
+      kStartBitrate, states.simulated_clock->TimeInMilliseconds());
+  while (states.simulated_clock->TimeInMilliseconds() - kClockInitialTime <
+         20000) {
+    // Don't increase above maximum even if the actual throughput is higher.
+    constexpr int kAckedBitrate = kMaxBitrate + 10000;
+    UpdateRateControl(states, BandwidthUsage::kBwNormal, kAckedBitrate,
+                      states.simulated_clock->TimeInMilliseconds());
+    states.simulated_clock->AdvanceTimeMilliseconds(100);
+  }
+  ASSERT_TRUE(states.aimd_rate_control->ValidEstimate());
+  EXPECT_EQ(static_cast<uint32_t>(kMaxBitrate),
+            states.aimd_rate_control->LatestEstimate());
+}
+
 TEST(AimdRateControlTest, BweLimitedByAckedBitrate) {
   auto states = CreateAimdRateControlStates();
   constexpr int kAckedBitrate = 10000;
@@ -222,6 +266,7 @@ TEST(AimdRateControlTest, BandwidthPeriodIsNotAboveMaxSmoothingExp) {
   test::ScopedFieldTrials override_field_trials(kSmoothingExpFieldTrial);
   auto states = CreateAimdRateControlStates();
   constexpr int kInitialBitrate = 50000000;
+  states.aimd_rate_control->SetBitrateConstraints(10000, 60000000);
   states.aimd_rate_control->SetEstimate(
       kInitialBitrate, states.simulated_clock->TimeInMilliseconds());
   states.simulated_clock->AdvanceTimeMilliseconds(100);
@@ -412,6 +412,7 @@ void RemoteBitrateEstimatorAbsSendTime::SetMinBitrate(int min_bitrate_bps) {
   // Called from both the configuration thread and the network thread. Shouldn't
   // be called from the network thread in the future.
   rtc::CritScope lock(&crit_);
-  remote_rate_.SetMinBitrate(min_bitrate_bps);
+  constexpr int kMaxBitrateUnchanged = -1;
+  remote_rate_.SetBitrateConstraints(min_bitrate_bps, kMaxBitrateUnchanged);
 }
 }  // namespace webrtc
@@ -250,7 +250,8 @@ AimdRateControl* RemoteBitrateEstimatorSingleStream::GetRemoteRate() {
 
 void RemoteBitrateEstimatorSingleStream::SetMinBitrate(int min_bitrate_bps) {
   rtc::CritScope cs(&crit_sect_);
-  remote_rate_->SetMinBitrate(min_bitrate_bps);
+  constexpr int kMaxBitrateUnchanged = -1;
+  remote_rate_->SetBitrateConstraints(min_bitrate_bps, kMaxBitrateUnchanged);
 }
 
 }  // namespace webrtc
@@ -44,7 +44,7 @@ SendSideBweSender::SendSideBweSender(int kbps,
   bitrate_controller_->SetStartBitrate(1000 * kbps);
   bitrate_controller_->SetMinMaxBitrate(1000 * kMinBitrateKbps,
                                         1000 * kMaxBitrateKbps);
-  bwe_->SetMinBitrate(1000 * kMinBitrateKbps);
+  bwe_->SetBitrateConstraints(1000 * kMinBitrateKbps, 1000 * kMaxBitrateKbps);
 }
 
 SendSideBweSender::~SendSideBweSender() {}