Fix for bandwidth toggling problem in StartUpPhase.
The fix has two parts:
1. Fix the LossBasedControl estimate being at much lower levels than the
   DelayBased estimate in StartUpPhase.
2. Explicitly fix the state machine problem that leads to toggling between
   the two estimates.

Bug: webrtc:10222
Change-Id: Ieaaaec6c9233da61a86b69d936c4979c79645686
Reviewed-on: https://webrtc-review.googlesource.com/c/118280
Commit-Queue: Christoffer Rodbro <crodbro@webrtc.org>
Reviewed-by: Sebastian Jansson <srte@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26327}
parent 5c2f1f053f
commit 982639c019
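For context on part 2: with the WebRTC-Bwe-LossBasedControl trial enabled, the target send rate is effectively limited by both the delay-based and the loss-based estimates, so a loss-based estimate that keeps getting reset far below the delay-based one repeatedly drags the rate down and then lets it recover. The snippet below is a deliberately simplified, hypothetical sketch of that interaction, not the actual WebRTC code; the struct and function names are invented for illustration.

// Simplified sketch of the toggling mechanism; all names are placeholders
// and this is not the actual WebRTC implementation.
#include <algorithm>
#include <cstdint>

struct Estimates {
  int64_t delay_based_bps;
  int64_t loss_based_bps;
};

// The target send rate is effectively limited by both estimates.
int64_t CombinedTargetBps(const Estimates& e) {
  return std::min(e.delay_based_bps, e.loss_based_bps);
}

// If loss_based_bps is reset far below delay_based_bps at start-up, the
// target drops to the low value, recovers as the loss-based estimate ramps
// back up, and the cycle can repeat: these are the bandwidth dips the new
// unit test at the end of this change is written to catch.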
@@ -212,4 +212,19 @@ void LossBasedBandwidthEstimation::Update(Timestamp at_time,
   }
 }
 
+void LossBasedBandwidthEstimation::Reset(DataRate bitrate) {
+  loss_based_bitrate_ = bitrate;
+  average_loss_ = 0;
+  average_loss_max_ = 0;
+}
+
+void LossBasedBandwidthEstimation::MaybeReset(DataRate bitrate) {
+  if (config_.allow_resets)
+    Reset(bitrate);
+}
+
+void LossBasedBandwidthEstimation::SetInitialBitrate(DataRate bitrate) {
+  Reset(bitrate);
+}
+
 }  // namespace webrtc
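With Reset() now defined out of line (presumably in loss_based_bandwidth_estimation.cc), a reset clears the smoothed loss statistics (average_loss_, average_loss_max_) together with the bitrate, which is what keeps the loss-based estimate from restarting with stale loss averages (part 1 of the fix). A minimal, hypothetical caller sketch follows; OnSendBitrateSet and its surroundings are placeholders, only MaybeReset() and SetInitialBitrate() come from this change.

// Hypothetical caller; not part of this CL.
void OnSendBitrateSet(LossBasedBandwidthEstimation& loss_based,
                      DataRate bitrate,
                      bool first_time) {
  if (first_time) {
    loss_based.SetInitialBitrate(bitrate);  // always re-seeds the estimate
  } else {
    loss_based.MaybeReset(bitrate);  // re-seeds only if the trial allows resets
  }
}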
@@ -52,17 +52,16 @@ class LossBasedBandwidthEstimation {
               TimeDelta last_round_trip_time);
   void UpdateAcknowledgedBitrate(DataRate acknowledged_bitrate,
                                  Timestamp at_time);
-  void MaybeReset(DataRate bitrate) {
-    if (config_.allow_resets)
-      loss_based_bitrate_ = bitrate;
-  }
-  void SetInitialBitrate(DataRate bitrate) { loss_based_bitrate_ = bitrate; }
+  void MaybeReset(DataRate bitrate);
+  void SetInitialBitrate(DataRate bitrate);
   bool Enabled() const { return config_.enabled; }
   void UpdateLossStatistics(const std::vector<PacketResult>& packet_results,
                             Timestamp at_time);
   DataRate GetEstimate() const { return loss_based_bitrate_; }
 
  private:
+  void Reset(DataRate bitrate);
+
   LossBasedControlConfig config_;
   double average_loss_;
   double average_loss_max_;
@@ -453,7 +453,12 @@ void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) {
 
   if (new_bitrate != current_bitrate_) {
     min_bitrate_history_.clear();
-    min_bitrate_history_.push_back(std::make_pair(at_time, current_bitrate_));
+    if (loss_based_bandwidth_estimation_.Enabled()) {
+      min_bitrate_history_.push_back(std::make_pair(at_time, new_bitrate));
+    } else {
+      min_bitrate_history_.push_back(
+          std::make_pair(at_time, current_bitrate_));
+    }
     CapBitrateToThresholds(at_time, new_bitrate);
     return;
   }
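The change above (presumably in send_side_bandwidth_estimation.cc) is the state-machine part of the fix: min_bitrate_history_ is the baseline the periodic rate increase is computed from, so when loss-based control is enabled the history is now re-seeded with the new, lower bitrate rather than the old current_bitrate_. Below is a rough, hypothetical sketch of why the seed matters; the 8% growth step and all names are illustrative, not quoted from the actual code.

#include <cstdint>
#include <deque>
#include <utility>

// (time_ms, bitrate_bps) samples; the ramp-up step is applied on top of the
// oldest entry, i.e. the smallest recent bitrate.
using BitrateHistory = std::deque<std::pair<int64_t, int64_t>>;

int64_t NextRampUpTargetBps(const BitrateHistory& min_bitrate_history) {
  // Illustrative 8% growth step on top of the history baseline.
  return static_cast<int64_t>(min_bitrate_history.front().second * 1.08);
}

// If the history were seeded with the pre-drop bitrate, the very next step
// would return a value near the old rate and the estimate would snap back up;
// seeding it with the dropped bitrate makes the ramp-up restart from the new,
// lower level instead.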
@@ -8,6 +8,8 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
+#include <queue>
+
 #include "api/transport/goog_cc_factory.h"
 #include "test/field_trial.h"
 #include "test/gtest.h"
@@ -15,6 +17,32 @@
 namespace webrtc {
 namespace test {
+namespace {
+// Count dips from a constant high bandwidth level within a short window.
+int CountBandwidthDips(std::queue<DataRate> bandwidth_history,
+                       DataRate threshold) {
+  if (bandwidth_history.empty())
+    return true;
+  DataRate first = bandwidth_history.front();
+  bandwidth_history.pop();
+
+  int dips = 0;
+  bool state_high = true;
+  while (!bandwidth_history.empty()) {
+    if (bandwidth_history.front() + threshold < first && state_high) {
+      ++dips;
+      state_high = false;
+    } else if (bandwidth_history.front() == first) {
+      state_high = true;
+    } else if (bandwidth_history.front() > first) {
+      // If this is toggling we will catch it later when front becomes first.
+      state_high = false;
+    }
+    bandwidth_history.pop();
+  }
+  return dips;
+}
+}  // namespace
 
 TEST(GoogCcNetworkControllerTest, MaintainsLowRateInSafeResetTrial) {
   const DataRate kLinkCapacity = DataRate::kbps(200);
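A note on the helper added above: the first queued sample is the reference "high" level, a dip is counted each time the rate falls more than threshold below that reference while the state is "high", and the state only returns to "high" when the rate comes back exactly to the reference. The `return true;` on an empty history converts to 1, which still satisfies the EXPECT_LT(..., 2) in the new test below. A small hypothetical usage sketch follows (sample values invented, same DataRate API as in the test).

// Hypothetical standalone usage of CountBandwidthDips(); the sample values
// are made up for illustration.
std::queue<DataRate> history;
history.push(DataRate::kbps(2000));  // reference "high" level
history.push(DataRate::kbps(1800));  // 200 kbps below the reference: first dip
history.push(DataRate::kbps(2000));  // recovered to the reference level
history.push(DataRate::kbps(1500));  // dropped again: second dip
int dips = CountBandwidthDips(history, DataRate::kbps(100));  // dips == 2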
@@ -146,5 +174,39 @@ TEST(GoogCcNetworkControllerTest,
   EXPECT_LT(client->GetStats().pacer_delay_ms, 150);
 }
 
+TEST(GoogCcNetworkControllerTest, NoBandwidthTogglingInLossControlTrial) {
+  ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/");
+  Scenario s("googcc_unit/no_toggling", true);
+  auto* send_net = s.CreateSimulationNode([&](NetworkNodeConfig* c) {
+    c->simulation.bandwidth = DataRate::kbps(2000);
+    c->simulation.loss_rate = 0.2;
+    c->simulation.delay = TimeDelta::ms(10);
+  });
+
+  // TODO(srte): replace with SimulatedTimeClient when it supports probing.
+  auto* client = s.CreateClient("send", [&](CallClientConfig* c) {
+    c->transport.cc = TransportControllerConfig::CongestionController::kGoogCc;
+    c->transport.rates.start_rate = DataRate::kbps(300);
+  });
+  auto* route = s.CreateRoutes(client, {send_net},
+                               s.CreateClient("return", CallClientConfig()),
+                               {s.CreateSimulationNode(NetworkNodeConfig())});
+  s.CreateVideoStream(route->forward(), VideoStreamConfig());
+  // Allow the controller to initialize.
+  s.RunFor(TimeDelta::ms(250));
+
+  std::queue<DataRate> bandwidth_history;
+  const TimeDelta step = TimeDelta::ms(50);
+  for (TimeDelta time = TimeDelta::Zero(); time < TimeDelta::ms(2000);
+       time += step) {
+    s.RunFor(step);
+    const TimeDelta window = TimeDelta::ms(500);
+    if (bandwidth_history.size() >= window / step)
+      bandwidth_history.pop();
+    bandwidth_history.push(client->send_bandwidth());
+    EXPECT_LT(CountBandwidthDips(bandwidth_history, DataRate::kbps(100)), 2);
+  }
+}
+
 }  // namespace test
 }  // namespace webrtc