Add alternative load estimator to OverUseFrameDetector.

The new estimator uses the timestamps attached to EncodedImage, and is
taken from the reverted CL
https://webrtc-review.googlesource.com/c/src/+/23720.

Bug: webrtc:8504
Change-Id: I273bbe3eb6ea2ab9628c9615b803a379061ad44a
Reviewed-on: https://webrtc-review.googlesource.com/31380
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21289}
This commit is contained in:
Niels Möller 2017-12-14 16:39:44 +01:00 committed by Commit Bot
parent 28fe510b2f
commit 83dbeacb1a
4 changed files with 500 additions and 53 deletions

View File

@ -68,9 +68,9 @@ const auto kScaleReasonCpu = AdaptationObserverInterface::AdaptReason::kCpu;
// Class for calculating the processing usage on the send-side (the average
// processing time of a frame divided by the average time difference between
// captured frames).
class SendProcessingUsage : public OveruseFrameDetector::ProcessingUsage {
class SendProcessingUsage1 : public OveruseFrameDetector::ProcessingUsage {
public:
explicit SendProcessingUsage(const CpuOveruseOptions& options)
explicit SendProcessingUsage1(const CpuOveruseOptions& options)
: kWeightFactorFrameDiff(0.998f),
kWeightFactorProcessing(0.995f),
kInitialSampleDiffMs(40.0f),
@ -82,7 +82,7 @@ class SendProcessingUsage : public OveruseFrameDetector::ProcessingUsage {
filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
Reset();
}
virtual ~SendProcessingUsage() {}
virtual ~SendProcessingUsage1() {}
void Reset() override {
frame_timing_.clear();
@ -109,8 +109,11 @@ class SendProcessingUsage : public OveruseFrameDetector::ProcessingUsage {
time_when_first_seen_us));
}
rtc::Optional<int> FrameSent(uint32_t timestamp,
int64_t time_sent_in_us) override {
rtc::Optional<int> FrameSent(
uint32_t timestamp,
int64_t time_sent_in_us,
int64_t /* capture_time_us */,
rtc::Optional<int> /* encode_duration_us */) override {
rtc::Optional<int> encode_duration_us;
// Delay before reporting actual encoding time, used to have the ability to
// detect total encoding time when encoding more than one layer. Encoding is
@ -211,14 +214,86 @@ class SendProcessingUsage : public OveruseFrameDetector::ProcessingUsage {
std::unique_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
};
// Class used for manual testing of overuse, enabled via field trial flag.
class OverdoseInjector : public SendProcessingUsage {
// New cpu load estimator.
// TODO(bugs.webrtc.org/8504): For some period of time, we need to
// switch between the two versions of the estimator for experiments.
// When problems are sorted out, the old estimator should be deleted.
class SendProcessingUsage2 : public OveruseFrameDetector::ProcessingUsage {
public:
OverdoseInjector(const CpuOveruseOptions& options,
explicit SendProcessingUsage2(const CpuOveruseOptions& options)
: options_(options) {
Reset();
}
virtual ~SendProcessingUsage2() = default;
void Reset() override {
prev_time_us_ = -1;
// Start in between the underuse and overuse threshold.
load_estimate_ = (options_.low_encode_usage_threshold_percent +
options_.high_encode_usage_threshold_percent) /
200.0;
}
void SetMaxSampleDiffMs(float /* diff_ms */) override {}
void FrameCaptured(const VideoFrame& frame,
int64_t time_when_first_seen_us,
int64_t last_capture_time_us) override {}
rtc::Optional<int> FrameSent(uint32_t timestamp,
int64_t time_sent_in_us,
int64_t capture_time_us,
rtc::Optional<int> encode_duration_us) override {
if (encode_duration_us) {
if (prev_time_us_ != -1) {
AddSample(1e-6 * (*encode_duration_us),
1e-6 * (capture_time_us - prev_time_us_));
}
}
prev_time_us_ = capture_time_us;
return encode_duration_us;
}
private:
void AddSample(double encode_time, double diff_time) {
RTC_CHECK_GE(diff_time, 0.0);
// Use the filter update
//
// load <-- x/d (1-exp (-d/T)) + exp (-d/T) load
//
// where we must take care for small d, using the proper limit
// (1 - exp(-d/tau)) / d = 1/tau - d/2tau^2 + O(d^2)
double tau = (1e-3 * options_.filter_time_ms);
double e = diff_time / tau;
double c;
if (e < 0.0001) {
c = (1 - e / 2) / tau;
} else {
c = -expm1(-e) / diff_time;
}
load_estimate_ = c * encode_time + exp(-e) * load_estimate_;
}
int Value() override {
return static_cast<int>(100.0 * load_estimate_ + 0.5);
}
private:
const CpuOveruseOptions options_;
int64_t prev_time_us_ = -1;
double load_estimate_;
};
// Class used for manual testing of overuse, enabled via field trial flag.
class OverdoseInjector : public OveruseFrameDetector::ProcessingUsage {
public:
OverdoseInjector(std::unique_ptr<OveruseFrameDetector::ProcessingUsage> usage,
int64_t normal_period_ms,
int64_t overuse_period_ms,
int64_t underuse_period_ms)
: SendProcessingUsage(options),
: usage_(std::move(usage)),
normal_period_ms_(normal_period_ms),
overuse_period_ms_(overuse_period_ms),
underuse_period_ms_(underuse_period_ms),
@ -233,6 +308,29 @@ class OverdoseInjector : public SendProcessingUsage {
~OverdoseInjector() override {}
void Reset() override { usage_->Reset(); }
void SetMaxSampleDiffMs(float diff_ms) override {
usage_->SetMaxSampleDiffMs(diff_ms);
}
void FrameCaptured(const VideoFrame& frame,
int64_t time_when_first_seen_us,
int64_t last_capture_time_us) override {
usage_->FrameCaptured(frame, time_when_first_seen_us, last_capture_time_us);
}
rtc::Optional<int> FrameSent(
// These two arguments are used by the old estimator.
uint32_t timestamp,
int64_t time_sent_in_us,
// And these two by the new estimator.
int64_t capture_time_us,
rtc::Optional<int> encode_duration_us) override {
return usage_->FrameSent(timestamp, time_sent_in_us, capture_time_us,
encode_duration_us);
}
int Value() override {
int64_t now_ms = rtc::TimeMillis();
if (last_toggling_ms_ == -1) {
@ -275,10 +373,11 @@ class OverdoseInjector : public SendProcessingUsage {
break;
}
return overried_usage_value.value_or(SendProcessingUsage::Value());
return overried_usage_value.value_or(usage_->Value());
}
private:
const std::unique_ptr<OveruseFrameDetector::ProcessingUsage> usage_;
const int64_t normal_period_ms_;
const int64_t overuse_period_ms_;
const int64_t underuse_period_ms_;
@ -293,7 +392,9 @@ CpuOveruseOptions::CpuOveruseOptions()
frame_timeout_interval_ms(1500),
min_frame_samples(120),
min_process_count(3),
high_threshold_consecutive_count(2) {
high_threshold_consecutive_count(2),
// Disabled by default.
filter_time_ms(0) {
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
// This is proof-of-concept code for letting the physical core count affect
// the interval into which we attempt to scale. For now, the code is Mac OS
@ -342,6 +443,11 @@ std::unique_ptr<OveruseFrameDetector::ProcessingUsage>
OveruseFrameDetector::CreateProcessingUsage(
const CpuOveruseOptions& options) {
std::unique_ptr<ProcessingUsage> instance;
if (options.filter_time_ms > 0) {
instance = rtc::MakeUnique<SendProcessingUsage2>(options);
} else {
instance = rtc::MakeUnique<SendProcessingUsage1>(options);
}
std::string toggling_interval =
field_trial::FindFullName("WebRTC-ForceSimulatedOveruseIntervalMs");
if (!toggling_interval.empty()) {
@ -353,8 +459,8 @@ OveruseFrameDetector::CreateProcessingUsage(
if (normal_period_ms > 0 && overuse_period_ms > 0 &&
underuse_period_ms > 0) {
instance = rtc::MakeUnique<OverdoseInjector>(
options, normal_period_ms, overuse_period_ms,
underuse_period_ms);
std::move(instance), normal_period_ms,
overuse_period_ms, underuse_period_ms);
} else {
RTC_LOG(LS_WARNING)
<< "Invalid (non-positive) normal/overuse/underuse periods: "
@ -366,12 +472,6 @@ OveruseFrameDetector::CreateProcessingUsage(
<< toggling_interval;
}
}
if (!instance) {
// No valid overuse simulation parameters set, use normal usage class.
instance = rtc::MakeUnique<SendProcessingUsage>(options);
}
return instance;
}
@ -502,10 +602,12 @@ void OveruseFrameDetector::FrameCaptured(const VideoFrame& frame,
}
void OveruseFrameDetector::FrameSent(uint32_t timestamp,
int64_t time_sent_in_us) {
int64_t time_sent_in_us,
int64_t capture_time_us,
rtc::Optional<int> encode_duration_us) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
rtc::Optional<int> encode_duration_us =
usage_->FrameSent(timestamp, time_sent_in_us);
encode_duration_us = usage_->FrameSent(timestamp, time_sent_in_us,
capture_time_us, encode_duration_us);
if (encode_duration_us) {
EncodedFrameTimeMeasured(*encode_duration_us /

View File

@ -40,6 +40,8 @@ struct CpuOveruseOptions {
int high_threshold_consecutive_count; // The number of consecutive checks
// above the high threshold before
// triggering an overuse.
// New estimator enabled if this is set non-zero.
int filter_time_ms; // Time constant for averaging
};
struct CpuOveruseMetrics {
@ -86,7 +88,10 @@ class OveruseFrameDetector {
void FrameCaptured(const VideoFrame& frame, int64_t time_when_first_seen_us);
// Called for each sent frame.
void FrameSent(uint32_t timestamp, int64_t time_sent_in_us);
void FrameSent(uint32_t timestamp,
int64_t time_sent_in_us,
int64_t capture_time_us,
rtc::Optional<int> encode_duration_us);
// Interface for cpu load estimation. Intended for internal use only.
class ProcessingUsage {
@ -97,8 +102,13 @@ class OveruseFrameDetector {
int64_t time_when_first_seen_us,
int64_t last_capture_time_us) = 0;
// Returns encode_time in us, if there's a new measurement.
virtual rtc::Optional<int> FrameSent(uint32_t timestamp,
int64_t time_sent_in_us) = 0;
virtual rtc::Optional<int> FrameSent(
// These two arguments are used by the old estimator.
uint32_t timestamp,
int64_t time_sent_in_us,
// And these two by the new estimator.
int64_t capture_time_us,
rtc::Optional<int> encode_duration_us) = 0;
virtual int Value() = 0;
virtual ~ProcessingUsage() = default;

View File

@ -28,6 +28,7 @@ using ::testing::_;
namespace {
const int kWidth = 640;
const int kHeight = 480;
// kProcessTimeUs / kFrameIntervalUs corresponds to a load of roughly 15%.
const int kFrameIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
const int kProcessTimeUs = 5 * rtc::kNumMicrosecsPerMillisec;
} // namespace
@ -92,30 +93,32 @@ class OveruseFrameDetectorTest : public ::testing::Test,
options_.high_encode_usage_threshold_percent) / 2.0f) + 0.5;
}
void InsertAndSendFramesWithInterval(int num_frames,
int interval_us,
int width,
int height,
int delay_us) {
virtual void InsertAndSendFramesWithInterval(int num_frames,
int interval_us,
int width,
int height,
int delay_us) {
VideoFrame frame(I420Buffer::Create(width, height),
webrtc::kVideoRotation_0, 0);
uint32_t timestamp = 0;
while (num_frames-- > 0) {
frame.set_timestamp(timestamp);
overuse_detector_->FrameCaptured(frame, rtc::TimeMicros());
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us);
clock_.AdvanceTimeMicros(delay_us);
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros());
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(),
capture_time_us, delay_us);
clock_.AdvanceTimeMicros(interval_us - delay_us);
timestamp += interval_us * 90 / 1000;
}
}
void InsertAndSendFramesWithRandomInterval(int num_frames,
int min_interval_us,
int max_interval_us,
int width,
int height,
int delay_us) {
virtual void InsertAndSendFramesWithRandomInterval(int num_frames,
int min_interval_us,
int max_interval_us,
int width,
int height,
int delay_us) {
webrtc::Random random(17);
VideoFrame frame(I420Buffer::Create(width, height),
@ -124,9 +127,12 @@ class OveruseFrameDetectorTest : public ::testing::Test,
while (num_frames-- > 0) {
frame.set_timestamp(timestamp);
int interval_us = random.Rand(min_interval_us, max_interval_us);
overuse_detector_->FrameCaptured(frame, rtc::TimeMicros());
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us);
clock_.AdvanceTimeMicros(delay_us);
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros());
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(),
capture_time_us,
rtc::Optional<int>(delay_us));
overuse_detector_->CheckForOveruse();
// Avoid turning clock backwards.
@ -137,7 +143,7 @@ class OveruseFrameDetectorTest : public ::testing::Test,
}
}
void ForceUpdate(int width, int height) {
virtual void ForceUpdate(int width, int height) {
// Insert one frame, wait a second and then put in another to force update
// the usage. From the tests where these are used, adding another sample
// doesn't affect the expected outcome (this is mainly to check initial
@ -331,12 +337,13 @@ TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) {
for (size_t i = 0; i < 1000; ++i) {
// Unique timestamps.
frame.set_timestamp(static_cast<uint32_t>(i));
overuse_detector_->FrameCaptured(frame, rtc::TimeMicros());
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us);
clock_.AdvanceTimeMicros(kIntervalUs);
if (i > kNumFramesEncodingDelay) {
overuse_detector_->FrameSent(
static_cast<uint32_t>(i - kNumFramesEncodingDelay),
rtc::TimeMicros());
static_cast<uint32_t>(i - kNumFramesEncodingDelay), rtc::TimeMicros(),
capture_time_us, kIntervalUs);
}
overuse_detector_->CheckForOveruse();
}
@ -353,13 +360,16 @@ TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) {
uint32_t timestamp = 0;
for (size_t i = 0; i < 1000; ++i) {
frame.set_timestamp(timestamp);
overuse_detector_->FrameCaptured(frame, rtc::TimeMicros());
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us);
// Encode and send first parts almost instantly.
clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerMillisec);
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros());
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us,
rtc::kNumMicrosecsPerMillisec);
// Encode heavier part, resulting in >85% usage total.
clock_.AdvanceTimeMicros(kDelayUs - rtc::kNumMicrosecsPerMillisec);
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros());
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us,
kDelayUs);
clock_.AdvanceTimeMicros(kIntervalUs - kDelayUs);
timestamp += kIntervalUs * 90 / 1000;
overuse_detector_->CheckForOveruse();
@ -566,4 +576,315 @@ TEST_F(OveruseFrameDetectorTest, NoOveruseForRandomFrameIntervalWithReset) {
EXPECT_LE(UsagePercent(), InitialUsage() + 5);
}
// Tests using the new cpu load estimator (SendProcessingUsage2), selected by
// setting a non-zero CpuOveruseOptions::filter_time_ms in SetUp().
class OveruseFrameDetectorTest2 : public OveruseFrameDetectorTest {
protected:
void SetUp() override {
// Non-zero filter time enables the new estimator; 5000 ms is the
// exponential filter's time constant.
options_.filter_time_ms = 5 * rtc::kNumMillisecsPerSec;
OveruseFrameDetectorTest::SetUp();
}
// Feeds |num_frames| frames through the detector. The new estimator ignores
// the RTP timestamp and send time; only capture time and the reported encode
// duration (|delay_us|) matter. Note the clock advances once per frame,
// after FrameSent(), unlike the base-class version.
void InsertAndSendFramesWithInterval(int num_frames,
int interval_us,
int width,
int height,
int delay_us) override {
VideoFrame frame(I420Buffer::Create(width, height),
webrtc::kVideoRotation_0, 0);
while (num_frames-- > 0) {
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us /* ignored */);
overuse_detector_->FrameSent(0 /* ignored timestamp */,
0 /* ignored send_time_us */,
capture_time_us, delay_us);
clock_.AdvanceTimeMicros(interval_us);
}
}
// As above, but with a frame interval drawn uniformly from
// [min_interval_us, max_interval_us] and an overuse check after each frame.
void InsertAndSendFramesWithRandomInterval(int num_frames,
int min_interval_us,
int max_interval_us,
int width,
int height,
int delay_us) override {
webrtc::Random random(17);
VideoFrame frame(I420Buffer::Create(width, height),
webrtc::kVideoRotation_0, 0);
for (int i = 0; i < num_frames; i++) {
int interval_us = random.Rand(min_interval_us, max_interval_us);
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us);
overuse_detector_->FrameSent(0 /* ignored timestamp */,
0 /* ignored send_time_us */,
capture_time_us, delay_us);
overuse_detector_->CheckForOveruse();
clock_.AdvanceTimeMicros(interval_us);
}
}
void ForceUpdate(int width, int height) override {
// This is mainly to check initial values and whether the overuse
// detector has been reset or not.
InsertAndSendFramesWithInterval(1, rtc::kNumMicrosecsPerSec, width, height,
kFrameIntervalUs);
}
};
// UsagePercent() > high_encode_usage_threshold_percent => overuse.
// UsagePercent() < low_encode_usage_threshold_percent => underuse.
// Sustained usage above the high threshold must produce exactly one
// AdaptDown() callback.
TEST_F(OveruseFrameDetectorTest2, TriggerOveruse) {
// usage > high => overuse
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
TriggerOveruse(options_.high_threshold_consecutive_count);
}
// Overuse followed by low load must first adapt down, then adapt back up.
TEST_F(OveruseFrameDetectorTest2, OveruseAndRecover) {
// usage > high => overuse
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
TriggerOveruse(options_.high_threshold_consecutive_count);
// usage < low => underuse
EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(testing::AtLeast(1));
TriggerUnderuse();
}
// With a null observer, neither overuse nor underuse may invoke callbacks on
// the (now-disconnected) mock.
TEST_F(OveruseFrameDetectorTest2, OveruseAndRecoverWithNoObserver) {
overuse_detector_.reset(new OveruseFrameDetectorUnderTest(
options_, nullptr, this));
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(0);
TriggerOveruse(options_.high_threshold_consecutive_count);
EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(0);
TriggerUnderuse();
}
// Two consecutive overuse episodes adapt down twice before recovering.
TEST_F(OveruseFrameDetectorTest2, DoubleOveruseAndRecover) {
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(2);
TriggerOveruse(options_.high_threshold_consecutive_count);
TriggerOveruse(options_.high_threshold_consecutive_count);
EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(testing::AtLeast(1));
TriggerUnderuse();
}
// Underuse must not be reported until min_process_count checks have run;
// the first CheckForOveruse() is too early, the second (5 s later) reports.
TEST_F(OveruseFrameDetectorTest2, TriggerUnderuseWithMinProcessCount) {
const int kProcessIntervalUs = 5 * rtc::kNumMicrosecsPerSec;
options_.min_process_count = 1;
CpuOveruseObserverImpl overuse_observer;
overuse_detector_.reset(new OveruseFrameDetectorUnderTest(
options_, &overuse_observer, this));
InsertAndSendFramesWithInterval(
1200, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
overuse_detector_->CheckForOveruse();
EXPECT_EQ(0, overuse_observer.normaluse_);
clock_.AdvanceTimeMicros(kProcessIntervalUs);
overuse_detector_->CheckForOveruse();
EXPECT_EQ(1, overuse_observer.normaluse_);
}
// Continuous overuse must never be interleaved with AdaptUp() callbacks.
TEST_F(OveruseFrameDetectorTest2, ConstantOveruseGivesNoNormalUsage) {
EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(0);
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(64);
for (size_t i = 0; i < 64; ++i) {
TriggerOveruse(options_.high_threshold_consecutive_count);
}
}
// Exactly high_threshold_consecutive_count (2) high checks trigger overuse.
TEST_F(OveruseFrameDetectorTest2, ConsecutiveCountTriggersOveruse) {
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
options_.high_threshold_consecutive_count = 2;
ReinitializeOveruseDetector();
TriggerOveruse(2);
}
// Fewer high checks (1) than the required consecutive count (2) must not
// trigger overuse.
TEST_F(OveruseFrameDetectorTest2, IncorrectConsecutiveCountTriggersNoOveruse) {
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(0);
options_.high_threshold_consecutive_count = 2;
ReinitializeOveruseDetector();
TriggerOveruse(1);
}
// After many uniform frames, the estimate converges to the true ratio
// encode_time / frame_interval (as an integer percentage).
TEST_F(OveruseFrameDetectorTest2, ProcessingUsage) {
InsertAndSendFramesWithInterval(
1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
EXPECT_EQ(kProcessTimeUs * 100 / kFrameIntervalUs, UsagePercent());
}
// A resolution change must reset the estimate back to its initial value.
TEST_F(OveruseFrameDetectorTest2, ResetAfterResolutionChange) {
ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
InsertAndSendFramesWithInterval(
1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
EXPECT_NE(InitialUsage(), UsagePercent());
// Verify reset (with new width/height).
ForceUpdate(kWidth, kHeight + 1);
EXPECT_EQ(InitialUsage(), UsagePercent());
}
// Frame gaps of exactly frame_timeout_interval_ms keep the estimate, while
// gaps strictly longer than the timeout reset it to the initial value.
TEST_F(OveruseFrameDetectorTest2, ResetAfterFrameTimeout) {
ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
InsertAndSendFramesWithInterval(
1000, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
EXPECT_NE(InitialUsage(), UsagePercent());
// Gap equal to the timeout: no reset expected.
InsertAndSendFramesWithInterval(
2, options_.frame_timeout_interval_ms *
rtc::kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs);
EXPECT_NE(InitialUsage(), UsagePercent());
// Verify reset.
InsertAndSendFramesWithInterval(
2, (options_.frame_timeout_interval_ms + 1) *
rtc::kNumMicrosecsPerMillisec, kWidth, kHeight, kProcessTimeUs);
ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
}
// The exponential filter (5 s time constant) must move toward the true load
// gradually, not jump there after a handful of samples.
TEST_F(OveruseFrameDetectorTest2, ConvergesSlowly) {
InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight,
kProcessTimeUs);
// No update for the first sample.
EXPECT_EQ(InitialUsage(), UsagePercent());
// Total time approximately 40 * 33ms = 1.3s, significantly less
// than the 5s time constant.
InsertAndSendFramesWithInterval(
40, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
// Should have started to approach correct load of 15%, but not very far.
EXPECT_LT(UsagePercent(), InitialUsage());
EXPECT_GT(UsagePercent(), (InitialUsage() * 3 + 15) / 4);
// Run for roughly 10s more, should now be closer.
InsertAndSendFramesWithInterval(
300, kFrameIntervalUs, kWidth, kHeight, kProcessTimeUs);
EXPECT_NEAR(UsagePercent(), 20, 5);
}
// Before any real measurements, the reported usage is the configured initial
// value (midway between the low and high thresholds).
TEST_F(OveruseFrameDetectorTest2, InitialProcessingUsage) {
ForceUpdate(kWidth, kHeight);
EXPECT_EQ(InitialUsage(), UsagePercent());
}
// Several frames may be "in flight" (captured but not yet sent) at once;
// sending each frame kNumFramesEncodingDelay captures later must still
// register 100% load (encode time == frame interval) and adapt down.
TEST_F(OveruseFrameDetectorTest2, MeasuresMultipleConcurrentSamples) {
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_))
.Times(testing::AtLeast(1));
static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
static const size_t kNumFramesEncodingDelay = 3;
VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
webrtc::kVideoRotation_0, 0);
for (size_t i = 0; i < 1000; ++i) {
// Unique timestamps.
frame.set_timestamp(static_cast<uint32_t>(i));
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us);
clock_.AdvanceTimeMicros(kIntervalUs);
if (i > kNumFramesEncodingDelay) {
overuse_detector_->FrameSent(
static_cast<uint32_t>(i - kNumFramesEncodingDelay), rtc::TimeMicros(),
capture_time_us, kIntervalUs);
}
overuse_detector_->CheckForOveruse();
}
}
// A frame reported in two parts (e.g. multiple layers): the second
// FrameSent() for the same capture carries the larger total encode time.
// The resulting >85% usage must trigger AdaptDown().
TEST_F(OveruseFrameDetectorTest2, UpdatesExistingSamples) {
// >85% encoding time should trigger overuse.
EXPECT_CALL(*(observer_.get()), AdaptDown(reason_))
.Times(testing::AtLeast(1));
static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec;
VideoFrame frame(I420Buffer::Create(kWidth, kHeight),
webrtc::kVideoRotation_0, 0);
uint32_t timestamp = 0;
for (size_t i = 0; i < 1000; ++i) {
frame.set_timestamp(timestamp);
int64_t capture_time_us = rtc::TimeMicros();
overuse_detector_->FrameCaptured(frame, capture_time_us);
// Encode and send first parts almost instantly.
clock_.AdvanceTimeMicros(rtc::kNumMicrosecsPerMillisec);
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us,
rtc::kNumMicrosecsPerMillisec);
// Encode heavier part, resulting in >85% usage total.
clock_.AdvanceTimeMicros(kDelayUs - rtc::kNumMicrosecsPerMillisec);
overuse_detector_->FrameSent(timestamp, rtc::TimeMicros(), capture_time_us,
kDelayUs);
clock_.AdvanceTimeMicros(kIntervalUs - kDelayUs);
timestamp += kIntervalUs * 90 / 1000;
overuse_detector_->CheckForOveruse();
}
}
// Runs the detector on its own task queue under light load (5-6 ms encode
// per 33 ms frame) and waits (max 10 s) for the resulting AdaptUp() callback.
TEST_F(OveruseFrameDetectorTest2, RunOnTqNormalUsage) {
rtc::TaskQueue queue("OveruseFrameDetectorTestQueue");
rtc::Event event(false, false);
queue.PostTask([this, &event] {
overuse_detector_->StartCheckForOveruse();
event.Set();
});
event.Wait(rtc::Event::kForever);
// Expect NormalUsage(). When called, stop the |overuse_detector_| and then
// set |event| to end the test.
EXPECT_CALL(*(observer_.get()), AdaptUp(reason_))
.WillOnce(InvokeWithoutArgs([this, &event] {
overuse_detector_->StopCheckForOveruse();
event.Set();
}));
queue.PostTask([this] {
const int kDelayUs1 = 5 * rtc::kNumMicrosecsPerMillisec;
const int kDelayUs2 = 6 * rtc::kNumMicrosecsPerMillisec;
InsertAndSendFramesWithInterval(1300, kFrameIntervalUs, kWidth, kHeight,
kDelayUs1);
InsertAndSendFramesWithInterval(1, kFrameIntervalUs, kWidth, kHeight,
kDelayUs2);
});
EXPECT_TRUE(event.Wait(10000));
}
// Models screencast, with irregular arrival of frames which are heavy
// to encode.
TEST_F(OveruseFrameDetectorTest2, NoOveruseForLargeRandomFrameInterval) {
// Expensive frames (100 ms) at long random intervals must not be
// misinterpreted as overuse.
EXPECT_CALL(*(observer_.get()), AdaptDown(_)).Times(0);
EXPECT_CALL(*(observer_.get()), AdaptUp(reason_))
.Times(testing::AtLeast(1));
const int kNumFrames = 500;
const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec;
const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec;
const int kMaxIntervalUs = 1000 * rtc::kNumMicrosecsPerMillisec;
InsertAndSendFramesWithRandomInterval(kNumFrames,
kMinIntervalUs, kMaxIntervalUs,
kWidth, kHeight, kEncodeTimeUs);
// Average usage 19%. Check that estimate is in the right ball park.
EXPECT_NEAR(UsagePercent(), 20, 10);
}
// Models screencast, with irregular arrival of frames, often
// exceeding the timeout interval.
TEST_F(OveruseFrameDetectorTest2, NoOveruseForRandomFrameIntervalWithReset) {
// Gaps of up to 3 s frequently exceed frame_timeout_interval_ms and reset
// the estimator; that must never look like overuse.
EXPECT_CALL(*(observer_.get()), AdaptDown(_)).Times(0);
EXPECT_CALL(*(observer_.get()), AdaptUp(reason_))
.Times(testing::AtLeast(1));
const int kNumFrames = 500;
const int kEncodeTimeUs = 100 * rtc::kNumMicrosecsPerMillisec;
const int kMinIntervalUs = 30 * rtc::kNumMicrosecsPerMillisec;
const int kMaxIntervalUs = 3000 * rtc::kNumMicrosecsPerMillisec;
InsertAndSendFramesWithRandomInterval(kNumFrames,
kMinIntervalUs, kMaxIntervalUs,
kWidth, kHeight, kEncodeTimeUs);
// Average usage 6.6%, but since the frame_timeout_interval_ms is
// only 1500 ms, we often reset the estimate to the initial value.
// Check that estimate is in the right ball park.
EXPECT_GE(UsagePercent(), 1);
EXPECT_LE(UsagePercent(), InitialUsage() + 5);
}
} // namespace webrtc

View File

@ -848,12 +848,26 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
int64_t time_sent_us = rtc::TimeMicros();
uint32_t timestamp = encoded_image._timeStamp;
const int qp = encoded_image.qp_;
encoder_queue_.PostTask([this, timestamp, time_sent_us, qp] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
overuse_detector_->FrameSent(timestamp, time_sent_us);
if (quality_scaler_ && qp >= 0)
quality_scaler_->ReportQP(qp);
});
int64_t capture_time_us =
encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec;
rtc::Optional<int> encode_duration_us;
if (encoded_image.timing_.flags != TimingFrameFlags::kInvalid) {
encode_duration_us.emplace(
// TODO(nisse): Maybe use capture_time_ms_ rather than encode_start_ms_?
rtc::kNumMicrosecsPerMillisec *
(encoded_image.timing_.encode_finish_ms -
encoded_image.timing_.encode_start_ms));
}
encoder_queue_.PostTask(
[this, timestamp, time_sent_us, qp, capture_time_us, encode_duration_us] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
overuse_detector_->FrameSent(timestamp, time_sent_us, capture_time_us,
encode_duration_us);
if (quality_scaler_ && qp >= 0)
quality_scaler_->ReportQP(qp);
});
return result;
}