Rename adaptation API methods, extend vie_encoder unit test.

Use AdaptDown/AdaptUp instead of ScaleDown/ScaleUp, since we may want to
adapt by other means than resolution. Also extend the vie_encoder unit test
so that it actually feeds in frames scaled to the resolution determined by
VideoAdapter, since that seems to be the default implementation.

BUG=webrtc:4172
Review-Url: https://codereview.webrtc.org/2652893015
Cr-Commit-Position: refs/heads/master@{#16402}
parent d83b9670a6
commit b1ca073db4
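Before the diff, a quick orientation: classes that previously implemented ScalingObserverInterface now implement AdaptationObserverInterface and receive AdaptUp()/AdaptDown() callbacks instead of ScaleUp()/ScaleDown(). Below is a minimal sketch of such an observer, modeled on the test observers touched by this change; the class name and counters are illustrative only, and the sketch assumes the interface declaration from the quality scaler header is in scope.

// Sketch only: mirrors AdaptationObserverInterface as renamed in this change.
// CountingAdaptationObserver is a hypothetical name, not part of the commit.
class CountingAdaptationObserver : public AdaptationObserverInterface {
 public:
  // Called when the stream could handle higher resolution or framerate again.
  void AdaptUp(AdaptReason reason) override { ++adapt_up_events_; }
  // Called when the source should reduce resolution or framerate.
  void AdaptDown(AdaptReason reason) override { ++adapt_down_events_; }

  int adapt_up_events_ = 0;
  int adapt_down_events_ = 0;
};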
@@ -39,8 +39,6 @@ static const int kHighH264QpThreshold = 37;
 // bitstream range of [0, 127] and not the user-level range of [0,63].
 static const int kLowVp8QpThreshold = 29;
 static const int kHighVp8QpThreshold = 95;
-const ScalingObserverInterface::ScaleReason scale_reason_ =
-    ScalingObserverInterface::ScaleReason::kQuality;
 
 static VideoEncoder::QpThresholds CodecTypeToDefaultThresholds(
     VideoCodecType codec_type) {
@@ -91,16 +89,16 @@ class QualityScaler::CheckQPTask : public rtc::QueuedTask {
   rtc::SequencedTaskChecker task_checker_;
 };
 
-QualityScaler::QualityScaler(ScalingObserverInterface* observer,
+QualityScaler::QualityScaler(AdaptationObserverInterface* observer,
                              VideoCodecType codec_type)
     : QualityScaler(observer, CodecTypeToDefaultThresholds(codec_type)) {}
 
-QualityScaler::QualityScaler(ScalingObserverInterface* observer,
+QualityScaler::QualityScaler(AdaptationObserverInterface* observer,
                              VideoEncoder::QpThresholds thresholds)
    : QualityScaler(observer, thresholds, kMeasureMs) {}
 
 // Protected ctor, should not be called directly.
-QualityScaler::QualityScaler(ScalingObserverInterface* observer,
+QualityScaler::QualityScaler(AdaptationObserverInterface* observer,
                              VideoEncoder::QpThresholds thresholds,
                              int64_t sampling_period)
     : check_qp_task_(nullptr),
@@ -167,14 +165,14 @@ void QualityScaler::ReportQPLow() {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
   LOG(LS_INFO) << "QP has been low, asking for higher resolution.";
   ClearSamples();
-  observer_->ScaleUp(scale_reason_);
+  observer_->AdaptUp(AdaptationObserverInterface::AdaptReason::kQuality);
 }
 
 void QualityScaler::ReportQPHigh() {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
   LOG(LS_INFO) << "QP has been high , asking for lower resolution.";
   ClearSamples();
-  observer_->ScaleDown(scale_reason_);
+  observer_->AdaptDown(AdaptationObserverInterface::AdaptReason::kQuality);
   // If we've scaled down, wait longer before scaling up again.
   if (fast_rampup_) {
     fast_rampup_ = false;
@@ -21,18 +21,21 @@
 namespace webrtc {
 
-// An interface for a class that receives scale up/down requests.
-class ScalingObserverInterface {
+// An interface for signaling requests to limit or increase the resolution or
+// framerate of the captured video stream.
+class AdaptationObserverInterface {
  public:
-  enum ScaleReason : size_t { kQuality = 0, kCpu = 1 };
+  // Indicates if the adaptation is due to overuse of the CPU resources, or if
+  // the quality of the encoded frames have dropped too low.
+  enum AdaptReason : size_t { kQuality = 0, kCpu = 1 };
   static const size_t kScaleReasonSize = 2;
-  // Called to signal that we can handle larger frames.
-  virtual void ScaleUp(ScaleReason reason) = 0;
-  // Called to signal that encoder to scale down.
-  virtual void ScaleDown(ScaleReason reason) = 0;
+  // Called to signal that we can handle larger or more frequent frames.
+  virtual void AdaptUp(AdaptReason reason) = 0;
+  // Called to signal that the source should reduce the resolution or framerate.
+  virtual void AdaptDown(AdaptReason reason) = 0;
 
  protected:
-  virtual ~ScalingObserverInterface() {}
+  virtual ~AdaptationObserverInterface() {}
 };
 
 // QualityScaler runs asynchronously and monitors QP values of encoded frames.
@@ -43,9 +46,10 @@ class QualityScaler {
   // Construct a QualityScaler with a given |observer|.
   // This starts the quality scaler periodically checking what the average QP
   // has been recently.
-  QualityScaler(ScalingObserverInterface* observer, VideoCodecType codec_type);
+  QualityScaler(AdaptationObserverInterface* observer,
+                VideoCodecType codec_type);
   // If specific thresholds are desired these can be supplied as |thresholds|.
-  QualityScaler(ScalingObserverInterface* observer,
+  QualityScaler(AdaptationObserverInterface* observer,
                 VideoEncoder::QpThresholds thresholds);
   virtual ~QualityScaler();
   // Should be called each time the encoder drops a frame
@@ -55,7 +59,7 @@ class QualityScaler {
 
   // The following members declared protected for testing purposes
  protected:
-  QualityScaler(ScalingObserverInterface* observer,
+  QualityScaler(AdaptationObserverInterface* observer,
                 VideoEncoder::QpThresholds thresholds,
                 int64_t sampling_period);
 
@@ -68,7 +72,7 @@ class QualityScaler {
   int64_t GetSamplingPeriodMs() const;
 
   CheckQPTask* check_qp_task_ GUARDED_BY(&task_checker_);
-  ScalingObserverInterface* const observer_ GUARDED_BY(&task_checker_);
+  AdaptationObserverInterface* const observer_ GUARDED_BY(&task_checker_);
   rtc::SequencedTaskChecker task_checker_;
 
   const int64_t sampling_period_ms_;
@@ -26,29 +26,29 @@ static const int kHighQp = 40;
 static const size_t kDefaultTimeoutMs = 150;
 }  // namespace
 
-class MockScaleObserver : public ScalingObserverInterface {
+class MockAdaptationObserver : public AdaptationObserverInterface {
  public:
-  MockScaleObserver() : event(false, false) {}
-  virtual ~MockScaleObserver() {}
+  MockAdaptationObserver() : event(false, false) {}
+  virtual ~MockAdaptationObserver() {}
 
-  void ScaleUp(ScaleReason r) override {
-    scaled_up++;
+  void AdaptUp(AdaptReason r) override {
+    adapt_up_events_++;
     event.Set();
   }
-  void ScaleDown(ScaleReason r) override {
-    scaled_down++;
+  void AdaptDown(AdaptReason r) override {
+    adapt_down_events_++;
     event.Set();
   }
 
   rtc::Event event;
-  int scaled_up = 0;
-  int scaled_down = 0;
+  int adapt_up_events_ = 0;
+  int adapt_down_events_ = 0;
 };
 
 // Pass a lower sampling period to speed up the tests.
 class QualityScalerUnderTest : public QualityScaler {
  public:
-  explicit QualityScalerUnderTest(ScalingObserverInterface* observer,
+  explicit QualityScalerUnderTest(AdaptationObserverInterface* observer,
                                   VideoEncoder::QpThresholds thresholds)
       : QualityScaler(observer, thresholds, 5) {}
 };
@@ -64,7 +64,7 @@ class QualityScalerTest : public ::testing::Test {
 
   QualityScalerTest()
       : q_(new rtc::TaskQueue("QualityScalerTestQueue")),
-        observer_(new MockScaleObserver()) {
+        observer_(new MockAdaptationObserver()) {
     rtc::Event event(false, false);
     q_->PostTask([this, &event] {
       qs_ = std::unique_ptr<QualityScaler>(new QualityScalerUnderTest(
@@ -105,28 +105,28 @@ class QualityScalerTest : public ::testing::Test {
 
   std::unique_ptr<rtc::TaskQueue> q_;
   std::unique_ptr<QualityScaler> qs_;
-  std::unique_ptr<MockScaleObserver> observer_;
+  std::unique_ptr<MockAdaptationObserver> observer_;
 };
 
 #define DISABLED_TEST(basename, test) TEST_F(basename, DISABLED_##test)
 DISABLED_TEST(QualityScalerTest, DownscalesAfterContinuousFramedrop) {
   q_->PostTask([this] { TriggerScale(kScaleDown); });
   EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(1, observer_->scaled_down);
+  EXPECT_EQ(1, observer_->adapt_down_events_);
 }
 
 DISABLED_TEST(QualityScalerTest, KeepsScaleAtHighQp) {
   q_->PostTask([this] { TriggerScale(kKeepScaleAtHighQp); });
   EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(0, observer_->scaled_down);
-  EXPECT_EQ(0, observer_->scaled_up);
+  EXPECT_EQ(0, observer_->adapt_down_events_);
+  EXPECT_EQ(0, observer_->adapt_up_events_);
 }
 
 DISABLED_TEST(QualityScalerTest, DownscalesAboveHighQp) {
   q_->PostTask([this] { TriggerScale(kScaleDownAboveHighQp); });
   EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(1, observer_->scaled_down);
-  EXPECT_EQ(0, observer_->scaled_up);
+  EXPECT_EQ(1, observer_->adapt_down_events_);
+  EXPECT_EQ(0, observer_->adapt_up_events_);
 }
 
 DISABLED_TEST(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {
@@ -136,15 +136,15 @@ DISABLED_TEST(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {
     qs_->ReportQP(kHighQp);
   });
   EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(1, observer_->scaled_down);
-  EXPECT_EQ(0, observer_->scaled_up);
+  EXPECT_EQ(1, observer_->adapt_down_events_);
+  EXPECT_EQ(0, observer_->adapt_up_events_);
 }
 
 DISABLED_TEST(QualityScalerTest, DoesNotDownscaleOnNormalQp) {
   q_->PostTask([this] { TriggerScale(kScaleDownAboveHighQp); });
   EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(1, observer_->scaled_down);
-  EXPECT_EQ(0, observer_->scaled_up);
+  EXPECT_EQ(1, observer_->adapt_down_events_);
+  EXPECT_EQ(0, observer_->adapt_up_events_);
 }
 
 DISABLED_TEST(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) {
@@ -153,26 +153,26 @@ DISABLED_TEST(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) {
     qs_->ReportQP(kHighQp);
   });
   EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(0, observer_->scaled_down);
-  EXPECT_EQ(0, observer_->scaled_up);
+  EXPECT_EQ(0, observer_->adapt_down_events_);
+  EXPECT_EQ(0, observer_->adapt_up_events_);
 }
 
 DISABLED_TEST(QualityScalerTest, UpscalesAfterLowQp) {
   q_->PostTask([this] { TriggerScale(kScaleUp); });
   EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(0, observer_->scaled_down);
-  EXPECT_EQ(1, observer_->scaled_up);
+  EXPECT_EQ(0, observer_->adapt_down_events_);
+  EXPECT_EQ(1, observer_->adapt_up_events_);
 }
 
 DISABLED_TEST(QualityScalerTest, ScalesDownAndBackUp) {
   q_->PostTask([this] { TriggerScale(kScaleDown); });
   EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(1, observer_->scaled_down);
-  EXPECT_EQ(0, observer_->scaled_up);
+  EXPECT_EQ(1, observer_->adapt_down_events_);
+  EXPECT_EQ(0, observer_->adapt_up_events_);
   q_->PostTask([this] { TriggerScale(kScaleUp); });
   EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
-  EXPECT_EQ(1, observer_->scaled_down);
-  EXPECT_EQ(1, observer_->scaled_up);
+  EXPECT_EQ(1, observer_->adapt_down_events_);
+  EXPECT_EQ(1, observer_->adapt_up_events_);
 }
 #undef DISABLED_TEST
 }  // namespace webrtc
@@ -248,6 +248,7 @@ class ScrollingImageFrameGenerator : public FrameGenerator {
 }  // namespace
 
 FrameForwarder::FrameForwarder() : sink_(nullptr) {}
+FrameForwarder::~FrameForwarder() {}
 
 void FrameForwarder::IncomingCapturedFrame(const VideoFrame& video_frame) {
   rtc::CritScope lock(&crit_);

@@ -29,12 +29,13 @@ namespace test {
 class FrameForwarder : public rtc::VideoSourceInterface<VideoFrame> {
  public:
   FrameForwarder();
+  virtual ~FrameForwarder();
   // Forwards |video_frame| to the registered |sink_|.
-  void IncomingCapturedFrame(const VideoFrame& video_frame);
+  virtual void IncomingCapturedFrame(const VideoFrame& video_frame);
   rtc::VideoSinkWants sink_wants() const;
   bool has_sinks() const;
 
- private:
+ protected:
   void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
                        const rtc::VideoSinkWants& wants) override;
   void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
@@ -48,7 +48,7 @@ const int kMaxOverusesBeforeApplyRampupDelay = 4;
 const float kSampleDiffMs = 33.0f;
 const float kMaxExp = 7.0f;
 
-const auto kScaleReasonCpu = ScalingObserverInterface::ScaleReason::kCpu;
+const auto kScaleReasonCpu = AdaptationObserverInterface::AdaptReason::kCpu;
 }  // namespace
 
 CpuOveruseOptions::CpuOveruseOptions()
@@ -203,7 +203,7 @@ class OveruseFrameDetector::CheckOveruseTask : public rtc::QueuedTask {
 
 OveruseFrameDetector::OveruseFrameDetector(
     const CpuOveruseOptions& options,
-    ScalingObserverInterface* observer,
+    AdaptationObserverInterface* observer,
     EncodedFrameObserver* encoder_timing,
     CpuOveruseMetricsObserver* metrics_observer)
     : check_overuse_task_(nullptr),
@@ -376,13 +376,13 @@ void OveruseFrameDetector::CheckForOveruse() {
     ++num_overuse_detections_;
 
     if (observer_)
-      observer_->ScaleDown(kScaleReasonCpu);
+      observer_->AdaptDown(kScaleReasonCpu);
   } else if (IsUnderusing(*metrics_, now_ms)) {
     last_rampup_time_ms_ = now_ms;
     in_quick_rampup_ = true;
 
     if (observer_)
-      observer_->ScaleUp(kScaleReasonCpu);
+      observer_->AdaptUp(kScaleReasonCpu);
   }
 
   int rampup_delay =

@@ -65,7 +65,7 @@ class CpuOveruseMetricsObserver {
 class OveruseFrameDetector {
  public:
   OveruseFrameDetector(const CpuOveruseOptions& options,
-                       ScalingObserverInterface* overuse_observer,
+                       AdaptationObserverInterface* overuse_observer,
                        EncodedFrameObserver* encoder_timing_,
                        CpuOveruseMetricsObserver* metrics_observer);
   ~OveruseFrameDetector();
@@ -117,7 +117,7 @@ class OveruseFrameDetector {
   const CpuOveruseOptions options_;
 
   // Observer getting overuse reports.
-  ScalingObserverInterface* const observer_;
+  AdaptationObserverInterface* const observer_;
   EncodedFrameObserver* const encoder_timing_;
 
   // Stats metrics.
@@ -30,24 +30,24 @@ namespace {
 const int kProcessTimeUs = 5 * rtc::kNumMicrosecsPerMillisec;
 }  // namespace
 
-class MockCpuOveruseObserver : public ScalingObserverInterface {
+class MockCpuOveruseObserver : public AdaptationObserverInterface {
  public:
   MockCpuOveruseObserver() {}
   virtual ~MockCpuOveruseObserver() {}
 
-  MOCK_METHOD1(ScaleUp, void(ScaleReason));
-  MOCK_METHOD1(ScaleDown, void(ScaleReason));
+  MOCK_METHOD1(AdaptUp, void(AdaptReason));
+  MOCK_METHOD1(AdaptDown, void(AdaptReason));
 };
 
-class CpuOveruseObserverImpl : public ScalingObserverInterface {
+class CpuOveruseObserverImpl : public AdaptationObserverInterface {
  public:
   CpuOveruseObserverImpl() :
     overuse_(0),
     normaluse_(0) {}
   virtual ~CpuOveruseObserverImpl() {}
 
-  void ScaleDown(ScaleReason) { ++overuse_; }
-  void ScaleUp(ScaleReason) { ++normaluse_; }
+  void AdaptDown(AdaptReason) { ++overuse_; }
+  void AdaptUp(AdaptReason) { ++normaluse_; }
 
   int overuse_;
   int normaluse_;
@@ -56,7 +56,7 @@ class CpuOveruseObserverImpl : public ScalingObserverInterface {
 class OveruseFrameDetectorUnderTest : public OveruseFrameDetector {
  public:
   OveruseFrameDetectorUnderTest(const CpuOveruseOptions& options,
-                                ScalingObserverInterface* overuse_observer,
+                                AdaptationObserverInterface* overuse_observer,
                                 EncodedFrameObserver* encoder_timing,
                                 CpuOveruseMetricsObserver* metrics_observer)
       : OveruseFrameDetector(options,
@@ -145,7 +145,7 @@ class OveruseFrameDetectorTest : public ::testing::Test,
   std::unique_ptr<OveruseFrameDetectorUnderTest> overuse_detector_;
   CpuOveruseMetrics metrics_;
 
-  static const auto reason_ = ScalingObserverInterface::ScaleReason::kCpu;
+  static const auto reason_ = AdaptationObserverInterface::AdaptReason::kCpu;
 };
 
 
@@ -153,33 +153,33 @@ class OveruseFrameDetectorTest : public ::testing::Test,
 // UsagePercent() < low_encode_usage_threshold_percent => underuse.
 TEST_F(OveruseFrameDetectorTest, TriggerOveruse) {
   // usage > high => overuse
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_)).Times(1);
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
   TriggerOveruse(options_.high_threshold_consecutive_count);
 }
 
 TEST_F(OveruseFrameDetectorTest, OveruseAndRecover) {
   // usage > high => overuse
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_)).Times(1);
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
   TriggerOveruse(options_.high_threshold_consecutive_count);
   // usage < low => underuse
-  EXPECT_CALL(*(observer_.get()), ScaleUp(reason_)).Times(testing::AtLeast(1));
+  EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(testing::AtLeast(1));
   TriggerUnderuse();
 }
 
 TEST_F(OveruseFrameDetectorTest, OveruseAndRecoverWithNoObserver) {
   overuse_detector_.reset(new OveruseFrameDetectorUnderTest(
       options_, nullptr, nullptr, this));
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_)).Times(0);
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(0);
   TriggerOveruse(options_.high_threshold_consecutive_count);
-  EXPECT_CALL(*(observer_.get()), ScaleUp(reason_)).Times(0);
+  EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(0);
   TriggerUnderuse();
 }
 
 TEST_F(OveruseFrameDetectorTest, DoubleOveruseAndRecover) {
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_)).Times(2);
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(2);
   TriggerOveruse(options_.high_threshold_consecutive_count);
   TriggerOveruse(options_.high_threshold_consecutive_count);
-  EXPECT_CALL(*(observer_.get()), ScaleUp(reason_)).Times(testing::AtLeast(1));
+  EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(testing::AtLeast(1));
   TriggerUnderuse();
 }
 
@@ -199,22 +199,22 @@ TEST_F(OveruseFrameDetectorTest, TriggerUnderuseWithMinProcessCount) {
 }
 
 TEST_F(OveruseFrameDetectorTest, ConstantOveruseGivesNoNormalUsage) {
-  EXPECT_CALL(*(observer_.get()), ScaleUp(reason_)).Times(0);
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_)).Times(64);
+  EXPECT_CALL(*(observer_.get()), AdaptUp(reason_)).Times(0);
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(64);
   for (size_t i = 0; i < 64; ++i) {
     TriggerOveruse(options_.high_threshold_consecutive_count);
   }
 }
 
 TEST_F(OveruseFrameDetectorTest, ConsecutiveCountTriggersOveruse) {
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_)).Times(1);
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(1);
   options_.high_threshold_consecutive_count = 2;
   ReinitializeOveruseDetector();
   TriggerOveruse(2);
 }
 
 TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_)).Times(0);
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_)).Times(0);
   options_.high_threshold_consecutive_count = 2;
   ReinitializeOveruseDetector();
   TriggerOveruse(1);
@@ -281,7 +281,7 @@ TEST_F(OveruseFrameDetectorTest, InitialProcessingUsage) {
 }
 
 TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) {
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_))
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_))
       .Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const size_t kNumFramesEncodingDelay = 3;
@@ -303,7 +303,7 @@ TEST_F(OveruseFrameDetectorTest, MeasuresMultipleConcurrentSamples) {
 
 TEST_F(OveruseFrameDetectorTest, UpdatesExistingSamples) {
   // >85% encoding time should trigger overuse.
-  EXPECT_CALL(*(observer_.get()), ScaleDown(reason_))
+  EXPECT_CALL(*(observer_.get()), AdaptDown(reason_))
      .Times(testing::AtLeast(1));
   static const int kIntervalUs = 33 * rtc::kNumMicrosecsPerMillisec;
   static const int kDelayUs = 30 * rtc::kNumMicrosecsPerMillisec;
@@ -337,7 +337,7 @@ TEST_F(OveruseFrameDetectorTest, RunOnTqNormalUsage) {
 
   // Expect NormalUsage(). When called, stop the |overuse_detector_| and then
   // set |event| to end the test.
-  EXPECT_CALL(*(observer_.get()), ScaleUp(reason_))
+  EXPECT_CALL(*(observer_.get()), AdaptUp(reason_))
       .WillOnce(InvokeWithoutArgs([this, &event] {
         overuse_detector_->StopCheckForOveruse();
         event.Set();
@@ -14,7 +14,6 @@
 #include <limits>
 #include <utility>
 
-#include "webrtc/modules/video_coding/include/video_codec_initializer.h"
 #include "webrtc/base/arraysize.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
@@ -23,6 +22,7 @@
 #include "webrtc/common_video/include/video_bitrate_allocator.h"
 #include "webrtc/modules/pacing/paced_sender.h"
 #include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
+#include "webrtc/modules/video_coding/include/video_codec_initializer.h"
 #include "webrtc/modules/video_coding/include/video_coding.h"
 #include "webrtc/modules/video_coding/include/video_coding_defines.h"
 #include "webrtc/video/overuse_frame_detector.h"
@@ -31,6 +31,8 @@
 namespace webrtc {
 
 namespace {
+using DegradationPreference = VideoSendStream::DegradationPreference;
+
 // Time interval for logging frame counts.
 const int64_t kFrameLogIntervalMs = 60000;
 // We will never ask for a resolution lower than this.
@@ -134,13 +136,11 @@ class ViEEncoder::VideoSourceProxy {
  public:
   explicit VideoSourceProxy(ViEEncoder* vie_encoder)
       : vie_encoder_(vie_encoder),
-        degradation_preference_(
-            VideoSendStream::DegradationPreference::kMaintainResolution),
+        degradation_preference_(DegradationPreference::kMaintainResolution),
         source_(nullptr) {}
 
-  void SetSource(
-      rtc::VideoSourceInterface<VideoFrame>* source,
-      const VideoSendStream::DegradationPreference& degradation_preference) {
+  void SetSource(rtc::VideoSourceInterface<VideoFrame>* source,
+                 const DegradationPreference& degradation_preference) {
     // Called on libjingle's worker thread.
     RTC_DCHECK_CALLED_SEQUENTIALLY(&main_checker_);
     rtc::VideoSourceInterface<VideoFrame>* old_source = nullptr;
@@ -199,8 +199,7 @@ class ViEEncoder::VideoSourceProxy {
     if (!IsResolutionScalingEnabledLocked()) {
       // This can happen since |degradation_preference_| is set on
       // libjingle's worker thread but the adaptation is done on the encoder
-      // task
-      // queue.
+      // task queue.
      return;
    }
    // The input video frame size will have a resolution with "one step up"
@@ -216,7 +215,7 @@ class ViEEncoder::VideoSourceProxy {
   bool IsResolutionScalingEnabledLocked() const
       EXCLUSIVE_LOCKS_REQUIRED(&crit_) {
     return degradation_preference_ !=
-           VideoSendStream::DegradationPreference::kMaintainResolution;
+           DegradationPreference::kMaintainResolution;
   }
 
   const rtc::VideoSinkWants& current_wants() const
@@ -230,8 +229,7 @@ class ViEEncoder::VideoSourceProxy {
   ViEEncoder* const vie_encoder_;
   rtc::VideoSinkWants sink_wants_ GUARDED_BY(&crit_);
   rtc::VideoSinkWants disabled_scaling_sink_wants_ GUARDED_BY(&crit_);
-  VideoSendStream::DegradationPreference degradation_preference_
-      GUARDED_BY(&crit_);
+  DegradationPreference degradation_preference_ GUARDED_BY(&crit_);
   rtc::VideoSourceInterface<VideoFrame>* source_ GUARDED_BY(&crit_);
 
   RTC_DISALLOW_COPY_AND_ASSIGN(VideoSourceProxy);
@@ -269,6 +267,7 @@ ViEEncoder::ViEEncoder(uint32_t number_of_cores,
       picture_id_rpsi_(0),
       clock_(Clock::GetRealTimeClock()),
       scale_counter_(kScaleReasonSize, 0),
+      degradation_preference_(DegradationPreference::kMaintainResolution),
       last_captured_timestamp_(0),
       delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
                              clock_->TimeInMilliseconds()),
@@ -293,7 +292,7 @@ ViEEncoder::~ViEEncoder() {
 
 void ViEEncoder::Stop() {
   RTC_DCHECK_RUN_ON(&thread_checker_);
-  source_proxy_->SetSource(nullptr, VideoSendStream::DegradationPreference());
+  source_proxy_->SetSource(nullptr, DegradationPreference());
   encoder_queue_.PostTask([this] {
     RTC_DCHECK_RUN_ON(&encoder_queue_);
     overuse_detector_.StopCheckForOveruse();
@@ -338,10 +337,12 @@ void ViEEncoder::SetSource(
   source_proxy_->SetSource(source, degradation_preference);
   encoder_queue_.PostTask([this, degradation_preference] {
     RTC_DCHECK_RUN_ON(&encoder_queue_);
-    scaling_enabled_ = (degradation_preference !=
-        VideoSendStream::DegradationPreference::kMaintainResolution);
+    degradation_preference_ = degradation_preference;
     stats_proxy_->SetResolutionRestrictionStats(
-        scaling_enabled_, scale_counter_[kCpu] > 0, scale_counter_[kQuality]);
+        degradation_preference !=
+            VideoSendStream::DegradationPreference::kMaintainResolution,
+        scale_counter_[kCpu] > 0, scale_counter_[kQuality]);
   });
 }
 
@@ -437,7 +438,8 @@ void ViEEncoder::ReconfigureEncoder() {
       std::move(streams), encoder_config_.min_transmit_bitrate_bps);
 
   const auto scaling_settings = settings_.encoder->GetScalingSettings();
-  if (scaling_enabled_ && scaling_settings.enabled) {
+  if (degradation_preference_ != DegradationPreference::kMaintainResolution &&
+      scaling_settings.enabled) {
     if (scaling_settings.thresholds) {
       quality_scaler_.reset(
          new QualityScaler(this, *(scaling_settings.thresholds)));
@@ -701,9 +703,9 @@ void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps,
   }
 }
 
-void ViEEncoder::ScaleDown(ScaleReason reason) {
+void ViEEncoder::AdaptDown(AdaptReason reason) {
   RTC_DCHECK_RUN_ON(&encoder_queue_);
-  if (!scaling_enabled_)
+  if (degradation_preference_ != DegradationPreference::kBalanced)
     return;
   // Request lower resolution if the current resolution is lower than last time
   // we asked for the resolution to be lowered.
@@ -734,10 +736,12 @@ void ViEEncoder::ScaleDown(ScaleReason reason) {
   }
 }
 
-void ViEEncoder::ScaleUp(ScaleReason reason) {
+void ViEEncoder::AdaptUp(AdaptReason reason) {
   RTC_DCHECK_RUN_ON(&encoder_queue_);
-  if (scale_counter_[reason] == 0 || !scaling_enabled_)
+  if (scale_counter_[reason] == 0 ||
+      degradation_preference_ != DegradationPreference::kBalanced) {
     return;
+  }
   // Only scale if resolution is higher than last time
   // we requested higher resolution.
   int current_pixel_count =
@@ -50,7 +50,7 @@ class VideoBitrateAllocationObserver;
 class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
                    public EncodedImageCallback,
                    public VCMSendStatisticsCallback,
-                   public ScalingObserverInterface {
+                   public AdaptationObserverInterface {
  public:
   // Interface for receiving encoded video frames and notifications about
   // configuration changes.
@@ -120,8 +120,8 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
 
   // webrtc::ScalingObserverInterface implementation.
   // These methods are protected for easier testing.
-  void ScaleUp(ScaleReason reason) override;
-  void ScaleDown(ScaleReason reason) override;
+  void AdaptUp(AdaptReason reason) override;
+  void AdaptDown(AdaptReason reason) override;
 
  private:
   class ConfigureEncoderTask;
@@ -215,7 +215,8 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
   // restricted, and if so, why.
   std::vector<int> scale_counter_ ACCESS_ON(&encoder_queue_);
   // Set depending on degradation preferences
-  bool scaling_enabled_ ACCESS_ON(&encoder_queue_) = false;
+  VideoSendStream::DegradationPreference degradation_preference_
+      ACCESS_ON(&encoder_queue_);
 
   // Pixel count last time the resolution was requested to be changed down.
   rtc::Optional<int> max_pixel_count_ ACCESS_ON(&encoder_queue_);
@@ -13,6 +13,7 @@
 
 #include "webrtc/api/video/i420_buffer.h"
 #include "webrtc/base/logging.h"
+#include "webrtc/media/base/videoadapter.h"
 #include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h"
 #include "webrtc/system_wrappers/include/metrics_default.h"
 #include "webrtc/system_wrappers/include/sleep.h"
@@ -37,7 +38,7 @@ const int kMinPixelsPerFrame = 120 * 90;
 namespace webrtc {
 
 using DegredationPreference = VideoSendStream::DegradationPreference;
-using ScaleReason = ScalingObserverInterface::ScaleReason;
+using ScaleReason = AdaptationObserverInterface::AdaptReason;
 using ::testing::_;
 using ::testing::Return;
 
@@ -69,22 +70,22 @@ class ViEEncoderUnderTest : public ViEEncoder {
                    nullptr /* pre_encode_callback */,
                    nullptr /* encoder_timing */) {}
 
-  void PostTaskAndWait(bool down, ScaleReason reason) {
+  void PostTaskAndWait(bool down, AdaptReason reason) {
     rtc::Event event(false, false);
     encoder_queue()->PostTask([this, &event, reason, down] {
-      down ? ScaleDown(reason) : ScaleUp(reason);
+      down ? AdaptDown(reason) : AdaptUp(reason);
      event.Set();
    });
    RTC_DCHECK(event.Wait(5000));
  }
 
-  void TriggerCpuOveruse() { PostTaskAndWait(true, ScaleReason::kCpu); }
+  void TriggerCpuOveruse() { PostTaskAndWait(true, AdaptReason::kCpu); }
 
-  void TriggerCpuNormalUsage() { PostTaskAndWait(false, ScaleReason::kCpu); }
+  void TriggerCpuNormalUsage() { PostTaskAndWait(false, AdaptReason::kCpu); }
 
-  void TriggerQualityLow() { PostTaskAndWait(true, ScaleReason::kQuality); }
+  void TriggerQualityLow() { PostTaskAndWait(true, AdaptReason::kQuality); }
 
-  void TriggerQualityHigh() { PostTaskAndWait(false, ScaleReason::kQuality); }
+  void TriggerQualityHigh() { PostTaskAndWait(false, AdaptReason::kQuality); }
 };
 
 class VideoStreamFactory
@@ -110,6 +111,52 @@ class VideoStreamFactory
   const size_t num_temporal_layers_;
 };
 
+class AdaptingFrameForwarder : public test::FrameForwarder {
+ public:
+  AdaptingFrameForwarder() : adaptation_enabled_(false) {}
+  virtual ~AdaptingFrameForwarder() {}
+
+  void set_adaptation_enabled(bool enabled) {
+    rtc::CritScope cs(&crit_);
+    adaptation_enabled_ = enabled;
+  }
+
+  bool adaption_enabled() {
+    rtc::CritScope cs(&crit_);
+    return adaptation_enabled_;
+  }
+
+  void IncomingCapturedFrame(const VideoFrame& video_frame) override {
+    int cropped_width = 0;
+    int cropped_height = 0;
+    int out_width = 0;
+    int out_height = 0;
+    if (adaption_enabled() &&
+        adapter_.AdaptFrameResolution(video_frame.width(), video_frame.height(),
+                                      video_frame.timestamp_us() * 1000,
+                                      &cropped_width, &cropped_height,
+                                      &out_width, &out_height)) {
+      VideoFrame adapted_frame(
+          new rtc::RefCountedObject<TestBuffer>(nullptr, out_width, out_height),
+          99, 99, kVideoRotation_0);
+      adapted_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
+      test::FrameForwarder::IncomingCapturedFrame(adapted_frame);
+    } else {
+      test::FrameForwarder::IncomingCapturedFrame(video_frame);
+    }
+  }
+
+  void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override {
+    rtc::CritScope cs(&crit_);
+    adapter_.OnResolutionRequest(wants.max_pixel_count,
+                                 wants.max_pixel_count_step_up);
+    test::FrameForwarder::AddOrUpdateSink(sink, wants);
+  }
+
+  cricket::VideoAdapter adapter_;
+  bool adaptation_enabled_ GUARDED_BY(crit_);
+};
 }  // namespace
 
 class ViEEncoderTest : public ::testing::Test {
@@ -258,11 +305,25 @@ class ViEEncoderTest : public ::testing::Test {
     EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
     {
       rtc::CritScope lock(&crit_);
-      timestamp = timestamp_;
+      timestamp = last_timestamp_;
     }
     test_encoder_->CheckLastTimeStampsMatch(expected_ntp_time, timestamp);
   }
 
+  void WaitForEncodedFrame(uint32_t expected_width,
+                           uint32_t expected_height) {
+    uint32_t width = 0;
+    uint32_t height = 0;
+    EXPECT_TRUE(encoded_frame_event_.Wait(kDefaultTimeoutMs));
+    {
+      rtc::CritScope lock(&crit_);
+      width = last_width_;
+      height = last_height_;
+    }
+    EXPECT_EQ(expected_height, height);
+    EXPECT_EQ(expected_width, width);
+  }
+
   void SetExpectNoFrames() {
     rtc::CritScope lock(&crit_);
     expect_frames_ = false;
@@ -285,9 +346,11 @@ class ViEEncoderTest : public ::testing::Test {
       const RTPFragmentationHeader* fragmentation) override {
     rtc::CritScope lock(&crit_);
     EXPECT_TRUE(expect_frames_);
-    timestamp_ = encoded_image._timeStamp;
+    last_timestamp_ = encoded_image._timeStamp;
+    last_width_ = encoded_image._encodedWidth;
+    last_height_ = encoded_image._encodedHeight;
     encoded_frame_event_.Set();
-    return Result(Result::OK, timestamp_);
+    return Result(Result::OK, last_timestamp_);
   }
 
   void OnEncoderConfigurationChanged(std::vector<VideoStream> streams,
@@ -300,7 +363,9 @@ class ViEEncoderTest : public ::testing::Test {
   rtc::CriticalSection crit_;
   TestEncoder* test_encoder_;
   rtc::Event encoded_frame_event_;
-  uint32_t timestamp_ = 0;
+  uint32_t last_timestamp_ = 0;
+  uint32_t last_height_ = 0;
+  uint32_t last_width_ = 0;
   bool expect_frames_ = true;
   int number_of_reconfigurations_ = 0;
   int min_transmit_bitrate_bps_ = 0;
@@ -313,7 +378,7 @@ class ViEEncoderTest : public ::testing::Test {
   TestEncoder fake_encoder_;
   std::unique_ptr<SendStatisticsProxy> stats_proxy_;
   TestSink sink_;
-  test::FrameForwarder video_source_;
+  AdaptingFrameForwarder video_source_;
   std::unique_ptr<ViEEncoderUnderTest> vie_encoder_;
 };
 
@@ -1029,4 +1094,32 @@ TEST_F(ViEEncoderTest, CallsBitrateObserver) {
   vie_encoder_->Stop();
 }
 
+// TODO(sprang): Extend this with fps throttling and any "balanced" extensions.
+TEST_F(ViEEncoderTest, AdaptsResolutionOnOveruse) {
+  vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
+
+  const int kFrameWidth = 1280;
+  const int kFrameHeight = 720;
+  // Enabled default VideoAdapter downscaling. First step is 3/4, not 3/5 as
+  // requested by ViEEncoder::VideoSourceProxy::RequestResolutionLowerThan().
+  video_source_.set_adaptation_enabled(true);
+
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(1, kFrameWidth, kFrameHeight));
+  sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+
+  // Trigger CPU overuse, downscale by 3/4.
+  vie_encoder_->TriggerCpuOveruse();
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(2, kFrameWidth, kFrameHeight));
+  sink_.WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
+
+  // Trigger CPU normal use, return to original resoluton;
+  vie_encoder_->TriggerCpuNormalUsage();
+  video_source_.IncomingCapturedFrame(
+      CreateFrame(3, kFrameWidth, kFrameHeight));
+  sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
+
+  vie_encoder_->Stop();
+}
 }  // namespace webrtc