Allow for framerate reduction for HW encoder.

R=pbos@webrtc.org, stefan@webrtc.org
TBR=glaznev@google.com

Review URL: https://webrtc-codereview.appspot.com/51159004 .

Cr-Commit-Position: refs/heads/master@{#9573}
This commit is contained in:
jackychen 2015-07-13 16:26:33 -07:00
parent 900996290c
commit 6e2ce6e1ae
21 changed files with 267 additions and 52 deletions

View File

@ -100,6 +100,8 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
void OnDroppedFrame() override;
int GetTargetFramerate() override;
private:
// CHECK-fail if not running on |codec_thread_|.
void CheckOnCodecThread();
@ -199,6 +201,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
scoped_ptr<webrtc::QualityScaler> quality_scaler_;
// Dynamic resolution change, off by default.
bool scale_;
int updated_framerate_;
};
MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
@ -294,11 +297,12 @@ int32_t MediaCodecVideoEncoder::InitEncode(
ALOGD("InitEncode request");
scale_ = false;
if (codecType_ == kVideoCodecVP8) {
quality_scaler_->Init(kMaxQP / kLowQpThresholdDenominator);
if (scale_ && codecType_ == kVideoCodecVP8) {
quality_scaler_->Init(kMaxQP / kLowQpThresholdDenominator, true);
quality_scaler_->SetMinResolution(kMinWidth, kMinHeight);
quality_scaler_->ReportFramerate(codec_settings->maxFramerate);
}
updated_framerate_ = codec_settings->maxFramerate;
return codec_thread_->Invoke<int32_t>(
Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
this,
@ -337,8 +341,11 @@ int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
uint32_t frame_rate) {
if (codecType_ == kVideoCodecVP8)
if (scale_ && codecType_ == kVideoCodecVP8) {
quality_scaler_->ReportFramerate(frame_rate);
} else {
updated_framerate_ = frame_rate;
}
return codec_thread_->Invoke<int32_t>(
Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread,
this,
@ -493,9 +500,13 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
CHECK(frame_types->size() == 1) << "Unexpected stream count";
const VideoFrame& input_frame = (scale_ && codecType_ == kVideoCodecVP8)
? quality_scaler_->GetScaledFrame(frame)
: frame;
// Check framerate before spatial resolution change.
if (scale_ && codecType_ == kVideoCodecVP8) {
quality_scaler_->OnEncodeFrame(frame);
updated_framerate_ = quality_scaler_->GetTargetFramerate();
}
const VideoFrame& input_frame = (scale_ && codecType_ == kVideoCodecVP8) ?
quality_scaler_->GetScaledFrame(frame) : frame;
if (input_frame.width() != width_ || input_frame.height() != height_) {
ALOGD("Frame resolution change from %d x %d to %d x %d",
@ -506,8 +517,6 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
return WEBRTC_VIDEO_CODEC_OK;
}
bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
// Check if we accumulated too many frames in encoder input buffers
// or the encoder latency exceeds 70 ms and drop frame if so.
if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
@ -566,6 +575,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
render_times_ms_.push_back(input_frame.render_time_ms());
frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_encode_method_,
key_frame,
@ -712,7 +722,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
last_input_timestamp_ms_ - last_output_timestamp_ms_,
frame_encoding_time_ms);
if (payload_size && codecType_ == kVideoCodecVP8)
if (payload_size && scale_ && codecType_ == kVideoCodecVP8)
quality_scaler_->ReportQP(webrtc::vp8::GetQP(payload));
// Calculate and print encoding statistics - every 3 seconds.
@ -860,10 +870,14 @@ int32_t MediaCodecVideoEncoder::NextNaluPosition(
}
void MediaCodecVideoEncoder::OnDroppedFrame() {
if (codecType_ == kVideoCodecVP8)
if (scale_ && codecType_ == kVideoCodecVP8)
quality_scaler_->ReportDroppedFrame();
}
// Returns the framerate the encoder pipeline should currently target.
// Set from InitEncode() (codec max framerate), SetRates(), and -- when the
// quality scaler is active -- from the scaler's suggestion on each encoded
// frame; -1 means no framerate reduction is requested.
// NOTE(review): updated_framerate_ is written both on the caller thread
// (SetRates) and on |codec_thread_| (EncodeOnCodecThread) with no visible
// synchronization -- confirm this is benign.
int MediaCodecVideoEncoder::GetTargetFramerate() {
return updated_framerate_;
}
MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);

View File

@ -504,4 +504,8 @@ void SimulcastEncoderAdapter::OnDroppedFrame() {
streaminfos_[0].encoder->OnDroppedFrame();
}
// Returns the target framerate suggested by the first (lowest) stream's
// encoder, mirroring OnDroppedFrame() which also consults stream 0 only.
// Guards against an uninitialized adapter: with no streams, indexing
// streaminfos_[0] would be undefined behavior, so return -1, the
// VideoEncoder default meaning "no framerate reduction requested".
int SimulcastEncoderAdapter::GetTargetFramerate() {
  if (streaminfos_.empty())
    return -1;
  return streaminfos_[0].encoder->GetTargetFramerate();
}
} // namespace webrtc

View File

@ -57,6 +57,8 @@ class SimulcastEncoderAdapter : public VP8Encoder {
void OnDroppedFrame() override;
int GetTargetFramerate() override;
private:
struct StreamInfo {
StreamInfo()

View File

@ -580,7 +580,8 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
}
rps_.Init();
quality_scaler_.Init(codec_.qpMax / QualityScaler::kDefaultLowQpDenominator);
quality_scaler_.Init(codec_.qpMax / QualityScaler::kDefaultLowQpDenominator,
false);
quality_scaler_.ReportFramerate(codec_.maxFramerate);
return InitAndSetControlSettings();
@ -709,6 +710,8 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
const bool use_quality_scaler = encoders_.size() == 1 &&
configurations_[0].rc_dropframe_thresh > 0 &&
codec_.codecSpecific.VP8.automaticResizeOn;
if (use_quality_scaler)
quality_scaler_.OnEncodeFrame(frame);
const VideoFrame& input_image =
use_quality_scaler ? quality_scaler_.GetScaledFrame(frame) : frame;

View File

@ -182,6 +182,8 @@ class VCMQMSettingsCallback {
const uint32_t width,
const uint32_t height) = 0;
virtual void SetTargetFramerate(int frame_rate) = 0;
protected:
virtual ~VCMQMSettingsCallback() {
}

View File

@ -214,6 +214,10 @@ bool VCMGenericEncoder::SupportsNativeHandle() const {
return encoder_->SupportsNativeHandle();
}
// Forwards the target-framerate query to the wrapped external encoder
// (-1 from the encoder means no framerate reduction is requested).
int VCMGenericEncoder::GetTargetFramerate() {
return encoder_->GetTargetFramerate();
}
/***************************
* Callback Implementation
***************************/

View File

@ -140,6 +140,8 @@ public:
bool SupportsNativeHandle() const;
int GetTargetFramerate();
private:
VideoEncoder* const encoder_;
VideoEncoderRateObserver* const rate_observer_;

View File

@ -363,6 +363,8 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
_nextFrameTypes[i] = kVideoFrameDelta; // Default frame type.
}
if (qm_settings_callback_)
qm_settings_callback_->SetTargetFramerate(_encoder->GetTargetFramerate());
return VCM_OK;
}

View File

@ -25,14 +25,16 @@ class QualityScaler {
};
QualityScaler();
void Init(int low_qp_threshold);
void Init(int low_qp_threshold, bool use_framerate_reduction);
void SetMinResolution(int min_width, int min_height);
void ReportFramerate(int framerate);
void ReportQP(int qp);
void ReportDroppedFrame();
void Reset(int framerate, int bitrate, int width, int height);
Resolution GetScaledResolution(const VideoFrame& frame);
void OnEncodeFrame(const VideoFrame& frame);
Resolution GetScaledResolution() const;
const VideoFrame& GetScaledFrame(const VideoFrame& frame);
int GetTargetFramerate() const;
private:
void AdjustScale(bool up);
@ -42,11 +44,16 @@ class QualityScaler {
VideoFrame scaled_frame_;
size_t num_samples_;
int framerate_;
int target_framerate_;
int low_qp_threshold_;
MovingAverage<int> framedrop_percent_;
MovingAverage<int> average_qp_;
Resolution res_;
int downscale_shift_;
int framerate_down_;
bool use_framerate_reduction_;
int min_width_;
int min_height_;
};

View File

@ -24,13 +24,16 @@ QualityScaler::QualityScaler()
: num_samples_(0),
low_qp_threshold_(-1),
downscale_shift_(0),
framerate_down_(false),
min_width_(kDefaultMinDownscaleDimension),
min_height_(kDefaultMinDownscaleDimension) {
}
void QualityScaler::Init(int low_qp_threshold) {
void QualityScaler::Init(int low_qp_threshold, bool use_framerate_reduction) {
ClearSamples();
low_qp_threshold_ = low_qp_threshold;
use_framerate_reduction_ = use_framerate_reduction;
target_framerate_ = -1;
}
void QualityScaler::SetMinResolution(int min_width, int min_height) {
@ -42,6 +45,7 @@ void QualityScaler::SetMinResolution(int min_width, int min_height) {
void QualityScaler::ReportFramerate(int framerate) {
num_samples_ = static_cast<size_t>(
kMeasureSeconds * (framerate < kMinFps ? kMinFps : framerate));
framerate_ = framerate;
}
void QualityScaler::ReportQP(int qp) {
@ -53,41 +57,65 @@ void QualityScaler::ReportDroppedFrame() {
framedrop_percent_.AddSample(100);
}
QualityScaler::Resolution QualityScaler::GetScaledResolution(
const VideoFrame& frame) {
void QualityScaler::OnEncodeFrame(const VideoFrame& frame) {
// Should be set through InitEncode -> Should be set by now.
assert(low_qp_threshold_ >= 0);
assert(num_samples_ > 0);
Resolution res;
res.width = frame.width();
res.height = frame.height();
res_.width = frame.width();
res_.height = frame.height();
// Update scale factor.
int avg_drop = 0;
int avg_qp = 0;
// When encoder consistently overshoots, framerate reduction and spatial
// resizing will be triggered to get a smoother video.
if (framedrop_percent_.GetAverage(num_samples_, &avg_drop) &&
avg_drop >= kFramedropPercentThreshold) {
AdjustScale(false);
// Reducing frame rate before spatial resolution change.
// Reduce frame rate only when it is above a certain number.
// Only one reduction is allowed for now.
// TODO(jackychen): Allow more than one framerate reduction.
if (use_framerate_reduction_ && !framerate_down_ && framerate_ >= 20) {
target_framerate_ = framerate_ / 2;
framerate_down_ = true;
// If frame rate has been updated, clear the buffer. We don't want
// spatial resolution to change right after frame rate change.
ClearSamples();
} else {
AdjustScale(false);
}
} else if (average_qp_.GetAverage(num_samples_, &avg_qp) &&
avg_qp <= low_qp_threshold_) {
AdjustScale(true);
if (use_framerate_reduction_ && framerate_down_) {
target_framerate_ = -1;
framerate_down_ = false;
ClearSamples();
} else {
AdjustScale(true);
}
}
assert(downscale_shift_ >= 0);
for (int shift = downscale_shift_;
shift > 0 && (res.width >> 1 >= min_width_) &&
(res.height >> 1 >= min_height_);
shift > 0 && (res_.width / 2 >= min_width_) &&
(res_.height / 2 >= min_height_);
--shift) {
res.width >>= 1;
res.height >>= 1;
res_.width /= 2;
res_.height /= 2;
}
}
return res;
// Returns the resolution computed by the most recent OnEncodeFrame() call.
// Callers must invoke OnEncodeFrame() first; res_ is only updated there.
QualityScaler::Resolution QualityScaler::GetScaledResolution() const {
return res_;
}
// Returns the reduced framerate chosen by OnEncodeFrame(), or -1 when no
// framerate reduction is in effect (initial value set in Init(), and
// restored when quality recovers).
int QualityScaler::GetTargetFramerate() const {
return target_framerate_;
}
const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
Resolution res = GetScaledResolution(frame);
Resolution res = GetScaledResolution();
if (res.width == frame.width())
return frame;

View File

@ -25,18 +25,28 @@ static const int kMaxQp = 56;
} // namespace
class QualityScalerTest : public ::testing::Test {
public:
// Temporal and spatial resolution.
struct Resolution {
int framerate;
int width;
int height;
};
protected:
enum ScaleDirection { kScaleDown, kScaleUp };
enum BadQualityMetric { kDropFrame, kReportLowQP };
QualityScalerTest() {
input_frame_.CreateEmptyFrame(
kWidth, kHeight, kWidth, kHalfWidth, kHalfWidth);
qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator);
qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, false);
qs_.ReportFramerate(kFramerate);
qs_.OnEncodeFrame(input_frame_);
}
bool TriggerScale(ScaleDirection scale_direction) {
int initial_width = qs_.GetScaledResolution(input_frame_).width;
qs_.OnEncodeFrame(input_frame_);
int initial_width = qs_.GetScaledResolution().width;
for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
switch (scale_direction) {
case kScaleUp:
@ -46,8 +56,8 @@ class QualityScalerTest : public ::testing::Test {
qs_.ReportDroppedFrame();
break;
}
if (qs_.GetScaledResolution(input_frame_).width != initial_width)
qs_.OnEncodeFrame(input_frame_);
if (qs_.GetScaledResolution().width != initial_width)
return true;
}
@ -60,7 +70,8 @@ class QualityScalerTest : public ::testing::Test {
}
void ExpectScaleUsingReportedResolution() {
QualityScaler::Resolution res = qs_.GetScaledResolution(input_frame_);
qs_.OnEncodeFrame(input_frame_);
QualityScaler::Resolution res = qs_.GetScaledResolution();
const VideoFrame& scaled_frame = qs_.GetScaledFrame(input_frame_);
EXPECT_EQ(res.width, scaled_frame.width());
EXPECT_EQ(res.height, scaled_frame.height());
@ -70,6 +81,14 @@ class QualityScalerTest : public ::testing::Test {
void DoesNotDownscaleFrameDimensions(int width, int height);
Resolution TriggerResolutionChange(BadQualityMetric dropframe_lowqp,
int num_second,
int initial_framerate);
void VerifyQualityAdaptation(int initial_framerate, int seconds,
bool expect_spatial_resize,
bool expect_framerate_reduction);
void DownscaleEndsAt(int input_width,
int input_height,
int end_width,
@ -84,7 +103,8 @@ TEST_F(QualityScalerTest, UsesOriginalFrameInitially) {
}
TEST_F(QualityScalerTest, ReportsOriginalResolutionInitially) {
QualityScaler::Resolution res = qs_.GetScaledResolution(input_frame_);
qs_.OnEncodeFrame(input_frame_);
QualityScaler::Resolution res = qs_.GetScaledResolution();
EXPECT_EQ(input_frame_.width(), res.width);
EXPECT_EQ(input_frame_.height(), res.height);
}
@ -92,7 +112,7 @@ TEST_F(QualityScalerTest, ReportsOriginalResolutionInitially) {
TEST_F(QualityScalerTest, DownscalesAfterContinuousFramedrop) {
EXPECT_TRUE(TriggerScale(kScaleDown)) << "No downscale within " << kNumSeconds
<< " seconds.";
QualityScaler::Resolution res = qs_.GetScaledResolution(input_frame_);
QualityScaler::Resolution res = qs_.GetScaledResolution();
EXPECT_LT(res.width, input_frame_.width());
EXPECT_LT(res.height, input_frame_.height());
}
@ -102,7 +122,8 @@ TEST_F(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {
qs_.ReportQP(kNormalQp);
qs_.ReportDroppedFrame();
qs_.ReportDroppedFrame();
if (qs_.GetScaledResolution(input_frame_).width < input_frame_.width())
qs_.OnEncodeFrame(input_frame_);
if (qs_.GetScaledResolution().width < input_frame_.width())
return;
}
@ -112,7 +133,8 @@ TEST_F(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {
TEST_F(QualityScalerTest, DoesNotDownscaleOnNormalQp) {
for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
qs_.ReportQP(kNormalQp);
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution(input_frame_).width)
qs_.OnEncodeFrame(input_frame_);
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
<< "Unexpected scale on half framedrop.";
}
}
@ -120,11 +142,13 @@ TEST_F(QualityScalerTest, DoesNotDownscaleOnNormalQp) {
TEST_F(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) {
for (int i = 0; i < kFramerate * kNumSeconds / 2; ++i) {
qs_.ReportQP(kNormalQp);
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution(input_frame_).width)
qs_.OnEncodeFrame(input_frame_);
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
<< "Unexpected scale on half framedrop.";
qs_.ReportDroppedFrame();
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution(input_frame_).width)
qs_.OnEncodeFrame(input_frame_);
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
<< "Unexpected scale on half framedrop.";
}
}
@ -139,7 +163,8 @@ void QualityScalerTest::ContinuouslyDownscalesByHalfDimensionsAndBackUp() {
while (min_dimension >= 2 * QualityScaler::kDefaultMinDownscaleDimension) {
EXPECT_TRUE(TriggerScale(kScaleDown)) << "No downscale within "
<< kNumSeconds << " seconds.";
QualityScaler::Resolution res = qs_.GetScaledResolution(input_frame_);
qs_.OnEncodeFrame(input_frame_);
QualityScaler::Resolution res = qs_.GetScaledResolution();
min_dimension = res.width < res.height ? res.width : res.height;
++current_shift;
ASSERT_EQ(input_frame_.width() >> current_shift, res.width);
@ -151,7 +176,8 @@ void QualityScalerTest::ContinuouslyDownscalesByHalfDimensionsAndBackUp() {
while (min_dimension < initial_min_dimension) {
EXPECT_TRUE(TriggerScale(kScaleUp)) << "No upscale within " << kNumSeconds
<< " seconds.";
QualityScaler::Resolution res = qs_.GetScaledResolution(input_frame_);
qs_.OnEncodeFrame(input_frame_);
QualityScaler::Resolution res = qs_.GetScaledResolution();
min_dimension = res.width < res.height ? res.width : res.height;
--current_shift;
ASSERT_EQ(input_frame_.width() >> current_shift, res.width);
@ -186,7 +212,8 @@ void QualityScalerTest::DoesNotDownscaleFrameDimensions(int width, int height) {
for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
qs_.ReportDroppedFrame();
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution(input_frame_).width)
qs_.OnEncodeFrame(input_frame_);
ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
<< "Unexpected scale of minimal-size frame.";
}
}
@ -203,6 +230,94 @@ TEST_F(QualityScalerTest, DoesNotDownscaleFrom1Px) {
DoesNotDownscaleFrameDimensions(1, 1);
}
// Feeds |num_second| seconds' worth of the given bad-quality signal
// (dropped frames or low-QP reports) into the scaler and returns the
// resulting temporal (framerate) and spatial (width/height) resolution.
QualityScalerTest::Resolution QualityScalerTest::TriggerResolutionChange(
BadQualityMetric dropframe_lowqp, int num_second, int initial_framerate) {
QualityScalerTest::Resolution res;
res.framerate = initial_framerate;
qs_.OnEncodeFrame(input_frame_);
res.width = qs_.GetScaledResolution().width;
res.height = qs_.GetScaledResolution().height;
for (int i = 0; i < kFramerate * num_second; ++i) {
switch (dropframe_lowqp) {
case kReportLowQP:
qs_.ReportQP(kLowQp);
break;
case kDropFrame:
qs_.ReportDroppedFrame();
break;
}
qs_.OnEncodeFrame(input_frame_);
// Simulate the case when SetRates is called right after reducing
// framerate: the original (max) framerate is reported first, then the
// scaler's suggestion (if any) overrides it, as the real encoder does.
qs_.ReportFramerate(initial_framerate);
res.framerate = qs_.GetTargetFramerate();
// -1 means the scaler is not currently requesting a framerate reduction.
if (res.framerate != -1)
qs_.ReportFramerate(res.framerate);
res.width = qs_.GetScaledResolution().width;
res.height = qs_.GetScaledResolution().height;
}
return res;
}
// Drives the scaler with continuous frame drops for |seconds| and checks
// whether framerate reduction and/or spatial downscaling occur as expected,
// then drives it with low-QP reports long enough to verify full recovery of
// both framerate and spatial resolution.
void QualityScalerTest::VerifyQualityAdaptation(
int initial_framerate, int seconds, bool expect_spatial_resize,
bool expect_framerate_reduction) {
// Re-initialize with use_framerate_reduction enabled (the fixture's
// constructor initialized the scaler with it disabled).
qs_.Init(kMaxQp / QualityScaler::kDefaultLowQpDenominator, true);
qs_.OnEncodeFrame(input_frame_);
int init_width = qs_.GetScaledResolution().width;
int init_height = qs_.GetScaledResolution().height;
// Test reducing framerate by dropping frame continuously.
QualityScalerTest::Resolution res = TriggerResolutionChange(
kDropFrame, seconds, initial_framerate);
if (expect_framerate_reduction) {
EXPECT_LT(res.framerate, initial_framerate);
} else {
// No framerate reduction, video decimator should be disabled.
EXPECT_EQ(-1, res.framerate);
}
if (expect_spatial_resize) {
EXPECT_LT(res.width, init_width);
EXPECT_LT(res.height, init_height);
} else {
EXPECT_EQ(init_width, res.width);
EXPECT_EQ(init_height, res.height);
}
// The "seconds * 1.5" is to ensure spatial resolution to recover.
// For example, in 10 seconds test, framerate reduction happens in the first
// 5 seconds from 30fps to 15fps and causes the buffer size to be half of the
// original one. Then it will take only 75 samples to downscale (twice in 150
// samples). So to recover the resolution changes, we need more than 10
// seconds (i.e, seconds * 1.5). This is because the framerate increases
// before spatial size recovers, so it will take 150 samples to recover
// spatial size (300 for twice).
res = TriggerResolutionChange(kReportLowQP, seconds * 1.5, initial_framerate);
EXPECT_EQ(-1, res.framerate);
EXPECT_EQ(init_width, res.width);
EXPECT_EQ(init_height, res.height);
}
// In a 5 second test only framerate adjusting should happen: the sample
// buffer is cleared right after the framerate drop, so there is not yet
// enough accumulated data to also trigger a spatial resize.
TEST_F(QualityScalerTest, ChangeFramerateOnly) {
VerifyQualityAdaptation(kFramerate, 5, false, true);
}
// In a 10 second test, framerate adjusting and spatial scaling are both
// triggered; this shows that spatial scaling only happens after the
// framerate has already been reduced.
TEST_F(QualityScalerTest, ChangeFramerateAndSpatialSize) {
VerifyQualityAdaptation(kFramerate, 10, true, true);
}
// When starting from a low framerate, only the spatial size is changed --
// framerate reduction is skipped (the scaler only reduces framerate above
// a threshold; assumes kFramerate/2 is below it -- confirm kFramerate).
TEST_F(QualityScalerTest, ChangeSpatialSizeOnly) {
qs_.ReportFramerate(kFramerate >> 1);
VerifyQualityAdaptation(kFramerate >> 1, 10, true, false);
}
TEST_F(QualityScalerTest, DoesNotDownscaleBelow2xDefaultMinDimensionsWidth) {
DoesNotDownscaleFrameDimensions(
2 * QualityScaler::kDefaultMinDownscaleDimension - 1, 1000);
@ -227,7 +342,7 @@ void QualityScalerTest::DownscaleEndsAt(int input_width,
// Drop all frames to force-trigger downscaling.
while (true) {
TriggerScale(kScaleDown);
QualityScaler::Resolution res = qs_.GetScaledResolution(input_frame_);
QualityScaler::Resolution res = qs_.GetScaledResolution();
if (last_width == res.width) {
EXPECT_EQ(last_height, res.height);
EXPECT_EQ(end_width, res.width);

View File

@ -214,6 +214,8 @@ class VideoProcessingModule : public Module {
uint32_t height,
uint32_t frame_rate) = 0;
virtual void SetTargetFramerate(int frame_rate) {}
/**
Get decimated(target) frame rate
*/

View File

@ -38,7 +38,6 @@ void VPMFramePreprocessor::Reset() {
frame_cnt_ = 0;
}
void VPMFramePreprocessor::EnableTemporalDecimation(bool enable) {
vd_->EnableTemporalDecimation(enable);
}
@ -62,12 +61,19 @@ int32_t VPMFramePreprocessor::SetTargetResolution(
if (ret_val < 0) return ret_val;
ret_val = vd_->SetTargetFramerate(frame_rate);
if (ret_val < 0) return ret_val;
vd_->SetTargetFramerate(frame_rate);
return VPM_OK;
}
// Applies the encoder-suggested target framerate. -1 is the sentinel for
// "no framerate reduction", which turns temporal decimation off entirely;
// any other value enables decimation at that rate.
void VPMFramePreprocessor::SetTargetFramerate(int frame_rate) {
if (frame_rate == -1) {
vd_->EnableTemporalDecimation(false);
} else {
vd_->EnableTemporalDecimation(true);
vd_->SetTargetFramerate(frame_rate);
}
}
void VPMFramePreprocessor::UpdateIncomingframe_rate() {
vd_->UpdateIncomingframe_rate();
}

View File

@ -41,6 +41,9 @@ class VPMFramePreprocessor {
int32_t SetTargetResolution(uint32_t width, uint32_t height,
uint32_t frame_rate);
// Set target frame rate.
void SetTargetFramerate(int frame_rate);
// Update incoming frame rate/dimension.
void UpdateIncomingframe_rate();

View File

@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/checks.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_processing/main/source/video_decimator.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
@ -36,11 +37,9 @@ void VPMVideoDecimator::EnableTemporalDecimation(bool enable) {
enable_temporal_decimation_ = enable;
}
int32_t VPMVideoDecimator::SetTargetFramerate(uint32_t frame_rate) {
if (frame_rate == 0) return VPM_PARAMETER_ERROR;
void VPMVideoDecimator::SetTargetFramerate(int frame_rate) {
DCHECK(frame_rate);
target_frame_rate_ = frame_rate;
return VPM_OK;
}
bool VPMVideoDecimator::DropFrame() {

View File

@ -25,7 +25,7 @@ class VPMVideoDecimator {
void EnableTemporalDecimation(bool enable);
int32_t SetTargetFramerate(uint32_t frame_rate);
void SetTargetFramerate(int frame_rate);
bool DropFrame();

View File

@ -148,6 +148,11 @@ int32_t VideoProcessingModuleImpl::SetTargetResolution(uint32_t width,
return frame_pre_processor_.SetTargetResolution(width, height, frame_rate);
}
// Thread-safe passthrough to the frame pre-processor; mutex_ serializes
// this with the module's other entry points.
void VideoProcessingModuleImpl::SetTargetFramerate(int frame_rate) {
CriticalSectionScoped cs(&mutex_);
frame_pre_processor_.SetTargetFramerate(frame_rate);
}
uint32_t VideoProcessingModuleImpl::Decimatedframe_rate() {
CriticalSectionScoped cs(&mutex_);
return frame_pre_processor_.Decimatedframe_rate();

View File

@ -48,6 +48,8 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
uint32_t height,
uint32_t frame_rate) override;
void SetTargetFramerate(int frame_rate) override;
// Get decimated values: frame rate/dimension
uint32_t Decimatedframe_rate() override;
uint32_t DecimatedWidth() const override;

View File

@ -134,4 +134,10 @@ bool VideoEncoderSoftwareFallbackWrapper::SupportsNativeHandle() const {
return encoder_->SupportsNativeHandle();
}
// Queries the currently active encoder: the software fallback when it is
// engaged, otherwise the wrapped (hardware) encoder.
int VideoEncoderSoftwareFallbackWrapper::GetTargetFramerate() {
if (fallback_encoder_)
return fallback_encoder_->GetTargetFramerate();
return encoder_->GetTargetFramerate();
}
} // namespace webrtc

View File

@ -125,6 +125,7 @@ class VideoEncoder {
return -1;
}
virtual void OnDroppedFrame() {}
virtual int GetTargetFramerate() { return -1; }
virtual bool SupportsNativeHandle() const { return false; }
};
@ -151,6 +152,7 @@ class VideoEncoderSoftwareFallbackWrapper : public VideoEncoder {
int32_t SetRates(uint32_t bitrate, uint32_t framerate) override;
void OnDroppedFrame() override;
int GetTargetFramerate() override;
bool SupportsNativeHandle() const override;
private:

View File

@ -78,6 +78,9 @@ class QMVideoSettingsCallback : public VCMQMSettingsCallback {
const uint32_t width,
const uint32_t height);
// Update target frame rate.
void SetTargetFramerate(int frame_rate);
private:
VideoProcessingModule* vpm_;
};
@ -868,4 +871,8 @@ int32_t QMVideoSettingsCallback::SetVideoQMSettings(
return vpm_->SetTargetResolution(width, height, frame_rate);
}
// Relays the encoder's suggested framerate to the video processing module,
// which enables or disables temporal decimation accordingly (-1 disables).
void QMVideoSettingsCallback::SetTargetFramerate(int frame_rate) {
vpm_->SetTargetFramerate(frame_rate);
}
} // namespace webrtc