Refactor VideoDenoiser to work with I420Buffer, not VideoFrame.

BUG=webrtc:5921
R=jackychen@webrtc.org, marpan@webrtc.org

Review URL: https://codereview.webrtc.org/2005733003 .

Cr-Commit-Position: refs/heads/master@{#13179}
This commit is contained in:
Niels Möller 2016-06-17 09:12:44 +02:00
parent 68208897be
commit 6af2e86b46
7 changed files with 99 additions and 71 deletions

View File

@@ -134,6 +134,10 @@ class I420Buffer : public VideoFrameBuffer {
// Scale all of |src| to the size of |this| buffer, with no cropping.
void ScaleFrom(const rtc::scoped_refptr<VideoFrameBuffer>& src);
// Create a new buffer with identical strides, and copy the pixel data.
static rtc::scoped_refptr<I420Buffer> CopyKeepStride(
const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
protected:
~I420Buffer() override;

View File

@@ -187,20 +187,20 @@ rtc::scoped_refptr<VideoFrameBuffer> I420Buffer::NativeToI420Buffer() {
}
rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(
const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
int width = buffer->width();
int height = buffer->height();
rtc::scoped_refptr<I420Buffer> copy =
const rtc::scoped_refptr<VideoFrameBuffer>& source) {
int width = source->width();
int height = source->height();
rtc::scoped_refptr<I420Buffer> target =
new rtc::RefCountedObject<I420Buffer>(width, height);
RTC_CHECK(libyuv::I420Copy(buffer->DataY(), buffer->StrideY(),
buffer->DataU(), buffer->StrideU(),
buffer->DataV(), buffer->StrideV(),
copy->MutableDataY(), copy->StrideY(),
copy->MutableDataU(), copy->StrideU(),
copy->MutableDataV(), copy->StrideV(),
RTC_CHECK(libyuv::I420Copy(source->DataY(), source->StrideY(),
source->DataU(), source->StrideU(),
source->DataV(), source->StrideV(),
target->MutableDataY(), target->StrideY(),
target->MutableDataU(), target->StrideU(),
target->MutableDataV(), target->StrideV(),
width, height) == 0);
return copy;
return target;
}
void I420Buffer::SetToBlack() {
@@ -265,6 +265,27 @@ void I420Buffer::ScaleFrom(const rtc::scoped_refptr<VideoFrameBuffer>& src) {
CropAndScaleFrom(src, 0, 0, src->width(), src->height());
}
rtc::scoped_refptr<I420Buffer> I420Buffer::CopyKeepStride(
    const rtc::scoped_refptr<VideoFrameBuffer>& source) {
  // Deep-copy |source| into a new I420Buffer that preserves not only the
  // dimensions but also the exact per-plane strides of the original.
  const int w = source->width();
  const int h = source->height();
  const int y_stride = source->StrideY();
  const int u_stride = source->StrideU();
  const int v_stride = source->StrideV();
  rtc::scoped_refptr<I420Buffer> copy(
      new rtc::RefCountedObject<I420Buffer>(w, h, y_stride, u_stride,
                                            v_stride));
  // Identical strides on both sides, so the three planes copy verbatim.
  // RTC_CHECK: I420Copy returns 0 on success.
  RTC_CHECK(libyuv::I420Copy(source->DataY(), y_stride,
                             source->DataU(), u_stride,
                             source->DataV(), v_stride,
                             copy->MutableDataY(), y_stride,
                             copy->MutableDataU(), u_stride,
                             copy->MutableDataV(), v_stride,
                             w, h) == 0);
  return copy;
}
NativeHandleBuffer::NativeHandleBuffer(void* native_handle,
int width,
int height)

View File

@@ -96,18 +96,20 @@ const VideoFrame* VPMFramePreprocessor::PreprocessFrame(
const VideoFrame* current_frame = &frame;
if (denoiser_) {
VideoFrame* denoised_frame = &denoised_frame_[0];
VideoFrame* denoised_frame_prev = &denoised_frame_[1];
rtc::scoped_refptr<I420Buffer>* denoised_frame = &denoised_buffer_[0];
rtc::scoped_refptr<I420Buffer>* denoised_frame_prev = &denoised_buffer_[1];
// Swap the buffer to save one memcpy in DenoiseFrame.
if (denoised_frame_toggle_) {
denoised_frame = &denoised_frame_[1];
denoised_frame_prev = &denoised_frame_[0];
denoised_frame = &denoised_buffer_[1];
denoised_frame_prev = &denoised_buffer_[0];
}
// Invert the flag.
denoised_frame_toggle_ ^= 1;
denoiser_->DenoiseFrame(*current_frame, denoised_frame, denoised_frame_prev,
true);
current_frame = denoised_frame;
denoiser_->DenoiseFrame(current_frame->video_frame_buffer(), denoised_frame,
denoised_frame_prev, true);
denoised_frame_.ShallowCopy(*current_frame);
denoised_frame_.set_video_frame_buffer(*denoised_frame);
current_frame = &denoised_frame_;
}
if (spatial_resampler_->ApplyResample(current_frame->width(),

View File

@@ -61,7 +61,8 @@ class VPMFramePreprocessor {
// we can compute new content metrics every |kSkipFrameCA| frames.
enum { kSkipFrameCA = 2 };
VideoFrame denoised_frame_[2];
rtc::scoped_refptr<I420Buffer> denoised_buffer_[2];
VideoFrame denoised_frame_;
VideoFrame resampled_frame_;
VPMSpatialResampler* spatial_resampler_;
VPMVideoDecimator* vd_;

View File

@@ -132,10 +132,10 @@ TEST_F(VideoProcessingTest, Denoiser) {
VideoDenoiser denoiser_c(false);
// Create SSE or NEON denoiser.
VideoDenoiser denoiser_sse_neon(true);
VideoFrame denoised_frame_c;
VideoFrame denoised_frame_prev_c;
VideoFrame denoised_frame_sse_neon;
VideoFrame denoised_frame_prev_sse_neon;
rtc::scoped_refptr<I420Buffer> denoised_frame_c;
rtc::scoped_refptr<I420Buffer> denoised_frame_prev_c;
rtc::scoped_refptr<I420Buffer> denoised_frame_sse_neon;
rtc::scoped_refptr<I420Buffer> denoised_frame_prev_sse_neon;
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
@@ -144,10 +144,12 @@ TEST_F(VideoProcessingTest, Denoiser) {
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
VideoFrame* p_denoised_c = &denoised_frame_c;
VideoFrame* p_denoised_prev_c = &denoised_frame_prev_c;
VideoFrame* p_denoised_sse_neon = &denoised_frame_sse_neon;
VideoFrame* p_denoised_prev_sse_neon = &denoised_frame_prev_sse_neon;
rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c;
rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c;
rtc::scoped_refptr<I420Buffer>* p_denoised_sse_neon =
&denoised_frame_sse_neon;
rtc::scoped_refptr<I420Buffer>* p_denoised_prev_sse_neon =
&denoised_frame_prev_sse_neon;
// Swap the buffer to save one memcpy in DenoiseFrame.
if (denoised_frame_toggle) {
p_denoised_c = &denoised_frame_prev_c;
@@ -155,14 +157,16 @@ TEST_F(VideoProcessingTest, Denoiser) {
p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
}
denoiser_c.DenoiseFrame(video_frame_, p_denoised_c, p_denoised_prev_c,
denoiser_c.DenoiseFrame(video_frame_.video_frame_buffer(),
p_denoised_c, p_denoised_prev_c,
false);
denoiser_sse_neon.DenoiseFrame(video_frame_, p_denoised_sse_neon,
denoiser_sse_neon.DenoiseFrame(video_frame_.video_frame_buffer(),
p_denoised_sse_neon,
p_denoised_prev_sse_neon, false);
// Invert the flag.
denoised_frame_toggle ^= 1;
// Denoising results should be the same for C and SSE/NEON denoiser.
ASSERT_TRUE(test::FramesEqual(*p_denoised_c, *p_denoised_sse_neon));
ASSERT_TRUE(test::FrameBufsEqual(*p_denoised_c, *p_denoised_sse_neon));
}
ASSERT_NE(0, feof(source_file_)) << "Error reading source file";
}

View File

@@ -73,29 +73,23 @@ VideoDenoiser::VideoDenoiser(bool runtime_cpu_detection)
filter_(DenoiserFilter::Create(runtime_cpu_detection, &cpu_type_)),
ne_(new NoiseEstimation()) {}
void VideoDenoiser::DenoiserReset(const VideoFrame& frame,
VideoFrame* denoised_frame,
VideoFrame* denoised_frame_prev) {
width_ = frame.width();
height_ = frame.height();
void VideoDenoiser::DenoiserReset(
const rtc::scoped_refptr<VideoFrameBuffer>& frame,
rtc::scoped_refptr<I420Buffer>* denoised_frame,
rtc::scoped_refptr<I420Buffer>* denoised_frame_prev) {
width_ = frame->width();
height_ = frame->height();
mb_cols_ = width_ >> 4;
mb_rows_ = height_ >> 4;
stride_y_ = frame.video_frame_buffer()->StrideY();
stride_u_ = frame.video_frame_buffer()->StrideU();
stride_v_ = frame.video_frame_buffer()->StrideV();
stride_y_ = frame->StrideY();
stride_u_ = frame->StrideU();
stride_v_ = frame->StrideV();
// Allocate an empty buffer for denoised_frame_prev.
denoised_frame_prev->CreateEmptyFrame(width_, height_, stride_y_, stride_u_,
stride_v_);
*denoised_frame_prev = new rtc::RefCountedObject<I420Buffer>(
width_, height_, stride_y_, stride_u_, stride_v_);
// Allocate and initialize denoised_frame with key frame.
denoised_frame->CreateFrame(
frame.video_frame_buffer()->DataY(),
frame.video_frame_buffer()->DataU(),
frame.video_frame_buffer()->DataV(),
width_, height_, stride_y_, stride_u_, stride_v_, kVideoRotation_0);
// Set time parameters to the output frame.
denoised_frame->set_timestamp(frame.timestamp());
denoised_frame->set_render_time_ms(frame.render_time_ms());
*denoised_frame = I420Buffer::CopyKeepStride(frame);
// Init noise estimator and allocate buffers.
ne_->Init(width_, height_, cpu_type_);
@@ -225,26 +219,26 @@ void VideoDenoiser::CopyLumaOnMargin(const uint8_t* y_src, uint8_t* y_dst) {
}
}
void VideoDenoiser::DenoiseFrame(const VideoFrame& frame,
VideoFrame* denoised_frame,
VideoFrame* denoised_frame_prev,
bool noise_estimation_enabled) {
void VideoDenoiser::DenoiseFrame(
const rtc::scoped_refptr<VideoFrameBuffer>& frame,
rtc::scoped_refptr<I420Buffer>* denoised_frame,
rtc::scoped_refptr<I420Buffer>* denoised_frame_prev,
bool noise_estimation_enabled) {
// If previous width and height are different from current frame's, need to
// reallocate the buffers and no denoising for the current frame.
if (width_ != frame.width() || height_ != frame.height()) {
if (width_ != frame->width() || height_ != frame->height()) {
DenoiserReset(frame, denoised_frame, denoised_frame_prev);
return;
}
// Set buffer pointers.
const uint8_t* y_src = frame.video_frame_buffer()->DataY();
const uint8_t* u_src = frame.video_frame_buffer()->DataU();
const uint8_t* v_src = frame.video_frame_buffer()->DataV();
uint8_t* y_dst = denoised_frame->video_frame_buffer()->MutableDataY();
uint8_t* u_dst = denoised_frame->video_frame_buffer()->MutableDataU();
uint8_t* v_dst = denoised_frame->video_frame_buffer()->MutableDataV();
uint8_t* y_dst_prev =
denoised_frame_prev->video_frame_buffer()->MutableDataY();
const uint8_t* y_src = frame->DataY();
const uint8_t* u_src = frame->DataU();
const uint8_t* v_src = frame->DataV();
uint8_t* y_dst = (*denoised_frame)->MutableDataY();
uint8_t* u_dst = (*denoised_frame)->MutableDataU();
uint8_t* v_dst = (*denoised_frame)->MutableDataV();
uint8_t* y_dst_prev = (*denoised_frame_prev)->MutableDataY();
memset(x_density_.get(), 0, mb_cols_);
memset(y_density_.get(), 0, mb_rows_);
memset(moving_object_.get(), 1, mb_cols_ * mb_rows_);
@@ -338,10 +332,6 @@ void VideoDenoiser::DenoiseFrame(const VideoFrame& frame,
memcpy(u_dst, u_src, (height_ >> 1) * stride_u_);
memcpy(v_dst, v_src, (height_ >> 1) * stride_v_);
// Set time parameters to the output frame.
denoised_frame->set_timestamp(frame.timestamp());
denoised_frame->set_render_time_ms(frame.render_time_ms());
#if DISPLAY || DISPLAYNEON
// Show rectangular region
ShowRect(filter_, moving_edge_, moving_object_, x_density_, y_density_, u_src,

View File

@@ -23,15 +23,21 @@ class VideoDenoiser {
public:
explicit VideoDenoiser(bool runtime_cpu_detection);
void DenoiseFrame(const VideoFrame& frame,
VideoFrame* denoised_frame,
VideoFrame* denoised_frame_prev,
// TODO(nisse): Let the denoised_frame and denoised_frame_prev be
// member variables referencing two I420Buffer, and return a refptr
// to the current one. When we also move the double-buffering logic
// from the caller.
void DenoiseFrame(const rtc::scoped_refptr<VideoFrameBuffer>& frame,
// Buffers are allocated/replaced when dimensions
// change.
rtc::scoped_refptr<I420Buffer>* denoised_frame,
rtc::scoped_refptr<I420Buffer>* denoised_frame_prev,
bool noise_estimation_enabled);
private:
void DenoiserReset(const VideoFrame& frame,
VideoFrame* denoised_frame,
VideoFrame* denoised_frame_prev);
void DenoiserReset(const rtc::scoped_refptr<VideoFrameBuffer>& frame,
rtc::scoped_refptr<I420Buffer>* denoised_frame,
rtc::scoped_refptr<I420Buffer>* denoised_frame_prev);
// Check the mb position, return 1: close to the frame center (between 1/8
// and 7/8 of width/height), 3: close to the border (out of 1/16 and 15/16