New helper function test::ReadI420Buffer, refactor FrameReader to use it.

This change reduces the number of places where we first fread an I420
frame into a uint8_t buffer and then copy it into a frame buffer
object.

BUG=None

Review-Url: https://codereview.webrtc.org/2362683002
Cr-Commit-Position: refs/heads/master@{#14456}
This commit is contained in:
nisse 2016-09-30 04:14:07 -07:00 committed by Commit bot
parent 6f112cc136
commit 115bd153c7
19 changed files with 182 additions and 229 deletions

View File

@ -15,6 +15,7 @@
#include "webrtc/test/gtest.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/video_frame.h"
@ -35,8 +36,7 @@ class TestLibYuv : public ::testing::Test {
virtual void TearDown();
FILE* source_file_;
VideoFrame orig_frame_;
std::unique_ptr<uint8_t[]> orig_buffer_;
std::unique_ptr<VideoFrame> orig_frame_;
const int width_;
const int height_;
const int size_y_;
@ -51,9 +51,7 @@ TestLibYuv::TestLibYuv()
height_(288),
size_y_(width_ * height_),
size_uv_(((width_ + 1) / 2) * ((height_ + 1) / 2)),
frame_length_(CalcBufferSize(kI420, 352, 288)) {
orig_buffer_.reset(new uint8_t[frame_length_]);
}
frame_length_(CalcBufferSize(kI420, 352, 288)) {}
void TestLibYuv::SetUp() {
const std::string input_file_name = webrtc::test::ResourcePath("foreman_cif",
@ -62,16 +60,10 @@ void TestLibYuv::SetUp() {
ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
input_file_name << "\n";
EXPECT_EQ(frame_length_,
fread(orig_buffer_.get(), 1, frame_length_, source_file_));
orig_frame_.CreateFrame(orig_buffer_.get(),
orig_buffer_.get() + size_y_,
orig_buffer_.get() +
size_y_ + size_uv_,
width_, height_,
width_, (width_ + 1) / 2,
(width_ + 1) / 2,
kVideoRotation_0);
rtc::scoped_refptr<VideoFrameBuffer> buffer(
test::ReadI420Buffer(width_, height_, source_file_));
orig_frame_.reset(new VideoFrame(buffer, kVideoRotation_0, 0));
}
void TestLibYuv::TearDown() {
@ -100,7 +92,7 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> I420 \n", j);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0, out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
@ -108,7 +100,7 @@ TEST_F(TestLibYuv, ConvertTest) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
j++;
@ -120,7 +112,7 @@ TEST_F(TestLibYuv, ConvertTest) {
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));
EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
@ -129,7 +121,7 @@ TEST_F(TestLibYuv, ConvertTest) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
// Optimization speed-quality trade-off => 45 dB only (platform dependent).
EXPECT_GT(ceil(psnr), 44);
@ -137,11 +129,11 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> UYVY\n", j);
std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
@ -150,7 +142,7 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> YUY2\n", j);
std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0,
@ -160,14 +152,14 @@ TEST_F(TestLibYuv, ConvertTest) {
return;
}
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
printf("\nConvert #%d I420 <-> RGB565\n", j);
std::unique_ptr<uint8_t[]> out_rgb565_buffer(
new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0,
ConvertFromI420(orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
ConvertFromI420(*orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
height_, 0,
@ -177,7 +169,7 @@ TEST_F(TestLibYuv, ConvertTest) {
}
j++;
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
// TODO(leozwang): Investigate what the right psnr threshold should be for
// I420ToRGB565. Another example is I420ToRGB24, where the psnr is 44.
// TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.
@ -187,7 +179,7 @@ TEST_F(TestLibYuv, ConvertTest) {
std::unique_ptr<uint8_t[]> out_argb8888_buffer(
new uint8_t[width_ * height_ * 4]);
EXPECT_EQ(0,
ConvertFromI420(orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
ConvertFromI420(*orig_frame_, kARGB, 0, out_argb8888_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
@ -197,7 +189,7 @@ TEST_F(TestLibYuv, ConvertTest) {
return;
}
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
// TODO(leozwang): Investigate what the right psnr threshold should be for
// I420ToARGB8888.
EXPECT_GT(ceil(psnr), 42);
@ -221,7 +213,7 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
rtc::scoped_refptr<I420Buffer> res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
EXPECT_EQ(0, ConvertFromI420(*orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0,
@ -230,11 +222,10 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
psnr = I420PSNR(*orig_frame_->video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
}
TEST_F(TestLibYuv, RotateTest) {
// Use ConvertToI420 for multiple rotations - see that nothing breaks, all
// memory is properly allocated and end result is equal to the starting point.
@ -242,18 +233,22 @@ TEST_F(TestLibYuv, RotateTest) {
int rotated_height = width_;
int stride_y;
int stride_uv;
// Assume compact layout, no padding.
const uint8_t *orig_buffer = orig_frame_->video_frame_buffer()->DataY();
Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_90,
rotated_res_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_270,
rotated_res_i420_buffer.get()));
rotated_res_i420_buffer = I420Buffer::Create(
width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
0, kVideoRotation_180,
rotated_res_i420_buffer.get()));
}

View File

@ -56,7 +56,6 @@ VideoProcessorImpl::VideoProcessorImpl(webrtc::VideoEncoder* encoder,
stats_(stats),
encode_callback_(NULL),
decode_callback_(NULL),
source_buffer_(NULL),
first_key_frame_has_been_excluded_(false),
last_frame_missing_(false),
initialized_(false),
@ -81,7 +80,6 @@ bool VideoProcessorImpl::Init() {
// Initialize data structures used by the encoder/decoder APIs
size_t frame_length_in_bytes = frame_reader_->FrameLength();
source_buffer_ = new uint8_t[frame_length_in_bytes];
last_successful_frame_buffer_ = new uint8_t[frame_length_in_bytes];
// Set fixed properties common for all frames.
// To keep track of spatial resize actions by encoder.
@ -143,7 +141,6 @@ bool VideoProcessorImpl::Init() {
}
VideoProcessorImpl::~VideoProcessorImpl() {
delete[] source_buffer_;
delete[] last_successful_frame_buffer_;
encoder_->RegisterEncodeCompleteCallback(NULL);
delete encode_callback_;
@ -190,17 +187,15 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
if (frame_number == 0) {
prev_time_stamp_ = -1;
}
if (frame_reader_->ReadFrame(source_buffer_)) {
// Copy the source frame to the newly read frame data.
source_frame_.CreateFrame(source_buffer_, config_.codec_settings->width,
config_.codec_settings->height, kVideoRotation_0);
rtc::scoped_refptr<VideoFrameBuffer> buffer(frame_reader_->ReadFrame());
if (buffer) {
// Use the frame number as "timestamp" to identify frames
VideoFrame source_frame(buffer, frame_number, 0, webrtc::kVideoRotation_0);
// Ensure we have a new statistics data object we can fill:
FrameStatistic& stat = stats_->NewFrame(frame_number);
encode_start_ns_ = rtc::TimeNanos();
// Use the frame number as "timestamp" to identify frames
source_frame_.set_timestamp(frame_number);
// Decide if we're going to force a keyframe:
std::vector<FrameType> frame_types(1, kVideoFrameDelta);
@ -213,7 +208,7 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
encoded_frame_size_ = 0;
encoded_frame_type_ = kVideoFrameDelta;
int32_t encode_result = encoder_->Encode(source_frame_, NULL, &frame_types);
int32_t encode_result = encoder_->Encode(source_frame, NULL, &frame_types);
if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
fprintf(stderr, "Failed to encode frame %d, return code: %d\n",

View File

@ -199,8 +199,6 @@ class VideoProcessorImpl : public VideoProcessor {
EncodedImageCallback* encode_callback_;
DecodedImageCallback* decode_callback_;
// Buffer used for reading the source video file:
uint8_t* source_buffer_;
// Keep track of the last successful frame, since we need to write that
// when decoding fails:
uint8_t* last_successful_frame_buffer_;

View File

@ -226,7 +226,8 @@ class VideoProcessorIntegrationTest : public testing::Test {
break;
}
frame_reader_ = new webrtc::test::FrameReaderImpl(
config_.input_filename, config_.frame_length_in_bytes);
config_.input_filename, config_.codec_settings->width,
config_.codec_settings->height);
frame_writer_ = new webrtc::test::FrameWriterImpl(
config_.output_filename, config_.frame_length_in_bytes);
ASSERT_TRUE(frame_reader_->Init());

View File

@ -75,7 +75,8 @@ TEST_F(VideoProcessorTest, Init) {
TEST_F(VideoProcessorTest, ProcessFrame) {
ExpectInit();
EXPECT_CALL(encoder_mock_, Encode(_, _, _)).Times(1);
EXPECT_CALL(frame_reader_mock_, ReadFrame(_)).WillOnce(Return(true));
EXPECT_CALL(frame_reader_mock_, ReadFrame())
.WillOnce(Return(I420Buffer::Create(50, 50)));
// Since we don't return any callback from the mock, the decoder will not
// be more than initialized...
VideoProcessorImpl video_processor(

View File

@ -17,6 +17,7 @@
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@ -128,14 +129,11 @@ class TestVp8Impl : public ::testing::Test {
decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get());
// Using a QCIF image (aligned stride (u,v planes) > width).
// Processing only one frame.
length_source_frame_ = CalcBufferSize(kI420, kWidth, kHeight);
source_buffer_.reset(new uint8_t[length_source_frame_]);
source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL);
// Set input frame.
ASSERT_EQ(
fread(source_buffer_.get(), 1, length_source_frame_, source_file_),
length_source_frame_);
rtc::scoped_refptr<VideoFrameBuffer> compact_buffer(
test::ReadI420Buffer(kWidth, kHeight, source_file_));
ASSERT_TRUE(compact_buffer);
codec_inst_.width = kWidth;
codec_inst_.height = kHeight;
const int kFramerate = 30;
@ -147,15 +145,15 @@ class TestVp8Impl : public ::testing::Test {
EXPECT_EQ(stride_y, 176);
EXPECT_EQ(stride_uv, 96);
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
codec_inst_.width, codec_inst_.height, stride_y, stride_uv, stride_uv);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(
0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width,
codec_inst_.height, 0, kVideoRotation_0,
buffer.get()));
rtc::scoped_refptr<I420Buffer> stride_buffer(
I420Buffer::Create(kWidth, kHeight, stride_y, stride_uv, stride_uv));
// No scaling in our case, just a copy, to add stride to the image.
stride_buffer->ScaleFrom(compact_buffer);
input_frame_.reset(
new VideoFrame(buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0));
new VideoFrame(stride_buffer, kVideoRotation_0, 0));
input_frame_->set_timestamp(kTestTimestamp);
}
void SetUpEncodeDecode() {
@ -202,7 +200,6 @@ class TestVp8Impl : public ::testing::Test {
std::unique_ptr<VideoDecoder> decoder_;
EncodedImage encoded_frame_;
VideoFrame decoded_frame_;
size_t length_source_frame_;
VideoCodec codec_inst_;
};

View File

@ -163,19 +163,16 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
int64_t starttime = rtc::TimeMillis();
int frame_cnt = 1;
int frames_processed = 0;
rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
webrtc::I420Buffer::Create(width, height, width, half_width, half_width);
while (!feof(input_file) &&
(num_frames == -1 || frames_processed < num_frames)) {
if (fread(frame_buffer.get(), 1, length, input_file) != length)
continue;
while (num_frames == -1 || frames_processed < num_frames) {
rtc::scoped_refptr<VideoFrameBuffer> buffer(
test::ReadI420Buffer(width, height, input_file));
if (!buffer) {
// EOF or read error.
break;
}
if (frame_cnt >= start_frame) {
webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
height, 0, webrtc::kVideoRotation_0, &i420_buffer);
webrtc::VideoFrame input_frame(i420_buffer, 0, 0,
webrtc::kVideoRotation_0);
encoder->Encode(input_frame, NULL, NULL);
encoder->Encode(VideoFrame(buffer, webrtc::kVideoRotation_0, 0),
NULL, NULL);
decoder->Decode(encoder_callback.encoded_image(), false, NULL);
++frames_processed;
}

View File

@ -125,6 +125,8 @@ TEST_F(VideoProcessingTest, MbDenoise) {
EXPECT_EQ(COPY_BLOCK, decision);
}
// TODO(nisse): Refactor to not use test fixture. Can use some static
// helper method to open the input file.
TEST_F(VideoProcessingTest, Denoiser) {
// Used in swap buffer.
int denoised_frame_toggle = 0;
@ -137,14 +139,11 @@ TEST_F(VideoProcessingTest, Denoiser) {
rtc::scoped_refptr<I420Buffer> denoised_frame_sse_neon;
rtc::scoped_refptr<I420Buffer> denoised_frame_prev_sse_neon;
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_) {
// Using ConvertToI420 to add stride to the image.
rtc::scoped_refptr<webrtc::I420Buffer> input_buffer =
I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, input_buffer.get()));
for (;;) {
rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer(
test::ReadI420Buffer(width_, height_, source_file_));
if (!video_frame_buffer)
break;
rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c;
rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c;
@ -159,9 +158,11 @@ TEST_F(VideoProcessingTest, Denoiser) {
p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
}
denoiser_c.DenoiseFrame(input_buffer, p_denoised_c, p_denoised_prev_c,
denoiser_c.DenoiseFrame(video_frame_buffer,
p_denoised_c, p_denoised_prev_c,
false);
denoiser_sse_neon.DenoiseFrame(input_buffer, p_denoised_sse_neon,
denoiser_sse_neon.DenoiseFrame(video_frame_buffer,
p_denoised_sse_neon,
p_denoised_prev_sse_neon, false);
// Invert the flag.
denoised_frame_toggle ^= 1;

View File

@ -18,6 +18,7 @@
#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
@ -34,10 +35,8 @@ static void PreprocessFrameAndVerify(const VideoFrame& source,
int target_height,
VideoProcessing* vpm,
const VideoFrame* out_frame);
rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
int source_width,
int source_height,
static rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
rtc::scoped_refptr<VideoFrameBuffer> source_buffer,
int offset_x,
int offset_y,
int cropped_width,
@ -48,7 +47,7 @@ rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
// verified under the same conditions.
static void TestSize(
const VideoFrame& source_frame,
const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
const VideoFrameBuffer& cropped_source_buffer,
int target_width,
int target_height,
double expected_psnr,
@ -133,15 +132,9 @@ TEST_F(VideoProcessingTest, Resampler) {
vp_->EnableTemporalDecimation(false);
// Reading test frame
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
ASSERT_EQ(static_cast<size_t>(size_y_),
fread(buffer->MutableDataY(), 1, size_y_, source_file_));
ASSERT_EQ(static_cast<size_t>(size_uv_),
fread(buffer->MutableDataU(), 1, size_uv_, source_file_));
ASSERT_EQ(static_cast<size_t>(size_uv_),
fread(buffer->MutableDataV(), 1, size_uv_, source_file_));
rtc::scoped_refptr<VideoFrameBuffer> video_buffer(
test::ReadI420Buffer(width_, height_, source_file_));
ASSERT_TRUE(video_buffer);
for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
// Initiate test timer.
@ -149,36 +142,36 @@ TEST_F(VideoProcessingTest, Resampler) {
// Init the sourceFrame with a timestamp.
int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms,
VideoFrame video_frame(video_buffer, time_start_ms * 90, time_start_ms,
webrtc::kVideoRotation_0);
// Test scaling to different sizes: source is of |width|/|height| = 352/288.
// Pure scaling:
TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_);
TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_);
TestSize(video_frame, *video_buffer, width_ / 4, height_ / 4, 25.2, vp_);
TestSize(video_frame, *video_buffer, width_ / 2, height_ / 2, 28.1, vp_);
// No resampling:
TestSize(video_frame, buffer, width_, height_, -1, vp_);
TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_);
TestSize(video_frame, *video_buffer, width_, height_, -1, vp_);
TestSize(video_frame, *video_buffer, 2 * width_, 2 * height_, 32.2, vp_);
// Scaling and cropping. The cropped source frame is the largest center
// aligned region that can be used from the source while preserving aspect
// ratio.
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176),
TestSize(video_frame, *CropBuffer(video_buffer, 0, 56, 352, 176),
100, 50, 24.0, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225),
TestSize(video_frame, *CropBuffer(video_buffer, 0, 30, 352, 225),
400, 256, 31.3, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288),
TestSize(video_frame, *CropBuffer(video_buffer, 68, 0, 216, 288),
480, 640, 32.15, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264),
TestSize(video_frame, *CropBuffer(video_buffer, 0, 12, 352, 264),
960, 720, 32.2, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198),
TestSize(video_frame, *CropBuffer(video_buffer, 0, 44, 352, 198),
1280, 720, 32.15, vp_);
// Upsampling to odd size.
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233),
TestSize(video_frame, *CropBuffer(video_buffer, 0, 26, 352, 233),
501, 333, 32.05, vp_);
// Downsample to odd size.
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219),
TestSize(video_frame, *CropBuffer(video_buffer, 0, 34, 352, 219),
281, 175, 29.3, vp_);
// Stop timer.
@ -219,20 +212,18 @@ void PreprocessFrameAndVerify(const VideoFrame& source,
}
rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
int source_width,
int source_height,
rtc::scoped_refptr<VideoFrameBuffer> source_buffer,
int offset_x,
int offset_y,
int cropped_width,
int cropped_height) {
// Force even.
offset_x &= 1;
offset_y &= 1;
offset_x &= ~1;
offset_y &= ~1;
size_t y_start = offset_x + offset_y * source_buffer->StrideY();
size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideV();
return rtc::scoped_refptr<VideoFrameBuffer>(
new rtc::RefCountedObject<WrappedI420Buffer>(
@ -243,7 +234,7 @@ rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
}
void TestSize(const VideoFrame& source_frame,
const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
const VideoFrameBuffer& cropped_source,
int target_width,
int target_height,
double expected_psnr,
@ -260,14 +251,14 @@ void TestSize(const VideoFrame& source_frame,
// Scale |resampled_source_frame| back to the source scale.
VideoFrame resampled_source_frame;
resampled_source_frame.CopyFrame(*out_frame);
PreprocessFrameAndVerify(resampled_source_frame,
cropped_source_buffer->width(),
cropped_source_buffer->height(), vpm, out_frame);
// Compute PSNR against the cropped source frame and check expectation.
PreprocessFrameAndVerify(resampled_source_frame, cropped_source.width(),
cropped_source.height(), vpm, out_frame);
WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);
// Compute PSNR against the cropped source frame and check expectation.
double psnr =
I420PSNR(*cropped_source_buffer, *out_frame->video_frame_buffer());
I420PSNR(cropped_source, *out_frame->video_frame_buffer());
EXPECT_GT(psnr, expected_psnr);
printf(
"PSNR: %f. PSNR is between source of size %d %d, and a modified "

View File

@ -128,6 +128,7 @@ rtc_source_set("test_support") {
]
deps = [
":video_test_common",
"../base:gtest_prod",
"../base:rtc_base_approved",
"../common_video",
@ -320,7 +321,6 @@ rtc_source_set("test_common") {
deps = [
":rtp_test_utils",
":test_support",
":video_test_common",
"..:webrtc_common",
"../audio",
"../base:rtc_base_approved",

View File

@ -18,7 +18,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "libyuv/convert.h"
#include "webrtc/test/frame_utils.h"
namespace webrtc {
namespace test {
@ -101,35 +101,21 @@ class YuvFileGenerator : public FrameGenerator {
return temp_frame_.get();
}
// TODO(nisse): Have a frame reader in one place. And read directly
// into the planes of an I420Buffer, the extra copying below is silly.
void ReadNextFrame() {
size_t bytes_read =
fread(frame_buffer_.get(), 1, frame_size_, files_[file_index_]);
if (bytes_read < frame_size_) {
last_read_buffer_ =
test::ReadI420Buffer(static_cast<int>(width_),
static_cast<int>(height_),
files_[file_index_]);
if (!last_read_buffer_) {
// No more frames to read in this file, rewind and move to next file.
rewind(files_[file_index_]);
file_index_ = (file_index_ + 1) % files_.size();
bytes_read = fread(frame_buffer_.get(), 1, frame_size_,
files_[file_index_]);
assert(bytes_read >= frame_size_);
last_read_buffer_ =
test::ReadI420Buffer(static_cast<int>(width_),
static_cast<int>(height_),
files_[file_index_]);
RTC_CHECK(last_read_buffer_);
}
size_t half_width = (width_ + 1) / 2;
size_t size_y = width_ * height_;
size_t size_uv = half_width * ((height_ + 1) / 2);
last_read_buffer_ = I420Buffer::Create(
static_cast<int>(width_), static_cast<int>(height_),
static_cast<int>(width_), static_cast<int>(half_width),
static_cast<int>(half_width));
libyuv::I420Copy(
frame_buffer_.get(), static_cast<int>(width_),
frame_buffer_.get() + size_y, static_cast<int>(half_width),
frame_buffer_.get() + size_y + size_uv, static_cast<int>(half_width),
last_read_buffer_->MutableDataY(), last_read_buffer_->StrideY(),
last_read_buffer_->MutableDataU(), last_read_buffer_->StrideU(),
last_read_buffer_->MutableDataV(), last_read_buffer_->StrideV(),
static_cast<int>(width_), static_cast<int>(height_));
}
private:

View File

@ -72,5 +72,22 @@ bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
half_width, half_height);
}
// Reads one tightly-packed I420 frame of the given dimensions from |f|.
// Returns nullptr if any plane cannot be read in full (end of file or a
// read error); callers can distinguish the two via ferror(f).
rtc::scoped_refptr<I420Buffer> ReadI420Buffer(int width, int height, FILE *f) {
  const int chroma_width = (width + 1) / 2;
  const int chroma_height = (height + 1) / 2;
  // Strides equal the plane widths, i.e. no padding between rows, so each
  // plane can be filled with a single fread call.
  rtc::scoped_refptr<I420Buffer> frame(
      I420Buffer::Create(width, height, width, chroma_width, chroma_width));
  const size_t luma_bytes = static_cast<size_t>(width) * height;
  const size_t chroma_bytes =
      static_cast<size_t>(chroma_width) * chroma_height;
  // Short-circuit keeps the Y, U, V read order and bails out on the first
  // short read, exactly as three separate early returns would.
  if (fread(frame->MutableDataY(), 1, luma_bytes, f) < luma_bytes ||
      fread(frame->MutableDataU(), 1, chroma_bytes, f) < chroma_bytes ||
      fread(frame->MutableDataV(), 1, chroma_bytes, f) < chroma_bytes) {
    return nullptr;
  }
  return frame;
}
} // namespace test
} // namespace webrtc

View File

@ -14,6 +14,7 @@
#include "webrtc/base/scoped_ref_ptr.h"
namespace webrtc {
class I420Buffer;
class VideoFrame;
class VideoFrameBuffer;
namespace test {
@ -38,6 +39,8 @@ bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2);
bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f2);
rtc::scoped_refptr<I420Buffer> ReadI420Buffer(int width, int height, FILE *);
} // namespace test
} // namespace webrtc

View File

@ -81,6 +81,7 @@
'<(webrtc_root)/base/base.gyp:rtc_base_approved',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'video_test_common',
],
'sources': [
'gmock.h',
@ -211,7 +212,6 @@
'<(webrtc_root)/webrtc.gyp:webrtc',
'rtp_test_utils',
'test_support',
'video_test_common',
],
},
{

View File

@ -12,15 +12,17 @@
#include <assert.h>
#include "webrtc/test/frame_utils.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc {
namespace test {
FrameReaderImpl::FrameReaderImpl(std::string input_filename,
size_t frame_length_in_bytes)
int width, int height)
: input_filename_(input_filename),
frame_length_in_bytes_(frame_length_in_bytes),
width_(width), height_(height),
input_file_(NULL) {
}
@ -29,11 +31,14 @@ FrameReaderImpl::~FrameReaderImpl() {
}
bool FrameReaderImpl::Init() {
if (frame_length_in_bytes_ <= 0) {
fprintf(stderr, "Frame length must be >0, was %zu\n",
frame_length_in_bytes_);
if (width_ <= 0 || height_ <= 0) {
fprintf(stderr, "Frame width and height must be >0, was %d x %d\n",
width_, height_);
return false;
}
frame_length_in_bytes_ =
width_ * height_ + 2 * ((width_ + 1) / 2) * ((height_ + 1) / 2);
input_file_ = fopen(input_filename_.c_str(), "rb");
if (input_file_ == NULL) {
fprintf(stderr, "Couldn't open input file for reading: %s\n",
@ -58,24 +63,18 @@ void FrameReaderImpl::Close() {
}
}
bool FrameReaderImpl::ReadFrame(uint8_t* source_buffer) {
assert(source_buffer);
rtc::scoped_refptr<I420Buffer> FrameReaderImpl::ReadFrame() {
if (input_file_ == NULL) {
fprintf(stderr, "FrameReader is not initialized (input file is NULL)\n");
return false;
return nullptr;
}
size_t nbr_read = fread(source_buffer, 1, frame_length_in_bytes_,
input_file_);
if (nbr_read != static_cast<unsigned int>(frame_length_in_bytes_) &&
ferror(input_file_)) {
rtc::scoped_refptr<I420Buffer> buffer(
ReadI420Buffer(width_, height_, input_file_));
if (!buffer && ferror(input_file_)) {
fprintf(stderr, "Error reading from input file: %s\n",
input_filename_.c_str());
return false;
}
if (feof(input_file_) != 0) {
return false; // No more frames to process.
}
return true;
return buffer;
}
size_t FrameReaderImpl::FrameLength() { return frame_length_in_bytes_; }

View File

@ -15,12 +15,14 @@
#include <string>
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
class I420Buffer;
namespace test {
// Handles reading of frames from video files.
// Handles reading of I420 frames from video files.
class FrameReader {
public:
virtual ~FrameReader() {}
@ -30,11 +32,9 @@ class FrameReader {
// Returns false if an error has occurred, in addition to printing to stderr.
virtual bool Init() = 0;
// Reads a frame into the supplied buffer, which must contain enough space
// for the frame size.
// Returns true if there are more frames to read, false if we've already
// read the last frame (in the previous call).
virtual bool ReadFrame(uint8_t* source_buffer) = 0;
// Reads a frame from the input file. On success, returns the frame.
// Returns nullptr if encountering end of file or a read error.
virtual rtc::scoped_refptr<I420Buffer> ReadFrame() = 0;
// Closes the input file if open. Essentially makes this class impossible
// to use anymore. Will also be invoked by the destructor.
@ -51,12 +51,11 @@ class FrameReaderImpl : public FrameReader {
// Creates a file handler. The input file is assumed to exist and be readable.
// Parameters:
// input_filename The file to read from.
// frame_length_in_bytes The size of each frame.
// For YUV this is 3 * width * height / 2
FrameReaderImpl(std::string input_filename, size_t frame_length_in_bytes);
// width, height Size of each frame to read.
FrameReaderImpl(std::string input_filename, int width, int height);
~FrameReaderImpl() override;
bool Init() override;
bool ReadFrame(uint8_t* source_buffer) override;
rtc::scoped_refptr<I420Buffer> ReadFrame() override;
void Close() override;
size_t FrameLength() override;
int NumberOfFrames() override;
@ -64,6 +63,8 @@ class FrameReaderImpl : public FrameReader {
private:
std::string input_filename_;
size_t frame_length_in_bytes_;
int width_;
int height_;
int number_of_frames_;
FILE* input_file_;
};

View File

@ -12,14 +12,13 @@
#include "webrtc/test/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
namespace webrtc {
namespace test {
const std::string kInputFileContents = "baz";
// Setting the kFrameLength value to a value much larger than the
// file to test causes the ReadFrame test to fail on Windows.
const size_t kFrameLength = 1000;
const size_t kFrameLength = 3;
class FrameReaderTest: public testing::Test {
protected:
@ -33,7 +32,7 @@ class FrameReaderTest: public testing::Test {
fprintf(dummy, "%s", kInputFileContents.c_str());
fclose(dummy);
frame_reader_ = new FrameReaderImpl(temp_filename_, kFrameLength);
frame_reader_ = new FrameReaderImpl(temp_filename_, 1, 1);
ASSERT_TRUE(frame_reader_->Init());
}
void TearDown() {
@ -46,25 +45,25 @@ class FrameReaderTest: public testing::Test {
};
TEST_F(FrameReaderTest, InitSuccess) {
FrameReaderImpl frame_reader(temp_filename_, kFrameLength);
FrameReaderImpl frame_reader(temp_filename_, 1, 1);
ASSERT_TRUE(frame_reader.Init());
ASSERT_EQ(kFrameLength, frame_reader.FrameLength());
ASSERT_EQ(0, frame_reader.NumberOfFrames());
ASSERT_EQ(1, frame_reader.NumberOfFrames());
}
TEST_F(FrameReaderTest, ReadFrame) {
uint8_t buffer[3];
bool result = frame_reader_->ReadFrame(buffer);
ASSERT_FALSE(result); // No more files to read.
ASSERT_EQ(kInputFileContents[0], buffer[0]);
ASSERT_EQ(kInputFileContents[1], buffer[1]);
ASSERT_EQ(kInputFileContents[2], buffer[2]);
rtc::scoped_refptr<VideoFrameBuffer> buffer;
buffer = frame_reader_->ReadFrame();
ASSERT_TRUE(buffer);
ASSERT_EQ(kInputFileContents[0], buffer->DataY()[0]);
ASSERT_EQ(kInputFileContents[1], buffer->DataU()[0]);
ASSERT_EQ(kInputFileContents[2], buffer->DataV()[0]);
ASSERT_FALSE(frame_reader_->ReadFrame()); // End of file
}
TEST_F(FrameReaderTest, ReadFrameUninitialized) {
uint8_t buffer[3];
FrameReaderImpl file_reader(temp_filename_, kFrameLength);
ASSERT_FALSE(file_reader.ReadFrame(buffer));
FrameReaderImpl file_reader(temp_filename_, 1, 1);
ASSERT_FALSE(file_reader.ReadFrame());
}
} // namespace test

View File

@ -17,6 +17,7 @@
#include <memory>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/test/frame_utils.h"
#include "webrtc/video_frame.h"
#include "libyuv/convert.h"
@ -110,44 +111,17 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
int frame_number = 0;
// Read reference and test frames.
const size_t frame_length = 3 * width * height >> 1;
rtc::scoped_refptr<I420Buffer> ref_i420_buffer;
rtc::scoped_refptr<I420Buffer> test_i420_buffer;
std::unique_ptr<uint8_t[]> ref_buffer(new uint8_t[frame_length]);
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[frame_length]);
for (;;) {
rtc::scoped_refptr<I420Buffer> ref_i420_buffer(
test::ReadI420Buffer(width, height, ref_fp));
if (!ref_i420_buffer)
break;
// Set decoded image parameters.
int half_width = (width + 1) / 2;
ref_i420_buffer =
I420Buffer::Create(width, height, width, half_width, half_width);
test_i420_buffer =
I420Buffer::Create(width, height, width, half_width, half_width);
rtc::scoped_refptr<I420Buffer> test_i420_buffer(
test::ReadI420Buffer(width, height, test_fp));
// TODO(nisse): Have a frame reader in one place. And read directly
// into the planes of an I420Buffer, the extra copying below is silly.
size_t ref_bytes = fread(ref_buffer.get(), 1, frame_length, ref_fp);
size_t test_bytes = fread(test_buffer.get(), 1, frame_length, test_fp);
while (ref_bytes == frame_length && test_bytes == frame_length) {
// Converting from buffer to plane representation.
size_t size_y = width * height;
size_t size_uv = half_width * ((height + 1) / 2);
libyuv::I420Copy(
ref_buffer.get(), width,
ref_buffer.get() + size_y, half_width,
ref_buffer.get() + size_y + size_uv, half_width,
ref_i420_buffer->MutableDataY(), ref_i420_buffer->StrideY(),
ref_i420_buffer->MutableDataU(), ref_i420_buffer->StrideU(),
ref_i420_buffer->MutableDataV(), ref_i420_buffer->StrideV(),
width, height);
libyuv::I420Copy(
test_buffer.get(), width,
test_buffer.get() + size_y, half_width,
test_buffer.get() + size_y + size_uv, half_width,
test_i420_buffer->MutableDataY(), test_i420_buffer->StrideY(),
test_i420_buffer->MutableDataU(), test_i420_buffer->StrideU(),
test_i420_buffer->MutableDataV(), test_i420_buffer->StrideV(),
width, height);
if (!test_i420_buffer)
break;
switch (video_metrics_type) {
case kPSNR:
@ -166,8 +140,6 @@ int CalculateMetrics(VideoMetricsType video_metrics_type,
break;
}
frame_number++;
ref_bytes = fread(ref_buffer.get(), 1, frame_length, ref_fp);
test_bytes = fread(test_buffer.get(), 1, frame_length, test_fp);
}
int return_code = 0;
if (frame_number == 0) {

View File

@ -21,7 +21,7 @@ namespace test {
class MockFrameReader : public FrameReader {
public:
MOCK_METHOD0(Init, bool());
MOCK_METHOD1(ReadFrame, bool(uint8_t* source_buffer));
MOCK_METHOD0(ReadFrame, rtc::scoped_refptr<I420Buffer>());
MOCK_METHOD0(Close, void());
MOCK_METHOD0(FrameLength, size_t());
MOCK_METHOD0(NumberOfFrames, int());