Reland of Move MutableDataY{,U,V} methods to I420Buffer only. (patchset #1 id:1 of https://codereview.webrtc.org/2354223002/ )
Reason for revert:
Downstream application now fixed.
Original issue's description:
> Revert of Move MutableDataY{,U,V} methods to I420Buffer only. (patchset #14 id:260001 of https://codereview.webrtc.org/2278883002/ )
>
> Reason for revert:
> Broke downstream application.
>
> Original issue's description:
> > Move MutableDataY{,U,V} methods to I420Buffer only.
> >
> > Deleted from the VideoFrameBuffer base class.
> >
> > BUG=webrtc:5921
> >
> > Committed: https://crrev.com/5539ef6c03c273f39fadae41ace47fdc11ac6d60
> > Cr-Commit-Position: refs/heads/master@{#14317}
>
> TBR=perkj@webrtc.org,magjed@webrtc.org,pthatcher@webrtc.org,honghaiz@webrtc.org,stefan@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 days ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:5921
>
> Committed: https://crrev.com/776870a2599b8f43ad56987f9031690e3ccecde8
> Cr-Commit-Position: refs/heads/master@{#14325}
TBR=perkj@webrtc.org,magjed@webrtc.org,pthatcher@webrtc.org,honghaiz@webrtc.org,stefan@webrtc.org
# Not skipping CQ checks because original CL landed more than 1 days ago.
BUG=webrtc:5921
Review-Url: https://codereview.webrtc.org/2372483002
Cr-Commit-Position: refs/heads/master@{#14389}
parent c637389949
commit 64ec8f826f
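The deleted base-class methods mean that write access to pixel data is now an I420Buffer-only operation. For downstream callers the migration is mechanical: allocate an I420Buffer explicitly, write the planes through its MutableData{Y,U,V}() and Stride{Y,U,V}() accessors, and wrap the buffer in a VideoFrame. A minimal sketch of the pattern, using only calls that appear in the diff below (the width/height values and the zero fill are placeholders, not part of this CL):

// Before (removed by this CL): mutate pixels through the base class.
//   frame.video_frame_buffer()->MutableDataY();

// After: allocate the concrete buffer, fill it, then wrap it in a frame.
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
    webrtc::I420Buffer::Create(width, height);
memset(buffer->MutableDataY(), 0, buffer->StrideY() * height);
memset(buffer->MutableDataU(), 0, buffer->StrideU() * ((height + 1) / 2));
memset(buffer->MutableDataV(), 0, buffer->StrideV() * ((height + 1) / 2));
webrtc::VideoFrame frame(buffer, 0 /* timestamp */, 0 /* render_time_ms */,
                         webrtc::kVideoRotation_0);

Callers that only need zeroed planes can use I420Buffer::InitializeData() instead of the memsets, as the updated simulcast tests in this diff do.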
@@ -35,7 +35,7 @@ CoreVideoFrameBuffer::NativeToI420Buffer() {
size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer_, 0);
size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer_, 0);
// TODO(tkchin): Use a frame buffer pool.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
CVPixelBufferLockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
const uint8_t* src_y = static_cast<const uint8_t*>(

@@ -52,7 +52,7 @@ TEST(TestI420BufferPool, FailToReuse) {
}

TEST(TestI420BufferPool, FrameValidAfterPoolDestruction) {
rtc::scoped_refptr<VideoFrameBuffer> buffer;
rtc::scoped_refptr<I420Buffer> buffer;
{
I420BufferPool pool;
buffer = pool.CreateBuffer(16, 16);

@@ -162,16 +162,14 @@ TEST(TestVideoFrame, CopyFrame) {
EXPECT_EQ(kRotation, small_frame.rotation());

// Frame of larger dimensions.
small_frame.CreateEmptyFrame(width, height,
stride_y, stride_u, stride_v);
memset(small_frame.video_frame_buffer()->MutableDataY(), 1,
small_frame.allocated_size(kYPlane));
memset(small_frame.video_frame_buffer()->MutableDataU(), 2,
small_frame.allocated_size(kUPlane));
memset(small_frame.video_frame_buffer()->MutableDataV(), 3,
small_frame.allocated_size(kVPlane));
big_frame.CopyFrame(small_frame);
EXPECT_TRUE(test::FramesEqual(small_frame, big_frame));
rtc::scoped_refptr<I420Buffer> buffer =
I420Buffer::Create(width, height, stride_y, stride_u, stride_v);
memset(buffer->MutableDataY(), 1, width * height);
memset(buffer->MutableDataU(), 2, ((height + 1) / 2) * stride_u);
memset(buffer->MutableDataV(), 3, ((height + 1) / 2) * stride_u);
VideoFrame other_frame(buffer, 0, 0, webrtc::kVideoRotation_0);
big_frame.CopyFrame(other_frame);
EXPECT_TRUE(test::FramesEqual(other_frame, big_frame));
}

TEST(TestVideoFrame, ShallowCopy) {

@@ -45,12 +45,6 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
virtual const uint8_t* DataU() const = 0;
virtual const uint8_t* DataV() const = 0;

// TODO(nisse): Move MutableData methods to the I420Buffer subclass.
// Non-const data access.
virtual uint8_t* MutableDataY();
virtual uint8_t* MutableDataU();
virtual uint8_t* MutableDataV();

// Returns the number of bytes between successive rows for a given plane.
virtual int StrideY() const = 0;
virtual int StrideU() const = 0;

@@ -98,9 +92,9 @@ class I420Buffer : public VideoFrameBuffer {
const uint8_t* DataU() const override;
const uint8_t* DataV() const override;

uint8_t* MutableDataY() override;
uint8_t* MutableDataU() override;
uint8_t* MutableDataV() override;
uint8_t* MutableDataY();
uint8_t* MutableDataU();
uint8_t* MutableDataV();
int StrideY() const override;
int StrideU() const override;
int StrideV() const override;

@@ -69,6 +69,7 @@ size_t CalcBufferSize(VideoType type, int width, int height);
// already open for writing.
// Return value: 0 if OK, < 0 otherwise.
int PrintVideoFrame(const VideoFrame& frame, FILE* file);
int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file);

// Extract buffer from VideoFrame or VideoFrameBuffer (consecutive
// planes, no stride)

@@ -92,11 +93,13 @@ int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer);
// - sample_size : Required only for the parsing of MJPG (set to 0 else).
// - rotate : Rotation mode of output image.
// Output:
// - dst_frame : Reference to a destination frame.
// - dst_buffer : Reference to a destination frame buffer.
// Return value: 0 if OK, < 0 otherwise.

// TODO(nisse): Deprecated, see
// https://bugs.chromium.org/p/webrtc/issues/detail?id=5921.
// TODO(nisse): Delete this wrapper, and let users call libyuv directly. Most
// calls pass |src_video_type| == kI420, and should use libyuv::I420Copy. The
// only exception at the time of this writing is
// VideoCaptureImpl::IncomingFrame, which still needs libyuv::ConvertToI420.
int ConvertToI420(VideoType src_video_type,
const uint8_t* src_frame,
int crop_x,

@@ -105,7 +108,7 @@ int ConvertToI420(VideoType src_video_type,
int src_height,
size_t sample_size,
VideoRotation rotation,
VideoFrame* dst_frame);
I420Buffer* dst_buffer);

// Convert From I420
// Input:
@@ -95,21 +95,20 @@ TEST_F(TestLibYuv, ConvertTest) {

double psnr = 0.0;

VideoFrame res_i420_frame;
res_i420_frame.CreateEmptyFrame(width_, height_, width_,
(width_ + 1) / 2,
(width_ + 1) / 2);
rtc::scoped_refptr<I420Buffer> res_i420_buffer = I420Buffer::Create(
width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);

printf("\nConvert #%d I420 <-> I420 \n", j);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0, out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));

if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
j++;

@@ -119,17 +118,18 @@ TEST_F(TestLibYuv, ConvertTest) {
int stride_y = 0;
int stride_uv = 0;
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
res_i420_frame.CreateEmptyFrame(width_, height_, stride_y,
stride_uv, stride_uv);
res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB24, 0, res_rgb_buffer2.get()));

EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));

if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);

// Optimization Speed- quality trade-off => 45 dB only (platform dependant).
EXPECT_GT(ceil(psnr), 44);

@@ -137,44 +137,47 @@ TEST_F(TestLibYuv, ConvertTest) {

printf("\nConvert #%d I420 <-> UYVY\n", j);
std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kUYVY, 0, out_uyvy_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
j++;

printf("\nConvert #%d I420 <-> YUY2\n", j);
std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kYUY2, 0, out_yuy2_buffer.get()));

EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
height_, 0,
kVideoRotation_0, res_i420_buffer.get()));

if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}

psnr = I420PSNR(&orig_frame_, &res_i420_frame);
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);

printf("\nConvert #%d I420 <-> RGB565\n", j);
std::unique_ptr<uint8_t[]> out_rgb565_buffer(
new uint8_t[width_ * height_ * 2]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kRGB565, 0,
out_rgb565_buffer.get()));
EXPECT_EQ(0,
ConvertFromI420(orig_frame_, kRGB565, 0, out_rgb565_buffer.get()));

EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
height_, 0,
kVideoRotation_0, res_i420_buffer.get()));
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
j++;

psnr = I420PSNR(&orig_frame_, &res_i420_frame);
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
// TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
// Another example is I420ToRGB24, the psnr is 44
// TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB.

@@ -183,18 +186,20 @@ TEST_F(TestLibYuv, ConvertTest) {
printf("\nConvert #%d I420 <-> ARGB8888\n", j);
std::unique_ptr<uint8_t[]> out_argb8888_buffer(
new uint8_t[width_ * height_ * 4]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kARGB, 0,
out_argb8888_buffer.get()));
EXPECT_EQ(0,
ConvertFromI420(orig_frame_, kARGB, 0, out_argb8888_buffer.get()));

EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));

if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}

psnr = I420PSNR(&orig_frame_, &res_i420_frame);
// TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
// TODO(leozwang) Investigate the right psnr should be set for
// I420ToARGB8888,
EXPECT_GT(ceil(psnr), 42);

ASSERT_EQ(0, fclose(output_file));
@@ -209,49 +214,48 @@ TEST_F(TestLibYuv, ConvertAlignedFrame) {

double psnr = 0.0;

VideoFrame res_i420_frame;
int stride_y = 0;
int stride_uv = 0;
Calc16ByteAlignedStride(width_, &stride_y, &stride_uv);
res_i420_frame.CreateEmptyFrame(width_, height_,
stride_y, stride_uv, stride_uv);

rtc::scoped_refptr<I420Buffer> res_i420_buffer =
I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv);
std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]);
EXPECT_EQ(0, ConvertFromI420(orig_frame_, kI420, 0,
out_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, out_i420_buffer.get(), 0, 0, width_,
height_, 0, kVideoRotation_0, &res_i420_frame));
height_, 0, kVideoRotation_0,
res_i420_buffer.get()));

if (PrintVideoFrame(res_i420_frame, output_file) < 0) {
if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) {
return;
}
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
psnr = I420PSNR(*orig_frame_.video_frame_buffer(), *res_i420_buffer);
EXPECT_EQ(48.0, psnr);
}


TEST_F(TestLibYuv, RotateTest) {
// Use ConvertToI420 for multiple roatations - see that nothing breaks, all
// Use ConvertToI420 for multiple rotations - see that nothing breaks, all
// memory is properly allocated and end result is equal to the starting point.
VideoFrame rotated_res_i420_frame;
int rotated_width = height_;
int rotated_height = width_;
int stride_y;
int stride_uv;
Calc16ByteAlignedStride(rotated_width, &stride_y, &stride_uv);
rotated_res_i420_frame.CreateEmptyFrame(rotated_width,
rotated_height,
stride_y,
stride_uv,
stride_uv);
rtc::scoped_refptr<I420Buffer> rotated_res_i420_buffer = I420Buffer::Create(
rotated_width, rotated_height, stride_y, stride_uv, stride_uv);
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_90, &rotated_res_i420_frame));
0, kVideoRotation_90,
rotated_res_i420_buffer.get()));
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_270, &rotated_res_i420_frame));
rotated_res_i420_frame.CreateEmptyFrame(width_, height_,
width_, (width_ + 1) / 2,
(width_ + 1) / 2);
0, kVideoRotation_270,
rotated_res_i420_buffer.get()));
rotated_res_i420_buffer = I420Buffer::Create(
width_, height_, width_, (width_ + 1) / 2, (width_ + 1) / 2);
EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer_.get(), 0, 0, width_, height_,
0, kVideoRotation_180, &rotated_res_i420_frame));
0, kVideoRotation_180,
rotated_res_i420_buffer.get()));
}

} // namespace webrtc

@@ -103,33 +103,35 @@ static int PrintPlane(const uint8_t* buf,
}

// TODO(nisse): Belongs with the test code?
int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
if (file == NULL)
return -1;
if (frame.IsZeroSize())
return -1;
int width = frame.video_frame_buffer()->width();
int height = frame.video_frame_buffer()->height();
int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file) {
int width = frame.width();
int height = frame.height();
int chroma_width = (width + 1) / 2;
int chroma_height = (height + 1) / 2;

if (PrintPlane(frame.video_frame_buffer()->DataY(), width, height,
frame.video_frame_buffer()->StrideY(), file) < 0) {
if (PrintPlane(frame.DataY(), width, height,
frame.StrideY(), file) < 0) {
return -1;
}
if (PrintPlane(frame.video_frame_buffer()->DataU(),
if (PrintPlane(frame.DataU(),
chroma_width, chroma_height,
frame.video_frame_buffer()->StrideU(), file) < 0) {
frame.StrideU(), file) < 0) {
return -1;
}
if (PrintPlane(frame.video_frame_buffer()->DataV(),
if (PrintPlane(frame.DataV(),
chroma_width, chroma_height,
frame.video_frame_buffer()->StrideV(), file) < 0) {
frame.StrideV(), file) < 0) {
return -1;
}
return 0;
}

int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
if (frame.IsZeroSize())
return -1;
return PrintVideoFrame(*frame.video_frame_buffer(), file);
}

int ExtractBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& input_frame,
size_t size,
uint8_t* buffer) {
@@ -249,23 +251,19 @@ int ConvertToI420(VideoType src_video_type,
int src_height,
size_t sample_size,
VideoRotation rotation,
VideoFrame* dst_frame) {
int dst_width = dst_frame->width();
int dst_height = dst_frame->height();
I420Buffer* dst_buffer) {
int dst_width = dst_buffer->width();
int dst_height = dst_buffer->height();
// LibYuv expects pre-rotation values for dst.
// Stride values should correspond to the destination values.
if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
dst_width = dst_frame->height();
dst_height = dst_frame->width();
std::swap(dst_width, dst_height);
}
return libyuv::ConvertToI420(
src_frame, sample_size,
dst_frame->video_frame_buffer()->MutableDataY(),
dst_frame->video_frame_buffer()->StrideY(),
dst_frame->video_frame_buffer()->MutableDataU(),
dst_frame->video_frame_buffer()->StrideU(),
dst_frame->video_frame_buffer()->MutableDataV(),
dst_frame->video_frame_buffer()->StrideV(),
dst_buffer->MutableDataY(), dst_buffer->StrideY(),
dst_buffer->MutableDataU(), dst_buffer->StrideU(),
dst_buffer->MutableDataV(), dst_buffer->StrideV(),
crop_x, crop_y,
src_width, src_height,
dst_width, dst_height,

@@ -87,10 +87,18 @@ void VideoFrame::CreateFrame(const uint8_t* buffer_y,
const int expected_size_y = height * stride_y;
const int expected_size_u = half_height * stride_u;
const int expected_size_v = half_height * stride_v;
CreateEmptyFrame(width, height, stride_y, stride_u, stride_v);
memcpy(video_frame_buffer_->MutableDataY(), buffer_y, expected_size_y);
memcpy(video_frame_buffer_->MutableDataU(), buffer_u, expected_size_u);
memcpy(video_frame_buffer_->MutableDataV(), buffer_v, expected_size_v);
// Allocate a new buffer.
rtc::scoped_refptr<I420Buffer> buffer_ =
I420Buffer::Create(width, height, stride_y, stride_u, stride_v);

memcpy(buffer_->MutableDataY(), buffer_y, expected_size_y);
memcpy(buffer_->MutableDataU(), buffer_u, expected_size_u);
memcpy(buffer_->MutableDataV(), buffer_v, expected_size_v);

video_frame_buffer_ = buffer_;
timestamp_rtp_ = 0;
ntp_time_ms_ = 0;
timestamp_us_ = 0;
rotation_ = rotation;
}

@@ -31,19 +31,6 @@ int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {

} // namespace

uint8_t* VideoFrameBuffer::MutableDataY() {
RTC_NOTREACHED();
return nullptr;
}
uint8_t* VideoFrameBuffer::MutableDataU() {
RTC_NOTREACHED();
return nullptr;
}
uint8_t* VideoFrameBuffer::MutableDataV() {
RTC_NOTREACHED();
return nullptr;
}

VideoFrameBuffer::~VideoFrameBuffer() {}

I420Buffer::I420Buffer(int width, int height)

@@ -453,7 +453,6 @@ class VideoFrameTest : public testing::Test {
static bool IsEqual(const cricket::VideoFrame& frame,
int width,
int height,
int64_t timestamp_us,
const uint8_t* y,
uint32_t ypitch,
const uint8_t* u,

@@ -462,7 +461,6 @@ class VideoFrameTest : public testing::Test {
uint32_t vpitch,
int max_error) {
return IsSize(frame, width, height) &&
frame.timestamp_us() == timestamp_us &&
IsPlaneEqual("y", frame.video_frame_buffer()->DataY(),
frame.video_frame_buffer()->StrideY(), y, ypitch,
static_cast<uint32_t>(width),

@@ -480,15 +478,25 @@ class VideoFrameTest : public testing::Test {
static bool IsEqual(const cricket::VideoFrame& frame1,
const cricket::VideoFrame& frame2,
int max_error) {
return IsEqual(frame1,
return frame1.timestamp_us() == frame2.timestamp_us() &&
IsEqual(frame1,
frame2.width(), frame2.height(),
frame2.timestamp_us(),
frame2.video_frame_buffer()->DataY(),
frame2.video_frame_buffer()->StrideY(),
frame2.video_frame_buffer()->DataU(),
frame2.video_frame_buffer()->StrideU(),
frame2.video_frame_buffer()->DataV(),
frame2.video_frame_buffer()->StrideV(),
frame2.video_frame_buffer()->StrideV(), max_error);
}

static bool IsEqual(
const cricket::VideoFrame& frame1,
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int max_error) {
return IsEqual(frame1, buffer->width(), buffer->height(),
buffer->DataY(), buffer->StrideY(),
buffer->DataU(), buffer->StrideU(),
buffer->DataV(), buffer->StrideV(),
max_error);
}
@@ -497,10 +505,10 @@ class VideoFrameTest : public testing::Test {
int hcrop, int vcrop, int max_error) {
return frame1.width() <= frame2.width() &&
frame1.height() <= frame2.height() &&
frame1.timestamp_us() == frame2.timestamp_us() &&
IsEqual(frame1,
frame2.width() - hcrop * 2,
frame2.height() - vcrop * 2,
frame2.timestamp_us(),
frame2.video_frame_buffer()->DataY()
+ vcrop * frame2.video_frame_buffer()->StrideY()
+ hcrop,

@@ -539,8 +547,8 @@ class VideoFrameTest : public testing::Test {
const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
const uint8_t* u = y + kWidth * kHeight;
const uint8_t* v = u + kWidth * kHeight / 4;
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
kWidth / 2, v, kWidth / 2, 0));
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
kWidth / 2, 0));
}

// Test constructing an image from a YV12 buffer.

@@ -554,8 +562,8 @@ class VideoFrameTest : public testing::Test {
const uint8_t* y = reinterpret_cast<uint8_t*>(ms.get()->GetBuffer());
const uint8_t* v = y + kWidth * kHeight;
const uint8_t* u = v + kWidth * kHeight / 4;
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 0, y, kWidth, u,
kWidth / 2, v, kWidth / 2, 0));
EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, y, kWidth, u, kWidth / 2, v,
kWidth / 2, 0));
}

// Test constructing an image from a I422 buffer.

@@ -772,7 +780,8 @@ class VideoFrameTest : public testing::Test {
// Macro to help test different rotations
#define TEST_MIRROR(FOURCC, BPP) \
void Construct##FOURCC##Mirror() { \
T frame1, frame2, frame3; \
T frame1, frame2; \
rtc::scoped_refptr<webrtc::I420Buffer> res_buffer; \
std::unique_ptr<rtc::MemoryStream> ms( \
CreateYuvSample(kWidth, kHeight, BPP)); \
ASSERT_TRUE(ms.get() != NULL); \

@@ -788,21 +797,18 @@ class VideoFrameTest : public testing::Test {
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
frame3.InitToEmptyBuffer(width_rotate, height_rotate); \
res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate); \
libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(), \
frame2.video_frame_buffer()->StrideY(), \
frame2.video_frame_buffer()->DataU(), \
frame2.video_frame_buffer()->StrideU(), \
frame2.video_frame_buffer()->DataV(), \
frame2.video_frame_buffer()->StrideV(), \
frame3.video_frame_buffer()->MutableDataY(), \
frame3.video_frame_buffer()->StrideY(), \
frame3.video_frame_buffer()->MutableDataU(), \
frame3.video_frame_buffer()->StrideU(), \
frame3.video_frame_buffer()->MutableDataV(), \
frame3.video_frame_buffer()->StrideV(), kWidth, \
kHeight); \
EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
res_buffer->MutableDataY(), res_buffer->StrideY(), \
res_buffer->MutableDataU(), res_buffer->StrideU(), \
res_buffer->MutableDataV(), res_buffer->StrideV(), \
kWidth, kHeight); \
EXPECT_TRUE(IsEqual(frame1, res_buffer, 0)); \
}

TEST_MIRROR(I420, 420)

@@ -810,7 +816,8 @@ class VideoFrameTest : public testing::Test {
// Macro to help test different rotations
#define TEST_ROTATE(FOURCC, BPP, ROTATE) \
void Construct##FOURCC##Rotate##ROTATE() { \
T frame1, frame2, frame3; \
T frame1, frame2; \
rtc::scoped_refptr<webrtc::I420Buffer> res_buffer; \
std::unique_ptr<rtc::MemoryStream> ms( \
CreateYuvSample(kWidth, kHeight, BPP)); \
ASSERT_TRUE(ms.get() != NULL); \

@@ -826,21 +833,18 @@ class VideoFrameTest : public testing::Test {
data_size, 0, webrtc::kVideoRotation_0)); \
int width_rotate = frame1.width(); \
int height_rotate = frame1.height(); \
frame3.InitToEmptyBuffer(width_rotate, height_rotate); \
res_buffer = webrtc::I420Buffer::Create(width_rotate, height_rotate); \
libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(), \
frame2.video_frame_buffer()->StrideY(), \
frame2.video_frame_buffer()->DataU(), \
frame2.video_frame_buffer()->StrideU(), \
frame2.video_frame_buffer()->DataV(), \
frame2.video_frame_buffer()->StrideV(), \
frame3.video_frame_buffer()->MutableDataY(), \
frame3.video_frame_buffer()->StrideY(), \
frame3.video_frame_buffer()->MutableDataU(), \
frame3.video_frame_buffer()->StrideU(), \
frame3.video_frame_buffer()->MutableDataV(), \
frame3.video_frame_buffer()->StrideV(), kWidth, \
kHeight, libyuv::kRotate##ROTATE); \
EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \
res_buffer->MutableDataY(), res_buffer->StrideY(), \
res_buffer->MutableDataU(), res_buffer->StrideU(), \
res_buffer->MutableDataV(), res_buffer->StrideV(), \
kWidth, kHeight, libyuv::kRotate##ROTATE); \
EXPECT_TRUE(IsEqual(frame1, res_buffer, 0)); \
}

// Test constructing an image with rotation.

@@ -944,7 +948,7 @@ class VideoFrameTest : public testing::Test {
const uint8_t* y = pixel;
const uint8_t* u = y + 1;
const uint8_t* v = u + 1;
EXPECT_TRUE(IsEqual(frame, 1, 1, 0, y, 1, u, 1, v, 1, 0));
EXPECT_TRUE(IsEqual(frame, 1, 1, y, 1, u, 1, v, 1, 0));
}

// Test 5 pixel edge case image.
@@ -66,17 +66,13 @@ void VerifyCodecHasDefaultFeedbackParams(const cricket::VideoCodec& codec) {
cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
}

static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
int width,
int height) {
video_frame->CreateEmptyFrame(
width, height, width, (width + 1) / 2, (width + 1) / 2);
memset(video_frame->video_frame_buffer()->MutableDataY(), 16,
video_frame->allocated_size(webrtc::kYPlane));
memset(video_frame->video_frame_buffer()->MutableDataU(), 128,
video_frame->allocated_size(webrtc::kUPlane));
memset(video_frame->video_frame_buffer()->MutableDataV(), 128,
video_frame->allocated_size(webrtc::kVPlane));
static rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateBlackFrameBuffer(
int width,
int height) {
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
webrtc::I420Buffer::Create(width, height);
buffer->SetToBlack();
return buffer;
}

void VerifySendStreamHasRtxTypes(const webrtc::VideoSendStream::Config& config,

@@ -2204,9 +2200,9 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeCorrectly) {
cricket::FakeVideoRenderer renderer;
EXPECT_TRUE(channel_->SetSink(last_ssrc_, &renderer));

webrtc::VideoFrame video_frame;
CreateBlackFrame(&video_frame, 4, 4);
video_frame.set_timestamp(kInitialTimestamp);
webrtc::VideoFrame video_frame(CreateBlackFrameBuffer(4, 4),
kInitialTimestamp, 0,
webrtc::kVideoRotation_0);
// Initial NTP time is not available on the first frame, but should still be
// able to be estimated.
stream->InjectFrame(video_frame);

@@ -129,7 +129,9 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
new_height = dw;
}

InitToEmptyBuffer(new_width, new_height);
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
webrtc::I420Buffer::Create(new_width, new_height);
video_frame_buffer_ = buffer;
rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;

int horiz_crop = ((w - dw) / 2) & ~1;

@@ -140,15 +142,10 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
int idh = (h < 0) ? -dh : dh;
int r = libyuv::ConvertToI420(
sample, sample_size,
video_frame_buffer_->MutableDataY(),
video_frame_buffer_->StrideY(),
video_frame_buffer_->MutableDataU(),
video_frame_buffer_->StrideU(),
video_frame_buffer_->MutableDataV(),
video_frame_buffer_->StrideV(),
horiz_crop, vert_crop,
w, h,
dw, idh,
buffer->MutableDataY(), buffer->StrideY(),
buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(),
horiz_crop, vert_crop, w, h, dw, idh,
static_cast<libyuv::RotationMode>(
apply_rotation ? rotation : webrtc::kVideoRotation_0),
format);

@@ -162,7 +159,7 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
}

void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) {
video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
video_frame_buffer_ = webrtc::I420Buffer::Create(w, h);
rotation_ = webrtc::kVideoRotation_0;
}
@ -420,15 +420,19 @@ class VideoCaptureExternalTest : public testing::Test {
|
||||
capability.maxFPS = kTestFramerate;
|
||||
capture_callback_.SetExpectedCapability(capability);
|
||||
|
||||
test_frame_.CreateEmptyFrame(kTestWidth, kTestHeight, kTestWidth,
|
||||
((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
|
||||
SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
|
||||
memset(test_frame_.video_frame_buffer()->MutableDataY(), 127,
|
||||
kTestWidth * kTestHeight);
|
||||
memset(test_frame_.video_frame_buffer()->MutableDataU(), 127,
|
||||
rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
|
||||
kTestWidth, kTestHeight,
|
||||
kTestWidth, ((kTestWidth + 1) / 2), (kTestWidth + 1) / 2);
|
||||
|
||||
memset(buffer->MutableDataY(), 127, kTestWidth * kTestHeight);
|
||||
memset(buffer->MutableDataU(), 127,
|
||||
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
|
||||
memset(test_frame_.video_frame_buffer()->MutableDataV(), 127,
|
||||
memset(buffer->MutableDataV(), 127,
|
||||
((kTestWidth + 1) / 2) * ((kTestHeight + 1) / 2));
|
||||
test_frame_.reset(
|
||||
new webrtc::VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
|
||||
|
||||
SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
|
||||
|
||||
capture_module_->RegisterCaptureDataCallback(capture_callback_);
|
||||
capture_module_->RegisterCaptureCallback(capture_feedback_);
|
||||
@ -443,7 +447,7 @@ class VideoCaptureExternalTest : public testing::Test {
|
||||
webrtc::VideoCaptureExternal* capture_input_interface_;
|
||||
rtc::scoped_refptr<VideoCaptureModule> capture_module_;
|
||||
std::unique_ptr<webrtc::ProcessThread> process_module_;
|
||||
webrtc::VideoFrame test_frame_;
|
||||
std::unique_ptr<webrtc::VideoFrame> test_frame_;
|
||||
TestVideoCaptureCallback capture_callback_;
|
||||
TestVideoCaptureFeedBack capture_feedback_;
|
||||
};
|
||||
@ -451,13 +455,13 @@ class VideoCaptureExternalTest : public testing::Test {
|
||||
// Test input of external video frames.
|
||||
TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
|
||||
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
|
||||
test_frame_.width(),
|
||||
test_frame_.height());
|
||||
test_frame_->width(),
|
||||
test_frame_->height());
|
||||
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
|
||||
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
|
||||
webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
|
||||
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
|
||||
length, capture_callback_.capability(), 0));
|
||||
EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
|
||||
EXPECT_TRUE(capture_callback_.CompareLastFrame(*test_frame_));
|
||||
}
|
||||
|
||||
// Test frame rate and no picture alarm.
|
||||
@ -472,13 +476,14 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
|
||||
uint64_t startTime = rtc::TimeNanos();
|
||||
|
||||
while ((rtc::TimeNanos() - startTime) < testTime) {
|
||||
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
|
||||
test_frame_.width(),
|
||||
test_frame_.height());
|
||||
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
|
||||
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
|
||||
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
|
||||
length, capture_callback_.capability(), 0));
|
||||
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
|
||||
test_frame_->width(),
|
||||
test_frame_->height());
|
||||
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
|
||||
webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
|
||||
EXPECT_EQ(
|
||||
0, capture_input_interface_->IncomingFrame(
|
||||
test_buffer.get(), length, capture_callback_.capability(), 0));
|
||||
SleepMs(100);
|
||||
}
|
||||
EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&
|
||||
@ -489,10 +494,10 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
|
||||
startTime = rtc::TimeNanos();
|
||||
while ((rtc::TimeNanos() - startTime) < testTime) {
|
||||
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
|
||||
test_frame_.width(),
|
||||
test_frame_.height());
|
||||
test_frame_->width(),
|
||||
test_frame_->height());
|
||||
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
|
||||
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
|
||||
webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
|
||||
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
|
||||
length, capture_callback_.capability(), 0));
|
||||
SleepMs(1000 / 30);
|
||||
@ -507,10 +512,10 @@ TEST_F(VideoCaptureExternalTest, MAYBE_FrameRate) {
|
||||
TEST_F(VideoCaptureExternalTest, Rotation) {
|
||||
EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
|
||||
size_t length = webrtc::CalcBufferSize(webrtc::kI420,
|
||||
test_frame_.width(),
|
||||
test_frame_.height());
|
||||
test_frame_->width(),
|
||||
test_frame_->height());
|
||||
std::unique_ptr<uint8_t[]> test_buffer(new uint8_t[length]);
|
||||
webrtc::ExtractBuffer(test_frame_, length, test_buffer.get());
|
||||
webrtc::ExtractBuffer(*test_frame_, length, test_buffer.get());
|
||||
EXPECT_EQ(0, capture_input_interface_->IncomingFrame(test_buffer.get(),
|
||||
length, capture_callback_.capability(), 0));
|
||||
EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_90));
|
||||
|
||||
@ -275,14 +275,14 @@ int32_t VideoCaptureImpl::IncomingFrame(
|
||||
// Setting absolute height (in case it was negative).
|
||||
// In Windows, the image starts bottom left, instead of top left.
|
||||
// Setting a negative source height, inverts the image (within LibYuv).
|
||||
_captureFrame.CreateEmptyFrame(target_width,
|
||||
abs(target_height),
|
||||
stride_y,
|
||||
stride_uv, stride_uv);
|
||||
|
||||
// TODO(nisse): Use a pool?
|
||||
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
|
||||
target_width, abs(target_height), stride_y, stride_uv, stride_uv);
|
||||
const int conversionResult = ConvertToI420(
|
||||
commonVideoType, videoFrame, 0, 0, // No cropping
|
||||
width, height, videoFrameLength,
|
||||
apply_rotation ? _rotateFrame : kVideoRotation_0, &_captureFrame);
|
||||
apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get());
|
||||
if (conversionResult < 0)
|
||||
{
|
||||
LOG(LS_ERROR) << "Failed to convert capture frame from type "
|
||||
@ -290,15 +290,12 @@ int32_t VideoCaptureImpl::IncomingFrame(
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!apply_rotation) {
|
||||
_captureFrame.set_rotation(_rotateFrame);
|
||||
} else {
|
||||
_captureFrame.set_rotation(kVideoRotation_0);
|
||||
}
|
||||
_captureFrame.set_ntp_time_ms(captureTime);
|
||||
_captureFrame.set_render_time_ms(rtc::TimeMillis());
|
||||
VideoFrame captureFrame(
|
||||
buffer, 0, rtc::TimeMillis(),
|
||||
!apply_rotation ? _rotateFrame : kVideoRotation_0);
|
||||
captureFrame.set_ntp_time_ms(captureTime);
|
||||
|
||||
DeliverCapturedFrame(_captureFrame);
|
||||
DeliverCapturedFrame(captureFrame);
|
||||
}
|
||||
else // Encoded format
|
||||
{
|
||||
|
||||
@ -137,8 +137,6 @@ private:
|
||||
VideoRotation _rotateFrame; // Set if the frame should be rotated by the
|
||||
// capture module.
|
||||
|
||||
VideoFrame _captureFrame;
|
||||
|
||||
// Indicate whether rotation should be applied before delivered externally.
|
||||
bool apply_rotation_;
|
||||
};
|
||||
|
||||
@ -121,52 +121,47 @@ int H264DecoderImpl::AVGetBuffer2(
|
||||
return ret;
|
||||
}
|
||||
|
||||
// The video frame is stored in |video_frame|. |av_frame| is FFmpeg's version
|
||||
// of a video frame and will be set up to reference |video_frame|'s buffers.
|
||||
|
||||
// TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
|
||||
// Refactor to do not use a VideoFrame object at all.
|
||||
// The video frame is stored in |frame_buffer|. |av_frame| is FFmpeg's version
|
||||
// of a video frame and will be set up to reference |frame_buffer|'s data.
|
||||
|
||||
// FFmpeg expects the initial allocation to be zero-initialized according to
|
||||
// http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
|
||||
VideoFrame* video_frame = new VideoFrame(
|
||||
decoder->pool_.CreateBuffer(width, height),
|
||||
0 /* timestamp */, 0 /* render_time_ms */, kVideoRotation_0);
|
||||
// TODO(nisse): Delete that feature from the video pool, instead add
|
||||
// an explicit call to InitializeData here.
|
||||
rtc::scoped_refptr<I420Buffer> frame_buffer =
|
||||
decoder->pool_.CreateBuffer(width, height);
|
||||
|
||||
int y_size = width * height;
|
||||
int uv_size = ((width + 1) / 2) * ((height + 1) / 2);
|
||||
// DCHECK that we have a continuous buffer as is required.
|
||||
RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataU(),
|
||||
video_frame->video_frame_buffer()->DataY() +
|
||||
video_frame->allocated_size(kYPlane));
|
||||
RTC_DCHECK_EQ(video_frame->video_frame_buffer()->DataV(),
|
||||
video_frame->video_frame_buffer()->DataU() +
|
||||
video_frame->allocated_size(kUPlane));
|
||||
int total_size = video_frame->allocated_size(kYPlane) +
|
||||
video_frame->allocated_size(kUPlane) +
|
||||
video_frame->allocated_size(kVPlane);
|
||||
RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size);
|
||||
RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size);
|
||||
int total_size = y_size + 2 * uv_size;
|
||||
|
||||
av_frame->format = context->pix_fmt;
|
||||
av_frame->reordered_opaque = context->reordered_opaque;
|
||||
|
||||
// Set |av_frame| members as required by FFmpeg.
|
||||
av_frame->data[kYPlaneIndex] =
|
||||
video_frame->video_frame_buffer()->MutableDataY();
|
||||
av_frame->linesize[kYPlaneIndex] =
|
||||
video_frame->video_frame_buffer()->StrideY();
|
||||
av_frame->data[kUPlaneIndex] =
|
||||
video_frame->video_frame_buffer()->MutableDataU();
|
||||
av_frame->linesize[kUPlaneIndex] =
|
||||
video_frame->video_frame_buffer()->StrideU();
|
||||
av_frame->data[kVPlaneIndex] =
|
||||
video_frame->video_frame_buffer()->MutableDataV();
|
||||
av_frame->linesize[kVPlaneIndex] =
|
||||
video_frame->video_frame_buffer()->StrideV();
|
||||
av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY();
|
||||
av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY();
|
||||
av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU();
|
||||
av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU();
|
||||
av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV();
|
||||
av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV();
|
||||
RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
|
||||
|
||||
av_frame->buf[0] = av_buffer_create(av_frame->data[kYPlaneIndex],
|
||||
total_size,
|
||||
AVFreeBuffer2,
|
||||
static_cast<void*>(video_frame),
|
||||
0);
|
||||
// Create a VideoFrame object, to keep a reference to the buffer.
|
||||
// TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
|
||||
// Refactor to do not use a VideoFrame object at all.
|
||||
av_frame->buf[0] = av_buffer_create(
|
||||
av_frame->data[kYPlaneIndex],
|
||||
total_size,
|
||||
AVFreeBuffer2,
|
||||
static_cast<void*>(new VideoFrame(frame_buffer,
|
||||
0 /* timestamp */,
|
||||
0 /* render_time_ms */,
|
||||
kVideoRotation_0)),
|
||||
0);
|
||||
RTC_CHECK(av_frame->buf[0]);
|
||||
return 0;
|
||||
}
|
||||
|
||||
@ -137,8 +137,7 @@ int I420Encoder::RegisterEncodeCompleteCallback(
|
||||
}
|
||||
|
||||
I420Decoder::I420Decoder()
|
||||
: _decodedImage(),
|
||||
_width(0),
|
||||
: _width(0),
|
||||
_height(0),
|
||||
_inited(false),
|
||||
_decodeCompleteCallback(NULL) {}
|
||||
@ -199,17 +198,19 @@ int I420Decoder::Decode(const EncodedImage& inputImage,
|
||||
}
|
||||
// Set decoded image parameters.
|
||||
int half_width = (_width + 1) / 2;
|
||||
_decodedImage.CreateEmptyFrame(_width, _height, _width, half_width,
|
||||
half_width);
|
||||
// Converting from buffer to plane representation.
|
||||
rtc::scoped_refptr<webrtc::I420Buffer> frame_buffer =
|
||||
I420Buffer::Create(_width, _height, _width, half_width, half_width);
|
||||
|
||||
// Converting from raw buffer I420Buffer.
|
||||
int ret = ConvertToI420(kI420, buffer, 0, 0, _width, _height, 0,
|
||||
kVideoRotation_0, &_decodedImage);
|
||||
kVideoRotation_0, frame_buffer.get());
|
||||
if (ret < 0) {
|
||||
return WEBRTC_VIDEO_CODEC_MEMORY;
|
||||
}
|
||||
_decodedImage.set_timestamp(inputImage._timeStamp);
|
||||
|
||||
_decodeCompleteCallback->Decoded(_decodedImage);
|
||||
VideoFrame decoded_image(frame_buffer, inputImage._timeStamp, 0,
|
||||
webrtc::kVideoRotation_0);
|
||||
_decodeCompleteCallback->Decoded(decoded_image);
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
|
||||
@ -298,11 +298,10 @@ int SimulcastEncoderAdapter::Encode(
|
||||
return ret;
|
||||
}
|
||||
} else {
|
||||
VideoFrame dst_frame;
|
||||
// Making sure that destination frame is of sufficient size.
|
||||
// Aligning stride values based on width.
|
||||
dst_frame.CreateEmptyFrame(dst_width, dst_height, dst_width,
|
||||
(dst_width + 1) / 2, (dst_width + 1) / 2);
|
||||
rtc::scoped_refptr<I420Buffer> dst_buffer =
|
||||
I420Buffer::Create(dst_width, dst_height, dst_width,
|
||||
(dst_width + 1) / 2, (dst_width + 1) / 2);
|
||||
libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
|
||||
input_image.video_frame_buffer()->StrideY(),
|
||||
input_image.video_frame_buffer()->DataU(),
|
||||
@ -310,18 +309,16 @@ int SimulcastEncoderAdapter::Encode(
|
||||
input_image.video_frame_buffer()->DataV(),
|
||||
input_image.video_frame_buffer()->StrideV(),
|
||||
src_width, src_height,
|
||||
dst_frame.video_frame_buffer()->MutableDataY(),
|
||||
dst_frame.video_frame_buffer()->StrideY(),
|
||||
dst_frame.video_frame_buffer()->MutableDataU(),
|
||||
dst_frame.video_frame_buffer()->StrideU(),
|
||||
dst_frame.video_frame_buffer()->MutableDataV(),
|
||||
dst_frame.video_frame_buffer()->StrideV(),
|
||||
dst_buffer->MutableDataY(), dst_buffer->StrideY(),
|
||||
dst_buffer->MutableDataU(), dst_buffer->StrideU(),
|
||||
dst_buffer->MutableDataV(), dst_buffer->StrideV(),
|
||||
dst_width, dst_height,
|
||||
libyuv::kFilterBilinear);
|
||||
dst_frame.set_timestamp(input_image.timestamp());
|
||||
dst_frame.set_render_time_ms(input_image.render_time_ms());
|
||||
|
||||
int ret = streaminfos_[stream_idx].encoder->Encode(
|
||||
dst_frame, codec_specific_info, &stream_frame_types);
|
||||
VideoFrame(dst_buffer, input_image.timestamp(),
|
||||
input_image.render_time_ms(), webrtc::kVideoRotation_0),
|
||||
codec_specific_info, &stream_frame_types);
|
||||
if (ret != WEBRTC_VIDEO_CODEC_OK) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -535,17 +535,11 @@ TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) {
|
||||
.WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
|
||||
|
||||
// Send a fake frame and assert the return is software fallback.
|
||||
VideoFrame input_frame;
|
||||
int half_width = (kDefaultWidth + 1) / 2;
|
||||
input_frame.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
|
||||
half_width, half_width);
|
||||
memset(input_frame.video_frame_buffer()->MutableDataY(), 0,
|
||||
input_frame.allocated_size(kYPlane));
|
||||
memset(input_frame.video_frame_buffer()->MutableDataU(), 0,
|
||||
input_frame.allocated_size(kUPlane));
|
||||
memset(input_frame.video_frame_buffer()->MutableDataV(), 0,
|
||||
input_frame.allocated_size(kVPlane));
|
||||
|
||||
rtc::scoped_refptr<I420Buffer> input_buffer = I420Buffer::Create(
|
||||
kDefaultWidth, kDefaultHeight, kDefaultWidth, half_width, half_width);
|
||||
input_buffer->InitializeData();
|
||||
VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
|
||||
std::vector<FrameType> frame_types(3, kVideoFrameKey);
|
||||
EXPECT_EQ(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE,
|
||||
adapter_->Encode(input_frame, nullptr, &frame_types));
|
||||
|
||||
@ -236,8 +236,8 @@ class TestVp8Simulcast : public ::testing::Test {
|
||||
}
|
||||
}
|
||||
|
||||
// Fills in an VideoFrameBuffer from |plane_colors|.
|
||||
static void CreateImage(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
|
||||
// Fills in an I420Buffer from |plane_colors|.
|
||||
static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
|
||||
int plane_colors[kNumOfPlanes]) {
|
||||
int width = buffer->width();
|
||||
int height = buffer->height();
|
||||
@ -317,14 +317,11 @@ class TestVp8Simulcast : public ::testing::Test {
|
||||
EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
|
||||
EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
|
||||
int half_width = (kDefaultWidth + 1) / 2;
|
||||
input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, kDefaultWidth,
|
||||
half_width, half_width);
|
||||
memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
|
||||
input_frame_.allocated_size(kYPlane));
|
||||
memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
|
||||
input_frame_.allocated_size(kUPlane));
|
||||
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
|
||||
input_frame_.allocated_size(kVPlane));
|
||||
input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight,
|
||||
kDefaultWidth, half_width, half_width);
|
||||
input_buffer_->InitializeData();
|
||||
input_frame_.reset(
|
||||
new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
|
||||
}
|
||||
|
||||
virtual void TearDown() {
|
||||
@ -396,33 +393,33 @@ class TestVp8Simulcast : public ::testing::Test {
|
||||
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
|
||||
kVideoFrameDelta);
|
||||
ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
frame_types[0] = kVideoFrameKey;
|
||||
ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
|
||||
frame_types[1] = kVideoFrameKey;
|
||||
ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
|
||||
frame_types[2] = kVideoFrameKey;
|
||||
ExpectStreams(kVideoFrameKey, kNumberOfSimulcastStreams);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
std::fill(frame_types.begin(), frame_types.end(), kVideoFrameDelta);
|
||||
ExpectStreams(kVideoFrameDelta, kNumberOfSimulcastStreams);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
}
|
||||
|
||||
void TestPaddingAllStreams() {
|
||||
@ -431,11 +428,11 @@ class TestVp8Simulcast : public ::testing::Test {
|
||||
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
|
||||
kVideoFrameDelta);
|
||||
ExpectStreams(kVideoFrameKey, 1);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
ExpectStreams(kVideoFrameDelta, 1);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
}
|
||||
|
||||
void TestPaddingTwoStreams() {
|
||||
@ -444,11 +441,11 @@ class TestVp8Simulcast : public ::testing::Test {
|
||||
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
|
||||
kVideoFrameDelta);
|
||||
ExpectStreams(kVideoFrameKey, 1);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
ExpectStreams(kVideoFrameDelta, 1);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
}
|
||||
|
||||
void TestPaddingTwoStreamsOneMaxedOut() {
|
||||
@ -458,11 +455,11 @@ class TestVp8Simulcast : public ::testing::Test {
|
||||
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
|
||||
kVideoFrameDelta);
|
||||
ExpectStreams(kVideoFrameKey, 1);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
|
||||
ExpectStreams(kVideoFrameDelta, 1);
|
||||
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
|
||||
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
|
||||
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
|
||||
}

void TestPaddingOneStream() {
@ -471,11 +468,11 @@ class TestVp8Simulcast : public ::testing::Test {
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 2);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

ExpectStreams(kVideoFrameDelta, 2);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
}

void TestPaddingOneStreamTwoMaxedOut() {
@ -486,11 +483,11 @@ class TestVp8Simulcast : public ::testing::Test {
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 2);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

ExpectStreams(kVideoFrameDelta, 2);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
}

void TestSendAllStreams() {
@ -500,11 +497,11 @@ class TestVp8Simulcast : public ::testing::Test {
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 3);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

ExpectStreams(kVideoFrameDelta, 3);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
}

void TestDisablingStreams() {
@ -513,47 +510,47 @@ class TestVp8Simulcast : public ::testing::Test {
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
ExpectStreams(kVideoFrameKey, 3);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

ExpectStreams(kVideoFrameDelta, 3);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

// We should only get two streams and padding for one.
encoder_->SetRates(
kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
ExpectStreams(kVideoFrameDelta, 2);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

// We should only get the first stream and padding for two.
encoder_->SetRates(kTargetBitrates[0] + kMinBitrates[1] / 2, 30);
ExpectStreams(kVideoFrameDelta, 1);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

// We don't have enough bitrate for the thumbnail stream, but we should get
// it anyway with current configuration.
encoder_->SetRates(kTargetBitrates[0] - 1, 30);
ExpectStreams(kVideoFrameDelta, 1);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

// We should only get two streams and padding for one.
encoder_->SetRates(
kTargetBitrates[0] + kTargetBitrates[1] + kMinBitrates[2] / 2, 30);
// We get a key frame because a new stream is being enabled.
ExpectStreams(kVideoFrameKey, 2);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

// We should get all three streams.
encoder_->SetRates(
kTargetBitrates[0] + kTargetBitrates[1] + kTargetBitrates[2], 30);
// We get a key frame because a new stream is being enabled.
ExpectStreams(kVideoFrameKey, 3);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
}
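The SetRates calls above encode the padding policy these expectations check: streams are filled lowest-first, and a higher stream survives only if the leftover budget reaches its minimum bitrate (the lowest stream is kept even below its target). A purely illustrative helper, not encoder code, that reproduces the expected stream counts under those assumptions:

// Illustrative only; min_kbps/target_kbps are per-stream kbps arrays as in
// the test constants. Reproduces the ExpectStreams counts used above.
int ExpectedEnabledStreams(int total_kbps, const int* min_kbps,
                           const int* target_kbps, int num_streams) {
  int enabled = 1;  // The lowest ("thumbnail") stream is always encoded here.
  total_kbps -= target_kbps[0];
  for (int i = 1; i < num_streams && total_kbps >= min_kbps[i]; ++i) {
    ++enabled;
    total_kbps -= target_kbps[i];
  }
  return enabled;
}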

void SwitchingToOneStream(int width, int height) {
@ -571,14 +568,12 @@ class TestVp8Simulcast : public ::testing::Test {
}
// Setting input image to new resolution.
int half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
settings_.width, half_width, half_width);
input_buffer_->InitializeData();

input_frame_.reset(
new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

// The for loop above did not set the bitrate of the highest layer.
settings_.simulcastStream[settings_.numberOfSimulcastStreams - 1]
@ -603,7 +598,7 @@ class TestVp8Simulcast : public ::testing::Test {
.Times(1)
.WillRepeatedly(Return(
EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));

// Switch back.
DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
@ -614,15 +609,12 @@ class TestVp8Simulcast : public ::testing::Test {
ExpectStreams(kVideoFrameKey, 1);
// Resize |input_frame_| to the new resolution.
half_width = (settings_.width + 1) / 2;
input_frame_.CreateEmptyFrame(settings_.width, settings_.height,
settings_.width, half_width, half_width);
memset(input_frame_.video_frame_buffer()->MutableDataY(), 0,
input_frame_.allocated_size(kYPlane));
memset(input_frame_.video_frame_buffer()->MutableDataU(), 0,
input_frame_.allocated_size(kUPlane));
memset(input_frame_.video_frame_buffer()->MutableDataV(), 0,
input_frame_.allocated_size(kVPlane));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
settings_.width, half_width, half_width);
input_buffer_->InitializeData();
input_frame_.reset(
new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
}
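Note the shape of the replacement in the two hunks above: CreateEmptyFrame plus three per-plane memsets collapse into I420Buffer::Create followed by InitializeData(). A condensed sketch, assuming the dimensions come from settings_:

int w = settings_.width, h = settings_.height;
rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
    w, h, /*stride_y=*/w, /*stride_u=*/(w + 1) / 2, /*stride_v=*/(w + 1) / 2);
buffer->InitializeData();  // One call clears all planes; replaces the memsets.
webrtc::VideoFrame frame(buffer, 0, 0, webrtc::kVideoRotation_0);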

void TestSwitchingToOneStream() { SwitchingToOneStream(1024, 768); }
@ -637,7 +629,7 @@ class TestVp8Simulcast : public ::testing::Test {

encoder_->SetRates(kMaxBitrates[2], 30); // To get all three streams.

EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
int picture_id = -1;
int temporal_layer = -1;
bool layer_sync = false;
@ -647,22 +639,22 @@ class TestVp8Simulcast : public ::testing::Test {
EXPECT_TRUE(layer_sync);
int key_frame_picture_id = picture_id;

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0);
EXPECT_EQ(2, temporal_layer);
EXPECT_TRUE(layer_sync);

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0);
EXPECT_EQ(1, temporal_layer);
EXPECT_TRUE(layer_sync);

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0);
EXPECT_EQ(2, temporal_layer);
@ -675,8 +667,8 @@ class TestVp8Simulcast : public ::testing::Test {
// Must match last key frame to trigger.
codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id;

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0);

@ -686,8 +678,8 @@ class TestVp8Simulcast : public ::testing::Test {
// Must match last key frame to trigger, test bad id.
codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id + 17;

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));
encoder_callback.GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, 0);
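The two hunks above drive VP8 reference picture selection: an RPSI carrying the last key frame's picture id should be honored, while a mismatching id (key_frame_picture_id + 17) should not. A sketch of the hand-off; pictureIdRPSI and kVideoCodecVP8 appear in the diff, while hasReceivedRPSI is an assumption about the struct of this era:

webrtc::CodecSpecificInfo codec_specific;
codec_specific.codecType = webrtc::kVideoCodecVP8;
codec_specific.codecSpecific.VP8.hasReceivedRPSI = true;  // Assumed field name.
codec_specific.codecSpecific.VP8.pictureIdRPSI = key_frame_picture_id;
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));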

@ -711,9 +703,9 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
CreateImage(input_buffer_, plane_offset);

EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
int picture_id = -1;
int temporal_layer = -1;
bool layer_sync = false;
@ -727,27 +719,27 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
CreateImage(input_buffer_, plane_offset);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

// Change color.
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
CreateImage(input_buffer_, plane_offset);

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

// Change color.
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
CreateImage(input_buffer_, plane_offset);

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

CodecSpecificInfo codec_specific;
codec_specific.codecType = kVideoCodecVP8;
@ -759,10 +751,10 @@ class TestVp8Simulcast : public ::testing::Test {
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
CreateImage(input_buffer_, plane_offset);

input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, &codec_specific, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, &codec_specific, NULL));

EncodedImage encoded_frame;
encoder_callback.GetLastEncodedKeyFrame(&encoded_frame);
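CreateImage now receives the I420Buffer directly instead of frame.video_frame_buffer(). The helper's body is not part of this diff; a hypothetical implementation consistent with the plane_offset usage above (fill each plane with a constant, stride-aware) could look like:

// Hypothetical sketch; requires <string.h>. plane_offset indices follow the test.
void CreateImage(const rtc::scoped_refptr<webrtc::I420Buffer>& buffer,
                 int* plane_offset) {
  for (int i = 0; i < buffer->height(); ++i)
    memset(buffer->MutableDataY() + i * buffer->StrideY(),
           plane_offset[webrtc::kYPlane], buffer->width());
  int chroma_width = (buffer->width() + 1) / 2;
  int chroma_height = (buffer->height() + 1) / 2;
  for (int i = 0; i < chroma_height; ++i) {
    memset(buffer->MutableDataU() + i * buffer->StrideU(),
           plane_offset[webrtc::kUPlane], chroma_width);
    memset(buffer->MutableDataV() + i * buffer->StrideV(),
           plane_offset[webrtc::kVPlane], chroma_width);
  }
}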
@ -784,47 +776,47 @@ class TestVp8Simulcast : public ::testing::Test {
bool expected_layer_sync[3] = {false, false, false};

// First frame: #0.
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #1.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #2.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(1, 1, 1, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, true, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #3.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #4.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 0, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #5.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 2, 2, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
@ -853,47 +845,47 @@ class TestVp8Simulcast : public ::testing::Test {
bool expected_layer_sync[3] = {false, false, false};

// First frame: #0.
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #1.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, true, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #2.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(1, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(true, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #3.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #4.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(0, 0, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);

// Next frame: #5.
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
@ -911,24 +903,27 @@ class TestVp8Simulcast : public ::testing::Test {
// 1. stride > width 2. stride_uv != stride_y / 2
int stride_y = kDefaultWidth + 20;
int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
input_frame_.CreateEmptyFrame(kDefaultWidth, kDefaultHeight, stride_y,
stride_uv, stride_uv);
input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y,
stride_uv, stride_uv);
input_frame_.reset(
new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

// Set color.
int plane_offset[kNumOfPlanes];
plane_offset[kYPlane] = kColorY;
plane_offset[kUPlane] = kColorU;
plane_offset[kVPlane] = kColorV;
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
CreateImage(input_buffer_, plane_offset);

EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

// Change color.
plane_offset[kYPlane] += 1;
plane_offset[kUPlane] += 1;
plane_offset[kVPlane] += 1;
CreateImage(input_frame_.video_frame_buffer(), plane_offset);
input_frame_.set_timestamp(input_frame_.timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, NULL));
CreateImage(input_buffer_, plane_offset);
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));

EncodedImage encoded_frame;
// Only encoding one frame - so will be a key frame.
@ -968,7 +963,8 @@ class TestVp8Simulcast : public ::testing::Test {
std::unique_ptr<VP8Decoder> decoder_;
MockDecodedImageCallback decoder_callback_;
VideoCodec settings_;
VideoFrame input_frame_;
rtc::scoped_refptr<I420Buffer> input_buffer_;
std::unique_ptr<VideoFrame> input_frame_;
};

} // namespace testing
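The stride test above deliberately decouples strides from widths (stride_y = width + 20, stride_uv not equal to stride_y / 2). A sketch of what that buffer looks like; the kDefault values are assumptions:

const int kDefaultWidth = 1280, kDefaultHeight = 720;  // Assumed constants.
int stride_y = kDefaultWidth + 20;
int stride_uv = ((kDefaultWidth + 1) / 2) + 5;
rtc::scoped_refptr<webrtc::I420Buffer> input_buffer =
    webrtc::I420Buffer::Create(kDefaultWidth, kDefaultHeight, stride_y,
                               stride_uv, stride_uv);
// Rows of DataY() are stride_y bytes apart even though only kDefaultWidth
// bytes per row are image data; codec wrappers must honor the stride.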

@ -147,13 +147,15 @@ class TestVp8Impl : public ::testing::Test {
EXPECT_EQ(stride_y, 176);
EXPECT_EQ(stride_uv, 96);

input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
stride_y, stride_uv, stride_uv);
input_frame_.set_timestamp(kTestTimestamp);
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
codec_inst_.width, codec_inst_.height, stride_y, stride_uv, stride_uv);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
codec_inst_.width, codec_inst_.height, 0,
kVideoRotation_0, &input_frame_));
EXPECT_EQ(
0, ConvertToI420(kI420, source_buffer_.get(), 0, 0, codec_inst_.width,
codec_inst_.height, 0, kVideoRotation_0,
buffer.get()));
input_frame_.reset(
new VideoFrame(buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0));
}
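ConvertToI420's destination changes here from a VideoFrame* to an I420Buffer*; the frame is constructed afterwards from the filled buffer. A condensed sketch, assuming |source| points at one packed I420 frame of the configured size:

rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
    width, height, stride_y, stride_uv, stride_uv);
EXPECT_EQ(0, webrtc::ConvertToI420(webrtc::kI420, source, 0, 0, width, height,
                                   0, webrtc::kVideoRotation_0, buffer.get()));
std::unique_ptr<webrtc::VideoFrame> frame(new webrtc::VideoFrame(
    buffer, kTestTimestamp, 0, webrtc::kVideoRotation_0));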

void SetUpEncodeDecode() {
@ -195,7 +197,7 @@ class TestVp8Impl : public ::testing::Test {
std::unique_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
std::unique_ptr<uint8_t[]> source_buffer_;
FILE* source_file_;
VideoFrame input_frame_;
std::unique_ptr<VideoFrame> input_frame_;
std::unique_ptr<VideoEncoder> encoder_;
std::unique_ptr<VideoDecoder> decoder_;
EncodedImage encoded_frame_;
@ -237,7 +239,7 @@ TEST_F(TestVp8Impl, EncoderParameterTest) {
#endif
TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
SetUpEncodeDecode();
encoder_->Encode(input_frame_, NULL, NULL);
encoder_->Encode(*input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0u);
// First frame should be a key frame.
encoded_frame_._frameType = kVideoFrameKey;
@ -246,7 +248,7 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
decoder_->Decode(encoded_frame_, false, NULL));
EXPECT_GT(WaitForDecodedFrame(), 0u);
// Compute PSNR on all planes (faster than SSIM).
EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
}
@ -258,7 +260,7 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
#endif
TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
SetUpEncodeDecode();
encoder_->Encode(input_frame_, NULL, NULL);
encoder_->Encode(*input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0u);
// Setting complete to false -> should return an error.
encoded_frame_._completeFrame = false;
@ -273,7 +275,7 @@ TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
encoded_frame_._frameType = kVideoFrameKey;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encoded_frame_, false, NULL));
EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
}

} // namespace webrtc
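The decoder hunk below replaces the write-through-VideoFrame pattern with an explicit pooled buffer. The copy remains valid after the frame is handed off because I420BufferPool returns ref-counted buffers and recycles one only when every reference is gone. A sketch, with names from the diff:

// Sketch of the pattern in the hunk below; buffer_pool_ is a
// webrtc::I420BufferPool member and img a decoded vpx_image_t.
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
    buffer_pool_.CreateBuffer(img->d_w, img->d_h);
// ...fill via buffer->MutableDataY()/U()/V() (the libyuv::I420Copy below)...
webrtc::VideoFrame decoded_image(buffer, timestamp, 0,
                                 webrtc::kVideoRotation_0);
// decoded_image keeps the buffer alive; the pool reuses it only after the
// last reference is released.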

@ -1306,18 +1306,18 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
last_frame_width_ = img->d_w;
last_frame_height_ = img->d_h;
// Allocate memory for decoded image.
VideoFrame decoded_image(buffer_pool_.CreateBuffer(img->d_w, img->d_h),
timestamp, 0, kVideoRotation_0);
rtc::scoped_refptr<I420Buffer> buffer =
buffer_pool_.CreateBuffer(img->d_w, img->d_h);

libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
decoded_image.video_frame_buffer()->MutableDataY(),
decoded_image.video_frame_buffer()->StrideY(),
decoded_image.video_frame_buffer()->MutableDataU(),
decoded_image.video_frame_buffer()->StrideU(),
decoded_image.video_frame_buffer()->MutableDataV(),
decoded_image.video_frame_buffer()->StrideV(),
buffer->MutableDataY(), buffer->StrideY(),
buffer->MutableDataU(), buffer->StrideU(),
buffer->MutableDataV(), buffer->StrideV(),
img->d_w, img->d_h);

VideoFrame decoded_image(buffer, timestamp, 0, kVideoRotation_0);
decoded_image.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image);
if (ret != 0)

@ -148,7 +148,7 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
return -1;
}
EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
webrtc::VideoFrame input_frame;

size_t length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
std::unique_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);

@ -163,14 +163,18 @@ int SequenceCoder(webrtc::test::CommandLineParser* parser) {
int64_t starttime = rtc::TimeMillis();
int frame_cnt = 1;
int frames_processed = 0;
input_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
webrtc::I420Buffer::Create(width, height, width, half_width, half_width);

while (!feof(input_file) &&
(num_frames == -1 || frames_processed < num_frames)) {
if (fread(frame_buffer.get(), 1, length, input_file) != length)
continue;
if (frame_cnt >= start_frame) {
webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width,
height, 0, webrtc::kVideoRotation_0, &input_frame);
height, 0, webrtc::kVideoRotation_0, i420_buffer.get());
webrtc::VideoFrame input_frame(i420_buffer, 0, 0,
webrtc::kVideoRotation_0);
encoder->Encode(input_frame, NULL, NULL);
decoder->Decode(encoder_callback.encoded_image(), false, NULL);
++frames_processed;

@ -141,8 +141,10 @@ TEST_F(VideoProcessingTest, Denoiser) {
while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
frame_length_) {
// Using ConvertToI420 to add stride to the image.
rtc::scoped_refptr<webrtc::I420Buffer> input_buffer =
I420Buffer::Create(width_, height_, width_, half_width_, half_width_);
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
0, kVideoRotation_0, input_buffer.get()));

rtc::scoped_refptr<I420Buffer>* p_denoised_c = &denoised_frame_c;
rtc::scoped_refptr<I420Buffer>* p_denoised_prev_c = &denoised_frame_prev_c;
@ -157,11 +159,9 @@ TEST_F(VideoProcessingTest, Denoiser) {
p_denoised_sse_neon = &denoised_frame_prev_sse_neon;
p_denoised_prev_sse_neon = &denoised_frame_sse_neon;
}
denoiser_c.DenoiseFrame(video_frame_.video_frame_buffer(),
p_denoised_c, p_denoised_prev_c,
denoiser_c.DenoiseFrame(input_buffer, p_denoised_c, p_denoised_prev_c,
false);
denoiser_sse_neon.DenoiseFrame(video_frame_.video_frame_buffer(),
p_denoised_sse_neon,
denoiser_sse_neon.DenoiseFrame(input_buffer, p_denoised_sse_neon,
p_denoised_prev_sse_neon, false);
// Invert the flag.
denoised_frame_toggle ^= 1;
@ -15,6 +15,7 @@
#include <memory>
#include <string>

#include "webrtc/base/keep_ref_until_done.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/test/testsupport/fileutils.h"
@ -33,24 +34,25 @@ static void PreprocessFrameAndVerify(const VideoFrame& source,
int target_height,
VideoProcessing* vpm,
const VideoFrame* out_frame);
static void CropFrame(const uint8_t* source_data,
int source_width,
int source_height,
int offset_x,
int offset_y,
int cropped_width,
int cropped_height,
VideoFrame* cropped_frame);
rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
int source_width,
int source_height,
int offset_x,
int offset_y,
int cropped_width,
int cropped_height);
// The |source_data| is cropped and scaled to |target_width| x |target_height|,
// and then scaled back to the expected cropped size. |expected_psnr| is used to
// verify basic quality, and is set to be ~0.1/0.05dB lower than actual PSNR
// verified under the same conditions.
static void TestSize(const VideoFrame& source_frame,
const VideoFrame& cropped_source_frame,
int target_width,
int target_height,
double expected_psnr,
VideoProcessing* vpm);
static void TestSize(
const VideoFrame& source_frame,
const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
int target_width,
int target_height,
double expected_psnr,
VideoProcessing* vpm);
static void WriteProcessedFrameForVisualInspection(const VideoFrame& source,
const VideoFrame& processed);

@ -68,15 +70,6 @@ void VideoProcessingTest::SetUp() {
vp_ = VideoProcessing::Create();
ASSERT_TRUE(vp_ != NULL);

video_frame_.CreateEmptyFrame(width_, height_, width_,
half_width_, half_width_);
// Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
memset(video_frame_.video_frame_buffer()->MutableDataY(), 0,
video_frame_.allocated_size(kYPlane));
memset(video_frame_.video_frame_buffer()->MutableDataU(), 0,
video_frame_.allocated_size(kUPlane));
memset(video_frame_.video_frame_buffer()->MutableDataV(), 0,
video_frame_.allocated_size(kVPlane));
const std::string video_file =
webrtc::test::ResourcePath("foreman_cif", "yuv");
source_file_ = fopen(video_file.c_str(), "rb");
@ -109,11 +102,18 @@ TEST_F(VideoProcessingTest, PreprocessorLogic) {
VideoFrame* out_frame = NULL;
// Set rescaling => output frame != NULL.
vp_->SetInputFrameResampleMode(kFastRescaling);
PreprocessFrameAndVerify(video_frame_, resolution, resolution, vp_,
out_frame);

rtc::scoped_refptr<webrtc::I420Buffer> buffer =
I420Buffer::Create(width_, height_, width_, half_width_, half_width_);

// Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
buffer->InitializeData();
VideoFrame video_frame(buffer, 0, 0, webrtc::kVideoRotation_0);

PreprocessFrameAndVerify(video_frame, resolution, resolution, vp_, out_frame);
// No rescaling=> output frame = NULL.
vp_->SetInputFrameResampleMode(kNoRescaling);
EXPECT_TRUE(vp_->PreprocessFrame(video_frame_) != nullptr);
EXPECT_TRUE(vp_->PreprocessFrame(video_frame) != nullptr);
}

#if defined(WEBRTC_IOS)
@ -133,15 +133,15 @@ TEST_F(VideoProcessingTest, Resampler) {
vp_->EnableTemporalDecimation(false);

// Reading test frame
std::unique_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
ASSERT_EQ(frame_length_,
fread(video_buffer.get(), 1, frame_length_, source_file_));
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0, width_, height_,
0, kVideoRotation_0, &video_frame_));
// Cropped source frame that will contain the expected visible region.
VideoFrame cropped_source_frame;
cropped_source_frame.CopyFrame(video_frame_);
rtc::scoped_refptr<webrtc::I420Buffer> buffer =
I420Buffer::Create(width_, height_, width_, half_width_, half_width_);

ASSERT_EQ(static_cast<size_t>(size_y_),
fread(buffer->MutableDataY(), 1, size_y_, source_file_));
ASSERT_EQ(static_cast<size_t>(size_uv_),
fread(buffer->MutableDataU(), 1, size_uv_, source_file_));
ASSERT_EQ(static_cast<size_t>(size_uv_),
fread(buffer->MutableDataV(), 1, size_uv_, source_file_));

for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
// Initiate test timer.
@ -149,48 +149,37 @@ TEST_F(VideoProcessingTest, Resampler) {

// Init the sourceFrame with a timestamp.
int64_t time_start_ms = time_start / rtc::kNumNanosecsPerMillisec;
video_frame_.set_render_time_ms(time_start_ms);
video_frame_.set_timestamp(time_start_ms * 90);
VideoFrame video_frame(buffer, time_start_ms * 90, time_start_ms,
webrtc::kVideoRotation_0);

// Test scaling to different sizes: source is of |width|/|height| = 352/288.
// Pure scaling:
TestSize(video_frame_, video_frame_, width_ / 4, height_ / 4, 25.2, vp_);
TestSize(video_frame_, video_frame_, width_ / 2, height_ / 2, 28.1, vp_);
TestSize(video_frame, buffer, width_ / 4, height_ / 4, 25.2, vp_);
TestSize(video_frame, buffer, width_ / 2, height_ / 2, 28.1, vp_);
// No resampling:
TestSize(video_frame_, video_frame_, width_, height_, -1, vp_);
TestSize(video_frame_, video_frame_, 2 * width_, 2 * height_, 32.2, vp_);
TestSize(video_frame, buffer, width_, height_, -1, vp_);
TestSize(video_frame, buffer, 2 * width_, 2 * height_, 32.2, vp_);

// Scaling and cropping. The cropped source frame is the largest center
// aligned region that can be used from the source while preserving aspect
// ratio.
CropFrame(video_buffer.get(), width_, height_, 0, 56, 352, 176,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 100, 50, 24.0, vp_);

CropFrame(video_buffer.get(), width_, height_, 0, 30, 352, 225,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 400, 256, 31.3, vp_);

CropFrame(video_buffer.get(), width_, height_, 68, 0, 216, 288,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 480, 640, 32.15, vp_);

CropFrame(video_buffer.get(), width_, height_, 0, 12, 352, 264,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 960, 720, 32.2, vp_);

CropFrame(video_buffer.get(), width_, height_, 0, 44, 352, 198,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 1280, 720, 32.15, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 56, 352, 176),
100, 50, 24.0, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 30, 352, 225),
400, 256, 31.3, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 68, 0, 216, 288),
480, 640, 32.15, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 12, 352, 264),
960, 720, 32.2, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 44, 352, 198),
1280, 720, 32.15, vp_);

// Upsampling to odd size.
CropFrame(video_buffer.get(), width_, height_, 0, 26, 352, 233,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 501, 333, 32.05, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 26, 352, 233),
501, 333, 32.05, vp_);
// Downsample to odd size.
CropFrame(video_buffer.get(), width_, height_, 0, 34, 352, 219,
&cropped_source_frame);
TestSize(video_frame_, cropped_source_frame, 281, 175, 29.3, vp_);
TestSize(video_frame, CropBuffer(buffer, width_, height_, 0, 34, 352, 219),
281, 175, 29.3, vp_);

// Stop timer.
const int64_t runtime =
@ -229,24 +218,32 @@ void PreprocessFrameAndVerify(const VideoFrame& source,
EXPECT_EQ(target_height, (out_frame)->height());
}

void CropFrame(const uint8_t* source_data,
int source_width,
int source_height,
int offset_x,
int offset_y,
int cropped_width,
int cropped_height,
VideoFrame* cropped_frame) {
cropped_frame->CreateEmptyFrame(cropped_width, cropped_height, cropped_width,
(cropped_width + 1) / 2,
(cropped_width + 1) / 2);
EXPECT_EQ(0,
ConvertToI420(kI420, source_data, offset_x, offset_y, source_width,
source_height, 0, kVideoRotation_0, cropped_frame));
rtc::scoped_refptr<VideoFrameBuffer> CropBuffer(
const rtc::scoped_refptr<VideoFrameBuffer>& source_buffer,
int source_width,
int source_height,
int offset_x,
int offset_y,
int cropped_width,
int cropped_height) {
// Force even.
offset_x &= ~1;
offset_y &= ~1;

size_t y_start = offset_x + offset_y * source_buffer->StrideY();
size_t u_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideU();
size_t v_start = (offset_x / 2) + (offset_y / 2) * source_buffer->StrideV();

return rtc::scoped_refptr<VideoFrameBuffer>(
new rtc::RefCountedObject<WrappedI420Buffer>(
cropped_width, cropped_height, source_buffer->DataY() + y_start,
source_buffer->StrideY(), source_buffer->DataU() + u_start,
source_buffer->StrideU(), source_buffer->DataV() + v_start,
source_buffer->StrideV(), rtc::KeepRefUntilDone(source_buffer)));
}
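CropBuffer replaces the copying CropFrame: it returns a WrappedI420Buffer whose plane pointers alias the parent buffer, and rtc::KeepRefUntilDone(source_buffer) pins the parent until the wrapper dies. Usage sketch:

// Zero-copy crop of the top-left quadrant of |source|.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> quadrant =
    CropBuffer(source, source->width(), source->height(), 0, 0,
               source->width() / 2, source->height() / 2);
// No pixels were copied; |quadrant| holds a reference that keeps |source|'s
// planes valid for its own lifetime.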

void TestSize(const VideoFrame& source_frame,
const VideoFrame& cropped_source_frame,
const rtc::scoped_refptr<VideoFrameBuffer>& cropped_source_buffer,
int target_width,
int target_height,
double expected_psnr,
@ -263,12 +260,14 @@ void TestSize(const VideoFrame& source_frame,
// Scale |resampled_source_frame| back to the source scale.
VideoFrame resampled_source_frame;
resampled_source_frame.CopyFrame(*out_frame);
PreprocessFrameAndVerify(resampled_source_frame, cropped_source_frame.width(),
cropped_source_frame.height(), vpm, out_frame);
PreprocessFrameAndVerify(resampled_source_frame,
cropped_source_buffer->width(),
cropped_source_buffer->height(), vpm, out_frame);
WriteProcessedFrameForVisualInspection(resampled_source_frame, *out_frame);

// Compute PSNR against the cropped source frame and check expectation.
double psnr = I420PSNR(&cropped_source_frame, out_frame);
double psnr =
I420PSNR(*cropped_source_buffer, *out_frame->video_frame_buffer());
EXPECT_GT(psnr, expected_psnr);
printf(
"PSNR: %f. PSNR is between source of size %d %d, and a modified "

@ -33,7 +33,6 @@ class VideoProcessingTest : public ::testing::Test {
static void TearDownTestCase() { Trace::ReturnTrace(); }
VideoProcessing* vp_;
FILE* source_file_;
VideoFrame video_frame_;
const int width_;
const int half_width_;
const int height_;

@ -116,22 +116,18 @@ class VideoEncoderSoftwareFallbackWrapperTest : public ::testing::Test {
CountingFakeEncoder fake_encoder_;
VideoEncoderSoftwareFallbackWrapper fallback_wrapper_;
VideoCodec codec_ = {};
VideoFrame frame_;
std::unique_ptr<VideoFrame> frame_;
};

void VideoEncoderSoftwareFallbackWrapperTest::EncodeFrame() {
frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, (kWidth + 1) / 2,
(kWidth + 1) / 2);
memset(frame_.video_frame_buffer()->MutableDataY(), 16,
frame_.allocated_size(webrtc::kYPlane));
memset(frame_.video_frame_buffer()->MutableDataU(), 128,
frame_.allocated_size(webrtc::kUPlane));
memset(frame_.video_frame_buffer()->MutableDataV(), 128,
frame_.allocated_size(webrtc::kVPlane));

rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
kWidth, kHeight, kWidth, (kWidth + 1) / 2, (kWidth + 1) / 2);
buffer->SetToBlack();
std::vector<FrameType> types(1, kVideoFrameKey);

frame_.reset(new VideoFrame(buffer, 0, 0, webrtc::kVideoRotation_0));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
fallback_wrapper_.Encode(frame_, nullptr, &types));
fallback_wrapper_.Encode(*frame_, nullptr, &types));
}
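The deleted memsets wrote limited-range YUV black by hand (Y = 16, U = V = 128); SetToBlack() is the stride-aware, one-call equivalent the new code uses:

rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
    kWidth, kHeight, kWidth, (kWidth + 1) / 2, (kWidth + 1) / 2);
buffer->SetToBlack();  // Y plane = 16, U and V planes = 128.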

void VideoEncoderSoftwareFallbackWrapperTest::UtilizeFallbackEncoder() {
@ -225,9 +221,9 @@ TEST_F(VideoEncoderSoftwareFallbackWrapperTest,

// Encoding a frame using the fallback should arrive at the new callback.
std::vector<FrameType> types(1, kVideoFrameKey);
frame_.set_timestamp(frame_.timestamp() + 1000);
frame_->set_timestamp(frame_->timestamp() + 1000);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
fallback_wrapper_.Encode(frame_, nullptr, &types));
fallback_wrapper_.Encode(*frame_, nullptr, &types));

EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, fallback_wrapper_.Release());
}