Update webrtc/test to new VideoFrameBuffer interface

This is a follow-up cleanup for CL
https://codereview.webrtc.org/2847383002/.

TBR=stefan@webrtc.org

Bug: webrtc:7632
Change-Id: I8275e8edbd22b557cdb251f342847f4e8306299c
Reviewed-on: https://chromium-review.googlesource.com/524084
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18528}
This commit is contained in:
Magnus Jedvert 2017-06-07 11:32:50 +02:00 committed by Commit Bot
parent 72dbe2a211
commit 90e31904c6
8 changed files with 59 additions and 82 deletions

View File

@@ -187,15 +187,15 @@ TEST(TestVideoFrame, ShallowCopy) {
}
TEST(TestVideoFrame, TextureInitialValues) {
test::FakeNativeHandle* handle = new test::FakeNativeHandle();
VideoFrame frame = test::FakeNativeHandle::CreateFrame(
handle, 640, 480, 100, 10, webrtc::kVideoRotation_0);
VideoFrame frame = test::FakeNativeBuffer::CreateFrame(
640, 480, 100, 10, webrtc::kVideoRotation_0);
EXPECT_EQ(640, frame.width());
EXPECT_EQ(480, frame.height());
EXPECT_EQ(100u, frame.timestamp());
EXPECT_EQ(10, frame.render_time_ms());
ASSERT_TRUE(frame.video_frame_buffer() != nullptr);
EXPECT_EQ(handle, frame.video_frame_buffer()->native_handle());
EXPECT_TRUE(frame.video_frame_buffer()->type() ==
VideoFrameBuffer::Type::kNative);
frame.set_timestamp(200);
EXPECT_EQ(200u, frame.timestamp());

View File

@@ -13,14 +13,12 @@
namespace webrtc {
namespace test {
VideoFrame FakeNativeHandle::CreateFrame(FakeNativeHandle* native_handle,
int width,
VideoFrame FakeNativeBuffer::CreateFrame(int width,
int height,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation) {
return VideoFrame(new rtc::RefCountedObject<FakeNativeHandleBuffer>(
native_handle, width, height),
return VideoFrame(new rtc::RefCountedObject<FakeNativeBuffer>(width, height),
timestamp, render_time_ms, rotation);
}
} // namespace test

View File

@@ -18,31 +18,29 @@
namespace webrtc {
namespace test {
class FakeNativeHandle {
class FakeNativeBuffer : public VideoFrameBuffer {
public:
static VideoFrame CreateFrame(FakeNativeHandle* native_handle,
int width,
static VideoFrame CreateFrame(int width,
int height,
uint32_t timestamp,
int64_t render_time_ms,
VideoRotation rotation);
};
class FakeNativeHandleBuffer : public NativeHandleBuffer {
public:
FakeNativeHandleBuffer(void* native_handle, int width, int height)
: NativeHandleBuffer(native_handle, width, height) {}
FakeNativeBuffer(int width, int height) : width_(width), height_(height) {}
~FakeNativeHandleBuffer() {
delete reinterpret_cast<FakeNativeHandle*>(native_handle_);
}
Type type() const override { return Type::kNative; }
int width() const override { return width_; }
int height() const override { return height_; }
private:
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override {
rtc::scoped_refptr<I420BufferInterface> ToI420() override {
rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(width_, height_);
I420Buffer::SetBlack(buffer);
return buffer;
}
const int width_;
const int height_;
};
} // namespace test

View File

@@ -46,20 +46,18 @@ class SquareGenerator : public FrameGenerator {
height_ = static_cast<int>(height);
RTC_CHECK(width_ > 0);
RTC_CHECK(height_ > 0);
half_width_ = (width_ + 1) / 2;
y_size_ = width_ * height_;
uv_size_ = half_width_ * ((height_ + 1) / 2);
}
VideoFrame* NextFrame() override {
rtc::CritScope lock(&crit_);
// Ensure stride == width.
rtc::scoped_refptr<I420Buffer> buffer(
I420Buffer::Create(width_, height_, width_, half_width_, half_width_));
memset(buffer->MutableDataY(), 127, y_size_);
memset(buffer->MutableDataU(), 127, uv_size_);
memset(buffer->MutableDataV(), 127, uv_size_);
rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(width_, height_));
memset(buffer->MutableDataY(), 127, height_ * buffer->StrideY());
memset(buffer->MutableDataU(), 127,
buffer->ChromaHeight() * buffer->StrideU());
memset(buffer->MutableDataV(), 127,
buffer->ChromaHeight() * buffer->StrideV());
for (const auto& square : squares_)
square->Draw(buffer);
@@ -112,9 +110,6 @@ class SquareGenerator : public FrameGenerator {
rtc::CriticalSection crit_;
int width_ GUARDED_BY(&crit_);
int height_ GUARDED_BY(&crit_);
int half_width_ GUARDED_BY(&crit_);
size_t y_size_ GUARDED_BY(&crit_);
size_t uv_size_ GUARDED_BY(&crit_);
std::vector<std::unique_ptr<Square>> squares_ GUARDED_BY(&crit_);
std::unique_ptr<VideoFrame> frame_ GUARDED_BY(&crit_);
};
@@ -253,25 +248,21 @@ class ScrollingImageFrameGenerator : public FrameGenerator {
int pixels_scrolled_y =
static_cast<int>(scroll_margin_y * scroll_factor + 0.5);
int offset_y = (current_source_frame_->video_frame_buffer()->StrideY() *
pixels_scrolled_y) +
pixels_scrolled_x;
int offset_u = (current_source_frame_->video_frame_buffer()->StrideU() *
(pixels_scrolled_y / 2)) +
rtc::scoped_refptr<I420BufferInterface> i420_buffer =
current_source_frame_->video_frame_buffer()->ToI420();
int offset_y =
(i420_buffer->StrideY() * pixels_scrolled_y) + pixels_scrolled_x;
int offset_u = (i420_buffer->StrideU() * (pixels_scrolled_y / 2)) +
(pixels_scrolled_x / 2);
int offset_v = (current_source_frame_->video_frame_buffer()->StrideV() *
(pixels_scrolled_y / 2)) +
int offset_v = (i420_buffer->StrideV() * (pixels_scrolled_y / 2)) +
(pixels_scrolled_x / 2);
rtc::scoped_refptr<VideoFrameBuffer> frame_buffer(
current_source_frame_->video_frame_buffer());
current_frame_ = rtc::Optional<webrtc::VideoFrame>(webrtc::VideoFrame(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
target_width_, target_height_,
&frame_buffer->DataY()[offset_y], frame_buffer->StrideY(),
&frame_buffer->DataU()[offset_u], frame_buffer->StrideU(),
&frame_buffer->DataV()[offset_v], frame_buffer->StrideV(),
KeepRefUntilDone(frame_buffer)),
target_width_, target_height_, &i420_buffer->DataY()[offset_y],
i420_buffer->StrideY(), &i420_buffer->DataU()[offset_u],
i420_buffer->StrideU(), &i420_buffer->DataV()[offset_v],
i420_buffer->StrideV(), KeepRefUntilDone(i420_buffer)),
kVideoRotation_0, 0));
}

View File

@@ -59,14 +59,16 @@ class FrameGeneratorTest : public ::testing::Test {
void CheckFrameAndMutate(VideoFrame* frame, uint8_t y, uint8_t u, uint8_t v) {
// Check that frame is valid, has the correct color and timestamp are clean.
ASSERT_NE(nullptr, frame);
rtc::scoped_refptr<I420BufferInterface> i420_buffer =
frame->video_frame_buffer()->ToI420();
const uint8_t* buffer;
buffer = frame->video_frame_buffer()->DataY();
buffer = i420_buffer->DataY();
for (int i = 0; i < y_size; ++i)
ASSERT_EQ(y, buffer[i]);
buffer = frame->video_frame_buffer()->DataU();
buffer = i420_buffer->DataU();
for (int i = 0; i < uv_size; ++i)
ASSERT_EQ(u, buffer[i]);
buffer = frame->video_frame_buffer()->DataV();
buffer = i420_buffer->DataV();
for (int i = 0; i < uv_size; ++i)
ASSERT_EQ(v, buffer[i]);
EXPECT_EQ(0, frame->ntp_time_ms());

View File

@@ -52,28 +52,22 @@ bool FrameBufsEqual(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& f1,
return false;
}
if (f1->width() != f2->width() || f1->height() != f2->height()) {
if (f1->width() != f2->width() || f1->height() != f2->height() ||
f1->type() != f2->type()) {
return false;
}
// Exclude native handle
if (f1->native_handle()) {
return f1->native_handle() == f2->native_handle();
}
if (f2->native_handle()) {
return false;
}
const int half_width = (f1->width() + 1) / 2;
const int half_height = (f1->height() + 1) / 2;
return EqualPlane(f1->DataY(), f2->DataY(),
f1->StrideY(), f2->StrideY(),
f1->width(), f1->height()) &&
EqualPlane(f1->DataU(), f2->DataU(),
f1->StrideU(), f2->StrideU(),
half_width, half_height) &&
EqualPlane(f1->DataV(), f2->DataV(),
f1->StrideV(), f2->StrideV(),
half_width, half_height);
rtc::scoped_refptr<webrtc::I420BufferInterface> f1_i420 = f1->ToI420();
rtc::scoped_refptr<webrtc::I420BufferInterface> f2_i420 = f2->ToI420();
return EqualPlane(f1_i420->DataY(), f2_i420->DataY(),
f1_i420->StrideY(), f2_i420->StrideY(),
f1_i420->width(), f1_i420->height()) &&
EqualPlane(f1_i420->DataU(), f2_i420->DataU(),
f1_i420->StrideU(), f2_i420->StrideU(),
f1_i420->ChromaWidth(), f1_i420->ChromaHeight()) &&
EqualPlane(f1_i420->DataV(), f2_i420->DataV(),
f1_i420->StrideV(), f2_i420->StrideV(),
f1_i420->ChromaWidth(), f1_i420->ChromaHeight());
}
rtc::scoped_refptr<I420Buffer> ReadI420Buffer(int width, int height, FILE *f) {

View File

@@ -61,8 +61,7 @@ TEST_F(YuvFrameReaderTest, NumberOfFrames) {
}
TEST_F(YuvFrameReaderTest, ReadFrame) {
rtc::scoped_refptr<VideoFrameBuffer> buffer;
buffer = frame_reader_->ReadFrame();
rtc::scoped_refptr<I420BufferInterface> buffer = frame_reader_->ReadFrame();
ASSERT_TRUE(buffer);
// Expect I420 packed as YUV.
EXPECT_EQ(kInputFileContents[0], buffer->DataY()[0]);

View File

@@ -1974,17 +1974,14 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
int width = 168;
int height = 132;
test::FakeNativeHandle* handle1 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle2 = new test::FakeNativeHandle();
test::FakeNativeHandle* handle3 = new test::FakeNativeHandle();
input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle1, width, height, 1, 1, kVideoRotation_0));
input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle2, width, height, 2, 2, kVideoRotation_0));
input_frames.push_back(test::FakeNativeBuffer::CreateFrame(
width, height, 1, 1, kVideoRotation_0));
input_frames.push_back(test::FakeNativeBuffer::CreateFrame(
width, height, 2, 2, kVideoRotation_0));
input_frames.push_back(CreateVideoFrame(width, height, 3));
input_frames.push_back(CreateVideoFrame(width, height, 4));
input_frames.push_back(test::FakeNativeHandle::CreateFrame(
handle3, width, height, 5, 5, kVideoRotation_0));
input_frames.push_back(test::FakeNativeBuffer::CreateFrame(
width, height, 5, 5, kVideoRotation_0));
video_send_stream_->Start();
test::FrameForwarder forwarder;
@@ -2020,9 +2017,7 @@ VideoFrame CreateVideoFrame(int width, int height, uint8_t data) {
const int kSizeY = width * height * 2;
std::unique_ptr<uint8_t[]> buffer(new uint8_t[kSizeY]);
memset(buffer.get(), data, kSizeY);
VideoFrame frame(
I420Buffer::Create(width, height, width, width / 2, width / 2),
kVideoRotation_0, data);
VideoFrame frame(I420Buffer::Create(width, height), kVideoRotation_0, data);
frame.set_timestamp(data);
// Use data as a ms timestamp.
frame.set_timestamp_us(data * rtc::kNumMicrosecsPerMillisec);