From f425b55eeb3711de323105b68559c6007829dc5f Mon Sep 17 00:00:00 2001 From: "wuchengli@chromium.org" <wuchengli@chromium.org> Date: Fri, 20 Jun 2014 12:04:05 +0000 Subject: [PATCH] Add tests of texture frames in video_send_stream_test. Also fix a bug in ViEFrameProviderBase::DeliverFrame that a texture frame was only delivered to the first callback. BUG=chromium:362437 TEST=Run video engine test and webrtc call on CrOS. R=kjellander@webrtc.org, pbos@webrtc.org, stefan@webrtc.org, wuchengli@google.com Review URL: https://webrtc-codereview.appspot.com/15789004 git-svn-id: http://webrtc.googlecode.com/svn/trunk@6506 4adac7df-926f-26a2-2b94-8c16560cd09d --- webrtc/video/video_send_stream_tests.cc | 168 ++++++++++++++++++ webrtc/video_engine/vie_encoder.cc | 63 ++++--- .../video_engine/vie_frame_provider_base.cc | 16 +- 3 files changed, 212 insertions(+), 35 deletions(-) diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc index dcbcfe1bb8..3ae15d480d 100644 --- a/webrtc/video/video_send_stream_tests.cc +++ b/webrtc/video/video_send_stream_tests.cc @@ -13,6 +13,8 @@ #include "webrtc/call.h" #include "webrtc/common_video/interface/i420_video_frame.h" +#include "webrtc/common_video/interface/native_handle.h" +#include "webrtc/common_video/interface/texture_video_frame.h" #include "webrtc/frame_callback.h" #include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h" #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h" @@ -20,7 +22,9 @@ #include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/event_wrapper.h" +#include "webrtc/system_wrappers/interface/ref_count.h" #include "webrtc/system_wrappers/interface/scoped_ptr.h" +#include "webrtc/system_wrappers/interface/scoped_vector.h" #include "webrtc/system_wrappers/interface/sleep.h" #include "webrtc/system_wrappers/interface/thread_wrapper.h" #include "webrtc/test/direct_transport.h" @@ 
-38,6 +42,23 @@ namespace webrtc { enum VideoFormat { kGeneric, kVP8, }; +void ExpectEqualFrames(const I420VideoFrame& frame1, + const I420VideoFrame& frame2); +void ExpectEqualTextureFrames(const I420VideoFrame& frame1, + const I420VideoFrame& frame2); +void ExpectEqualBufferFrames(const I420VideoFrame& frame1, + const I420VideoFrame& frame2); +void ExpectEqualFramesVector(const std::vector<I420VideoFrame*>& frames1, + const std::vector<I420VideoFrame*>& frames2); +I420VideoFrame* CreateI420VideoFrame(int width, int height, uint8_t data); + +class FakeNativeHandle : public NativeHandle { + public: + FakeNativeHandle() {} + virtual ~FakeNativeHandle() {} + virtual void* GetHandle() { return NULL; } +}; + class VideoSendStreamTest : public ::testing::Test { public: VideoSendStreamTest() @@ -1219,4 +1240,151 @@ TEST_F(VideoSendStreamTest, MinTransmitBitrateRespectsRemb) { call->DestroyVideoSendStream(send_stream_); } +TEST_F(VideoSendStreamTest, CapturesTextureAndI420VideoFrames) { + class FrameObserver : public I420FrameCallback { + public: + FrameObserver() : output_frame_event_(EventWrapper::Create()) {} + + void FrameCallback(I420VideoFrame* video_frame) OVERRIDE { + // Clone the frame because the caller owns it. + output_frames_.push_back(video_frame->CloneFrame()); + output_frame_event_->Set(); + } + + void WaitOutputFrame() { + const unsigned long kWaitFrameTimeoutMs = 3000; + EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(kWaitFrameTimeoutMs)) + << "Timeout while waiting for output frames."; + } + + const std::vector<I420VideoFrame*>& output_frames() const { + return output_frames_.get(); + } + + private: + // Delivered output frames. + ScopedVector<I420VideoFrame> output_frames_; + + // Indicate an output frame has arrived. + scoped_ptr<EventWrapper> output_frame_event_; + }; + + // Initialize send stream. 
+ test::NullTransport transport; + Call::Config call_config(&transport); + scoped_ptr<Call> call(Call::Create(call_config)); + CreateTestConfig(call.get(), 1); + FrameObserver observer; + send_config_.pre_encode_callback = &observer; + send_stream_ = + call->CreateVideoSendStream(send_config_, video_streams_, NULL); + + // Prepare five input frames. Send I420VideoFrame and TextureVideoFrame + // alternatively. + ScopedVector<I420VideoFrame> input_frames; + int width = static_cast<int>(video_streams_[0].width); + int height = static_cast<int>(video_streams_[0].height); + webrtc::RefCountImpl<FakeNativeHandle>* handle1 = + new webrtc::RefCountImpl<FakeNativeHandle>(); + webrtc::RefCountImpl<FakeNativeHandle>* handle2 = + new webrtc::RefCountImpl<FakeNativeHandle>(); + webrtc::RefCountImpl<FakeNativeHandle>* handle3 = + new webrtc::RefCountImpl<FakeNativeHandle>(); + input_frames.push_back(new TextureVideoFrame(handle1, width, height, 1, 1)); + input_frames.push_back(new TextureVideoFrame(handle2, width, height, 2, 2)); + input_frames.push_back(CreateI420VideoFrame(width, height, 1)); + input_frames.push_back(CreateI420VideoFrame(width, height, 2)); + input_frames.push_back(new TextureVideoFrame(handle3, width, height, 3, 3)); + + send_stream_->Start(); + for (size_t i = 0; i < input_frames.size(); i++) { + // Make a copy of the input frame because the buffer will be swapped. + scoped_ptr<I420VideoFrame> frame(input_frames[i]->CloneFrame()); + send_stream_->Input()->SwapFrame(frame.get()); + // Do not send the next frame too fast, so the frame dropper won't drop it. + if (i < input_frames.size() - 1) + SleepMs(1000 / video_streams_[0].max_framerate); + // Wait until the output frame is received before sending the next input + // frame. Or the previous input frame may be replaced without delivering. + observer.WaitOutputFrame(); + } + send_stream_->Stop(); + + // Test if the input and output frames are the same. render_time_ms and + // timestamp are not compared because capturer sets those values. 
+ ExpectEqualFramesVector(input_frames.get(), observer.output_frames()); + + call->DestroyVideoSendStream(send_stream_); +} + +void ExpectEqualFrames(const I420VideoFrame& frame1, + const I420VideoFrame& frame2) { + if (frame1.native_handle() != NULL || frame2.native_handle() != NULL) + ExpectEqualTextureFrames(frame1, frame2); + else + ExpectEqualBufferFrames(frame1, frame2); +} + +void ExpectEqualTextureFrames(const I420VideoFrame& frame1, + const I420VideoFrame& frame2) { + EXPECT_EQ(frame1.native_handle(), frame2.native_handle()); + EXPECT_EQ(frame1.width(), frame2.width()); + EXPECT_EQ(frame1.height(), frame2.height()); +} + +void ExpectEqualBufferFrames(const I420VideoFrame& frame1, + const I420VideoFrame& frame2) { + EXPECT_EQ(frame1.width(), frame2.width()); + EXPECT_EQ(frame1.height(), frame2.height()); + EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane)); + EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane)); + EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane)); + EXPECT_EQ(frame1.ntp_time_ms(), frame2.ntp_time_ms()); + ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane)); + EXPECT_EQ(0, + memcmp(frame1.buffer(kYPlane), + frame2.buffer(kYPlane), + frame1.allocated_size(kYPlane))); + ASSERT_EQ(frame1.allocated_size(kUPlane), frame2.allocated_size(kUPlane)); + EXPECT_EQ(0, + memcmp(frame1.buffer(kUPlane), + frame2.buffer(kUPlane), + frame1.allocated_size(kUPlane))); + ASSERT_EQ(frame1.allocated_size(kVPlane), frame2.allocated_size(kVPlane)); + EXPECT_EQ(0, + memcmp(frame1.buffer(kVPlane), + frame2.buffer(kVPlane), + frame1.allocated_size(kVPlane))); +} + +void ExpectEqualFramesVector(const std::vector<I420VideoFrame*>& frames1, + const std::vector<I420VideoFrame*>& frames2) { + EXPECT_EQ(frames1.size(), frames2.size()); + for (size_t i = 0; i < std::min(frames1.size(), frames2.size()); ++i) + ExpectEqualFrames(*frames1[i], *frames2[i]); +} + +I420VideoFrame* CreateI420VideoFrame(int width, int height, uint8_t data) { + I420VideoFrame* frame = new 
I420VideoFrame(); + const int kSizeY = width * height * 2; + const int kSizeUV = width * height; + scoped_ptr<uint8_t[]> buffer(new uint8_t[kSizeY]); + memset(buffer.get(), data, kSizeY); + frame->CreateFrame(kSizeY, + buffer.get(), + kSizeUV, + buffer.get(), + kSizeUV, + buffer.get(), + width, + height, + width, + width / 2, + width / 2); + frame->set_timestamp(data); + frame->set_ntp_time_ms(data); + frame->set_render_time_ms(data); + return frame; +} + } // namespace webrtc diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc index 40a61deb6d..a009c06a67 100644 --- a/webrtc/video_engine/vie_encoder.cc +++ b/webrtc/video_engine/vie_encoder.cc @@ -487,10 +487,6 @@ void ViEEncoder::DeliverFrame(int id, } encoder_paused_and_dropped_frame_ = false; } - if (video_frame->native_handle() != NULL) { - // TODO(wuchengli): add texture support. http://crbug.com/362437 - return; - } // Convert render time, in ms, to RTP timestamp. const int kMsToRtpTimestamp = 90; @@ -501,22 +497,6 @@ void ViEEncoder::DeliverFrame(int id, TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame->render_time_ms(), "Encode"); video_frame->set_timestamp(time_stamp); - { - CriticalSectionScoped cs(callback_cs_.get()); - if (effect_filter_) { - unsigned int length = CalcBufferSize(kI420, - video_frame->width(), - video_frame->height()); - scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]); - ExtractBuffer(*video_frame, length, video_buffer.get()); - effect_filter_->Transform(length, - video_buffer.get(), - video_frame->ntp_time_ms(), - video_frame->timestamp(), - video_frame->width(), - video_frame->height()); - } - } // Make sure the CSRC list is correct. if (num_csrcs > 0) { @@ -530,17 +510,37 @@ void ViEEncoder::DeliverFrame(int id, } default_rtp_rtcp_->SetCSRCs(tempCSRC, (uint8_t) num_csrcs); } - // Pass frame via preprocessor. + I420VideoFrame* decimated_frame = NULL; - const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame); - if (ret == 1) { - // Drop this frame. 
- return; + // TODO(wuchengli): support texture frames. + if (video_frame->native_handle() == NULL) { + { + CriticalSectionScoped cs(callback_cs_.get()); + if (effect_filter_) { + unsigned int length = + CalcBufferSize(kI420, video_frame->width(), video_frame->height()); + scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]); + ExtractBuffer(*video_frame, length, video_buffer.get()); + effect_filter_->Transform(length, + video_buffer.get(), + video_frame->ntp_time_ms(), + video_frame->timestamp(), + video_frame->width(), + video_frame->height()); + } + } + + // Pass frame via preprocessor. + const int ret = vpm_.PreprocessFrame(*video_frame, &decimated_frame); + if (ret == 1) { + // Drop this frame. + return; + } + if (ret != VPM_OK) { + return; + } } - if (ret != VPM_OK) { - return; - } - // Frame was not sampled => use original. + // If the frame was not resampled or scaled => use original. if (decimated_frame == NULL) { decimated_frame = video_frame; } @@ -551,6 +551,11 @@ void ViEEncoder::DeliverFrame(int id, pre_encode_callback_->FrameCallback(decimated_frame); } + if (video_frame->native_handle() != NULL) { + // TODO(wuchengli): add texture support. http://crbug.com/362437 + return; + } + #ifdef VIDEOCODEC_VP8 if (vcm_.SendCodec() == webrtc::kVideoCodecVP8) { webrtc::CodecSpecificInfo codec_specific_info; diff --git a/webrtc/video_engine/vie_frame_provider_base.cc b/webrtc/video_engine/vie_frame_provider_base.cc index 3dceb17005..a5cd838e2b 100644 --- a/webrtc/video_engine/vie_frame_provider_base.cc +++ b/webrtc/video_engine/vie_frame_provider_base.cc @@ -55,18 +55,22 @@ void ViEFrameProviderBase::DeliverFrame( // Deliver the frame to all registered callbacks. if (frame_callbacks_.size() > 0) { - if (frame_callbacks_.size() == 1 || video_frame->native_handle() != NULL) { + if (frame_callbacks_.size() == 1) { // We don't have to copy the frame. 
frame_callbacks_.front()->DeliverFrame(id_, video_frame, num_csrcs, CSRC); } else { - // Make a copy of the frame for all callbacks. for (FrameCallbacks::iterator it = frame_callbacks_.begin(); it != frame_callbacks_.end(); ++it) { - if (!extra_frame_.get()) { - extra_frame_.reset(new I420VideoFrame()); + if (video_frame->native_handle() != NULL) { + (*it)->DeliverFrame(id_, video_frame, num_csrcs, CSRC); + } else { + // Make a copy of the frame for all callbacks. + if (!extra_frame_.get()) { + extra_frame_.reset(new I420VideoFrame()); + } + extra_frame_->CopyFrame(*video_frame); + (*it)->DeliverFrame(id_, extra_frame_.get(), num_csrcs, CSRC); } - extra_frame_->CopyFrame(*video_frame); - (*it)->DeliverFrame(id_, extra_frame_.get(), num_csrcs, CSRC); } } }