diff --git a/webrtc/api/java/jni/androidmediaencoder_jni.cc b/webrtc/api/java/jni/androidmediaencoder_jni.cc index d9d7a809b8..1530f003dc 100644 --- a/webrtc/api/java/jni/androidmediaencoder_jni.cc +++ b/webrtc/api/java/jni/androidmediaencoder_jni.cc @@ -666,7 +666,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( quality_scaler_.GetScaledResolution(); if (scaled_resolution.width != frame.width() || scaled_resolution.height != frame.height()) { - if (frame.native_handle() != nullptr) { + if (frame.video_frame_buffer()->native_handle() != nullptr) { rtc::scoped_refptr scaled_buffer( static_cast( frame.video_frame_buffer().get())->ScaleAndRotate( @@ -691,7 +691,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread( const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; bool encode_status = true; - if (!input_frame.native_handle()) { + if (!input_frame.video_frame_buffer()->native_handle()) { int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, j_dequeue_input_buffer_method_); CHECK_EXCEPTION(jni); @@ -741,7 +741,8 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread( const webrtc::VideoFrame& frame) { RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); - const bool is_texture_frame = frame.native_handle() != nullptr; + const bool is_texture_frame = + frame.video_frame_buffer()->native_handle() != nullptr; const bool reconfigure_due_to_format = is_texture_frame != use_surface_; const bool reconfigure_due_to_size = frame.width() != width_ || frame.height() != height_; @@ -802,8 +803,8 @@ bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni, bool key_frame, const webrtc::VideoFrame& frame) { RTC_DCHECK(codec_thread_checker_.CalledOnValidThread()); RTC_CHECK(use_surface_); - NativeHandleImpl* handle = - static_cast(frame.native_handle()); + NativeHandleImpl* handle = static_cast( + frame.video_frame_buffer()->native_handle()); jfloatArray sampling_matrix = 
jni->NewFloatArray(16); jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix); @@ -1256,4 +1257,3 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( } } // namespace webrtc_jni - diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc index 9b2bdd762d..b2f59eaae8 100644 --- a/webrtc/common_video/i420_video_frame_unittest.cc +++ b/webrtc/common_video/i420_video_frame_unittest.cc @@ -246,7 +246,8 @@ TEST(TestVideoFrame, TextureInitialValues) { EXPECT_EQ(480, frame.height()); EXPECT_EQ(100u, frame.timestamp()); EXPECT_EQ(10, frame.render_time_ms()); - EXPECT_EQ(handle, frame.native_handle()); + ASSERT_TRUE(frame.video_frame_buffer() != nullptr); + EXPECT_EQ(handle, frame.video_frame_buffer()->native_handle()); frame.set_timestamp(200); EXPECT_EQ(200u, frame.timestamp()); diff --git a/webrtc/common_video/video_frame.cc b/webrtc/common_video/video_frame.cc index a30f658ea0..28e463112c 100644 --- a/webrtc/common_video/video_frame.cc +++ b/webrtc/common_video/video_frame.cc @@ -113,22 +113,12 @@ void VideoFrame::CreateFrame(const uint8_t* buffer, } void VideoFrame::CopyFrame(const VideoFrame& videoFrame) { - if (videoFrame.IsZeroSize()) { - video_frame_buffer_ = nullptr; - } else if (videoFrame.native_handle()) { - video_frame_buffer_ = videoFrame.video_frame_buffer(); - } else { - CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane), - videoFrame.buffer(kVPlane), videoFrame.width(), - videoFrame.height(), videoFrame.stride(kYPlane), - videoFrame.stride(kUPlane), videoFrame.stride(kVPlane), - kVideoRotation_0); - } + ShallowCopy(videoFrame); - timestamp_ = videoFrame.timestamp_; - ntp_time_ms_ = videoFrame.ntp_time_ms_; - render_time_ms_ = videoFrame.render_time_ms_; - rotation_ = videoFrame.rotation_; + // If backed by a plain memory buffer, create a new, non-shared, copy. 
+ if (video_frame_buffer_ && !video_frame_buffer_->native_handle()) { + video_frame_buffer_ = I420Buffer::Copy(video_frame_buffer_); + } } void VideoFrame::ShallowCopy(const VideoFrame& videoFrame) { @@ -177,10 +167,6 @@ bool VideoFrame::IsZeroSize() const { return !video_frame_buffer_; } -void* VideoFrame::native_handle() const { - return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr; -} - rtc::scoped_refptr VideoFrame::video_frame_buffer() const { return video_frame_buffer_; } @@ -191,7 +177,7 @@ void VideoFrame::set_video_frame_buffer( } VideoFrame VideoFrame::ConvertNativeToI420Frame() const { - RTC_DCHECK(native_handle()); + RTC_DCHECK(video_frame_buffer_->native_handle()); VideoFrame frame; frame.ShallowCopy(*this); frame.set_video_frame_buffer(video_frame_buffer_->NativeToI420Buffer()); diff --git a/webrtc/modules/video_coding/video_sender.cc b/webrtc/modules/video_coding/video_sender.cc index bbf6480c03..3a1debae52 100644 --- a/webrtc/modules/video_coding/video_sender.cc +++ b/webrtc/modules/video_coding/video_sender.cc @@ -315,7 +315,8 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame, return VCM_PARAMETER_ERROR; } VideoFrame converted_frame = videoFrame; - if (converted_frame.native_handle() && !_encoder->SupportsNativeHandle()) { + if (converted_frame.video_frame_buffer()->native_handle() && + !_encoder->SupportsNativeHandle()) { // This module only supports software encoding. // TODO(pbos): Offload conversion from the encoder thread. 
converted_frame = converted_frame.ConvertNativeToI420Frame(); diff --git a/webrtc/test/frame_utils.cc b/webrtc/test/frame_utils.cc index 0f41144745..21daa44f97 100644 --- a/webrtc/test/frame_utils.cc +++ b/webrtc/test/frame_utils.cc @@ -16,53 +16,60 @@ namespace test { bool EqualPlane(const uint8_t* data1, const uint8_t* data2, - int stride, + int stride1, + int stride2, int width, int height) { for (int y = 0; y < height; ++y) { if (memcmp(data1, data2, width) != 0) return false; - data1 += stride; - data2 += stride; + data1 += stride1; + data2 += stride2; } return true; } + bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2) { - if (f1.width() != f2.width() || f1.height() != f2.height() || - f1.stride(webrtc::kYPlane) != f2.stride(webrtc::kYPlane) || - f1.stride(webrtc::kUPlane) != f2.stride(webrtc::kUPlane) || - f1.stride(webrtc::kVPlane) != f2.stride(webrtc::kVPlane) || - f1.timestamp() != f2.timestamp() || + if (f1.timestamp() != f2.timestamp() || f1.ntp_time_ms() != f2.ntp_time_ms() || f1.render_time_ms() != f2.render_time_ms()) { return false; } - const int half_width = (f1.width() + 1) / 2; - const int half_height = (f1.height() + 1) / 2; - return EqualPlane(f1.buffer(webrtc::kYPlane), f2.buffer(webrtc::kYPlane), - f1.stride(webrtc::kYPlane), f1.width(), f1.height()) && - EqualPlane(f1.buffer(webrtc::kUPlane), f2.buffer(webrtc::kUPlane), - f1.stride(webrtc::kUPlane), half_width, half_height) && - EqualPlane(f1.buffer(webrtc::kVPlane), f2.buffer(webrtc::kVPlane), - f1.stride(webrtc::kVPlane), half_width, half_height); + return FrameBufsEqual(f1.video_frame_buffer(), f2.video_frame_buffer()); } bool FrameBufsEqual(const rtc::scoped_refptr& f1, const rtc::scoped_refptr& f2) { - if (f1->width() != f2->width() || f1->height() != f2->height() || - f1->stride(webrtc::kYPlane) != f2->stride(webrtc::kYPlane) || - f1->stride(webrtc::kUPlane) != f2->stride(webrtc::kUPlane) || - f1->stride(webrtc::kVPlane) != f2->stride(webrtc::kVPlane)) { + if 
(f1 == f2) { + return true; + } + // Exclude nullptr (except if both are nullptr, as above) + if (!f1 || !f2) { + return false; + } + + if (f1->width() != f2->width() || f1->height() != f2->height()) { + return false; + } + // Exclude native handle + if (f1->native_handle()) { + return f1->native_handle() == f2->native_handle(); + } + + if (f2->native_handle()) { return false; } const int half_width = (f1->width() + 1) / 2; const int half_height = (f1->height() + 1) / 2; return EqualPlane(f1->data(webrtc::kYPlane), f2->data(webrtc::kYPlane), - f1->stride(webrtc::kYPlane), f1->width(), f1->height()) && + f1->stride(webrtc::kYPlane), f2->stride(webrtc::kYPlane), + f1->width(), f1->height()) && EqualPlane(f1->data(webrtc::kUPlane), f2->data(webrtc::kUPlane), - f1->stride(webrtc::kUPlane), half_width, half_height) && + f1->stride(webrtc::kUPlane), f2->stride(webrtc::kUPlane), + half_width, half_height) && EqualPlane(f1->data(webrtc::kVPlane), f2->data(webrtc::kVPlane), - f1->stride(webrtc::kVPlane), half_width, half_height); + f1->stride(webrtc::kVPlane), f2->stride(webrtc::kVPlane), + half_width, half_height); } } // namespace test diff --git a/webrtc/test/frame_utils.h b/webrtc/test/frame_utils.h index 668d9994ab..aef3c9ff2a 100644 --- a/webrtc/test/frame_utils.h +++ b/webrtc/test/frame_utils.h @@ -20,10 +20,19 @@ namespace test { bool EqualPlane(const uint8_t* data1, const uint8_t* data2, - int stride, + int stride1, + int stride2, int width, int height); +static inline bool EqualPlane(const uint8_t* data1, + const uint8_t* data2, + int stride, + int width, + int height) { + return EqualPlane(data1, data2, stride, stride, width, height); +} + bool FramesEqual(const webrtc::VideoFrame& f1, const webrtc::VideoFrame& f2); bool FrameBufsEqual(const rtc::scoped_refptr& f1, diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc index b36c2577f9..22a0f9d0bd 100644 --- a/webrtc/video/video_capture_input_unittest.cc +++ 
b/webrtc/video/video_capture_input_unittest.cc @@ -16,6 +16,7 @@ #include "webrtc/base/event.h" #include "webrtc/base/refcount.h" #include "webrtc/test/fake_texture_frame.h" +#include "webrtc/test/frame_utils.h" #include "webrtc/video/send_statistics_proxy.h" // If an output frame does not arrive in 500ms, the test will fail. @@ -23,9 +24,6 @@ namespace webrtc { -bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2); -bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2); -bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2); bool EqualFramesVector(const std::vector>& frames1, const std::vector>& frames2); std::unique_ptr CreateVideoFrame(uint8_t length); @@ -54,7 +52,8 @@ class VideoCaptureInputTest : public ::testing::Test { EXPECT_TRUE(capture_event_.Wait(FRAME_TIMEOUT_MS)); VideoFrame frame; EXPECT_TRUE(input_->GetVideoFrame(&frame)); - if (!frame.native_handle()) { + ASSERT_TRUE(frame.video_frame_buffer()); + if (!frame.video_frame_buffer()->native_handle()) { output_frame_ybuffers_.push_back( static_cast(&frame)->buffer(kYPlane)); } @@ -168,7 +167,9 @@ TEST_F(VideoCaptureInputTest, TestTextureFrames) { i + 1, webrtc::kVideoRotation_0)))); AddInputFrame(input_frames_[i].get()); WaitOutputFrame(); - EXPECT_EQ(dummy_handle, output_frames_[i]->native_handle()); + ASSERT_TRUE(output_frames_[i]->video_frame_buffer()); + EXPECT_EQ(dummy_handle, + output_frames_[i]->video_frame_buffer()->native_handle()); } EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); @@ -198,7 +199,9 @@ TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) { dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)))); AddInputFrame(input_frames_[0].get()); WaitOutputFrame(); - EXPECT_EQ(dummy_handle, output_frames_[0]->native_handle()); + ASSERT_TRUE(output_frames_[0]->video_frame_buffer()); + EXPECT_EQ(dummy_handle, + output_frames_[0]->video_frame_buffer()->native_handle()); 
input_frames_.push_back(CreateVideoFrame(2)); AddInputFrame(input_frames_[1].get()); @@ -222,43 +225,17 @@ TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) { EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_)); } -bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { - if (frame1.native_handle() || frame2.native_handle()) - return EqualTextureFrames(frame1, frame2); - return EqualBufferFrames(frame1, frame2); -} - -bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) { - return ((frame1.native_handle() == frame2.native_handle()) && - (frame1.width() == frame2.width()) && - (frame1.height() == frame2.height())); -} - -bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2) { - return ((frame1.width() == frame2.width()) && - (frame1.height() == frame2.height()) && - (frame1.stride(kYPlane) == frame2.stride(kYPlane)) && - (frame1.stride(kUPlane) == frame2.stride(kUPlane)) && - (frame1.stride(kVPlane) == frame2.stride(kVPlane)) && - (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) && - (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) && - (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) && - (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), - frame1.allocated_size(kYPlane)) == 0) && - (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), - frame1.allocated_size(kUPlane)) == 0) && - (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), - frame1.allocated_size(kVPlane)) == 0)); -} - bool EqualFramesVector( const std::vector>& frames1, const std::vector>& frames2) { if (frames1.size() != frames2.size()) return false; for (size_t i = 0; i < frames1.size(); ++i) { - if (!EqualFrames(*frames1[i], *frames2[i])) + // Compare frame buffers, since we don't care about differing timestamps. 
+ if (!test::FrameBufsEqual(frames1[i]->video_frame_buffer(), + frames2[i]->video_frame_buffer())) { return false; + } } return true; } diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc index 3435ae5ff3..a34544d448 100644 --- a/webrtc/video/video_receive_stream.cc +++ b/webrtc/video/video_receive_stream.cc @@ -376,7 +376,7 @@ void VideoReceiveStream::FrameCallback(VideoFrame* video_frame) { stats_proxy_.OnDecodedFrame(); // Post processing is not supported if the frame is backed by a texture. - if (!video_frame->native_handle()) { + if (!video_frame->video_frame_buffer()->native_handle()) { if (config_.pre_render_callback) config_.pre_render_callback->FrameCallback(video_frame); } diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc index 53f72de502..57a3420e45 100644 --- a/webrtc/video/video_send_stream_tests.cc +++ b/webrtc/video/video_send_stream_tests.cc @@ -32,6 +32,7 @@ #include "webrtc/test/call_test.h" #include "webrtc/test/configurable_frame_size_encoder.h" #include "webrtc/test/fake_texture_frame.h" +#include "webrtc/test/frame_utils.h" #include "webrtc/test/null_transport.h" #include "webrtc/test/testsupport/perf_test.h" #include "webrtc/video/send_statistics_proxy.h" @@ -42,11 +43,6 @@ namespace webrtc { enum VideoFormat { kGeneric, kVP8, }; -void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2); -void ExpectEqualTextureFrames(const VideoFrame& frame1, - const VideoFrame& frame2); -void ExpectEqualBufferFrames(const VideoFrame& frame1, - const VideoFrame& frame2); void ExpectEqualFramesVector(const std::vector& frames1, const std::vector& frames2); VideoFrame CreateVideoFrame(int width, int height, uint8_t data); @@ -1244,49 +1240,13 @@ TEST_F(VideoSendStreamTest, CapturesTextureAndVideoFrames) { DestroyStreams(); } -void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { - if (frame1.native_handle() || frame2.native_handle()) - 
ExpectEqualTextureFrames(frame1, frame2); - else - ExpectEqualBufferFrames(frame1, frame2); -} - -void ExpectEqualTextureFrames(const VideoFrame& frame1, - const VideoFrame& frame2) { - EXPECT_EQ(frame1.native_handle(), frame2.native_handle()); - EXPECT_EQ(frame1.width(), frame2.width()); - EXPECT_EQ(frame1.height(), frame2.height()); -} - -void ExpectEqualBufferFrames(const VideoFrame& frame1, - const VideoFrame& frame2) { - EXPECT_EQ(frame1.width(), frame2.width()); - EXPECT_EQ(frame1.height(), frame2.height()); - EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane)); - EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane)); - EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane)); - ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane)); - EXPECT_EQ(0, - memcmp(frame1.buffer(kYPlane), - frame2.buffer(kYPlane), - frame1.allocated_size(kYPlane))); - ASSERT_EQ(frame1.allocated_size(kUPlane), frame2.allocated_size(kUPlane)); - EXPECT_EQ(0, - memcmp(frame1.buffer(kUPlane), - frame2.buffer(kUPlane), - frame1.allocated_size(kUPlane))); - ASSERT_EQ(frame1.allocated_size(kVPlane), frame2.allocated_size(kVPlane)); - EXPECT_EQ(0, - memcmp(frame1.buffer(kVPlane), - frame2.buffer(kVPlane), - frame1.allocated_size(kVPlane))); -} - void ExpectEqualFramesVector(const std::vector& frames1, const std::vector& frames2) { EXPECT_EQ(frames1.size(), frames2.size()); for (size_t i = 0; i < std::min(frames1.size(), frames2.size()); ++i) - ExpectEqualFrames(frames1[i], frames2[i]); + // Compare frame buffers, since we don't care about differing timestamps. 
+ EXPECT_TRUE(test::FrameBufsEqual(frames1[i].video_frame_buffer(), + frames2[i].video_frame_buffer())); } VideoFrame CreateVideoFrame(int width, int height, uint8_t data) { diff --git a/webrtc/video/vie_encoder.cc b/webrtc/video/vie_encoder.cc index 6e87b353a7..e7d3539f9e 100644 --- a/webrtc/video/vie_encoder.cc +++ b/webrtc/video/vie_encoder.cc @@ -334,7 +334,7 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { "Encode"); const VideoFrame* frame_to_send = &video_frame; // TODO(wuchengli): support texture frames. - if (!video_frame.native_handle()) { + if (!video_frame.video_frame_buffer()->native_handle()) { // Pass frame via preprocessor. frame_to_send = vp_->PreprocessFrame(video_frame); if (!frame_to_send) { diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h index 28a6b8716d..b5e13f039b 100644 --- a/webrtc/video_frame.h +++ b/webrtc/video_frame.h @@ -125,12 +125,8 @@ class VideoFrame { // Return true if underlying plane buffers are of zero size, false if not. bool IsZeroSize() const; - // Return the handle of the underlying video frame. This is used when the - // frame is backed by a texture. The object should be destroyed when it is no - // longer in use, so the underlying resource can be freed. - void* native_handle() const; - - // Return the underlying buffer. + // Return the underlying buffer. Never nullptr for a properly + // initialized VideoFrame. rtc::scoped_refptr video_frame_buffer() const; // Set the underlying buffer.