From b05f994bb6f3055c852891c8acb531aee916a668 Mon Sep 17 00:00:00 2001 From: terelius Date: Mon, 25 Apr 2016 11:41:47 -0700 Subject: [PATCH] Revert of Delete cricket::VideoFrame methods GetYPlane and GetYPitch. (patchset #5 id:80001 of https://codereview.webrtc.org/1901973002/ ) Reason for revert: GetYPlane, GetYPitch etc is used by Chromium. Original issue's description: > Delete cricket::VideoFrame methods GetYPlane and GetYPitch. > > (And similarly for U and V). Also change video_frame_buffer method to > return a const ref to a scoped_ref_ptr. > > This cl is analogous to https://codereview.webrtc.org/1900673002/, > which delete corresponding methods in webrtc::VideoFrame. > > BUG=webrtc:5682 > > Committed: https://crrev.com/1c27c6bf4cf0476dd2f09425509afaae4cdfe599 > Cr-Commit-Position: refs/heads/master@{#12492} TBR=magjed@webrtc.org,perkj@webrtc.org,pbos@webrtc.org,pthatcher@webrtc.org,nisse@webrtc.org # Skipping CQ checks because original CL landed less than 1 days ago. NOPRESUBMIT=true NOTREECHECKS=true NOTRY=true BUG=webrtc:5682 Review URL: https://codereview.webrtc.org/1921493004 Cr-Commit-Position: refs/heads/master@{#12494} --- talk/app/webrtc/objc/RTCI420Frame.mm | 27 +-- talk/app/webrtc/objc/public/RTCI420Frame.h | 2 - webrtc/api/java/jni/peerconnection_jni.cc | 19 +- webrtc/media/base/fakevideorenderer.h | 14 +- .../media/base/videobroadcaster_unittest.cc | 4 +- webrtc/media/base/videoframe.cc | 58 ++--- webrtc/media/base/videoframe.h | 16 +- webrtc/media/base/videoframe_unittest.h | 210 +++++++----------- webrtc/media/engine/webrtcvideoframe.cc | 63 ++++-- webrtc/media/engine/webrtcvideoframe.h | 11 +- 10 files changed, 204 insertions(+), 220 deletions(-) diff --git a/talk/app/webrtc/objc/RTCI420Frame.mm b/talk/app/webrtc/objc/RTCI420Frame.mm index 6c6c564582..aacdfe33b2 100644 --- a/talk/app/webrtc/objc/RTCI420Frame.mm +++ b/talk/app/webrtc/objc/RTCI420Frame.mm @@ -55,39 +55,30 @@ } - (const uint8_t*)yPlane { - const rtc::scoped_refptr& buffer = - _videoFrame->video_frame_buffer(); - return buffer ? buffer->DataY() : nullptr; + const cricket::VideoFrame* const_frame = _videoFrame.get(); + return const_frame->GetYPlane(); } - (const uint8_t*)uPlane { - const rtc::scoped_refptr& buffer = - _videoFrame->video_frame_buffer(); - return buffer ? buffer->DataU() : nullptr; + const cricket::VideoFrame* const_frame = _videoFrame.get(); + return const_frame->GetUPlane(); } - (const uint8_t*)vPlane { - const rtc::scoped_refptr& buffer = - _videoFrame->video_frame_buffer(); - return buffer ? buffer->DataV() : nullptr; + const cricket::VideoFrame* const_frame = _videoFrame.get(); + return const_frame->GetVPlane(); } - (NSInteger)yPitch { - const rtc::scoped_refptr& buffer = - _videoFrame->video_frame_buffer(); - return buffer ? buffer->StrideY() : 0; + return _videoFrame->GetYPitch(); } - (NSInteger)uPitch { - const rtc::scoped_refptr& buffer = - _videoFrame->video_frame_buffer(); - return buffer ? buffer->StrideU() : 0; + return _videoFrame->GetUPitch(); } - (NSInteger)vPitch { - const rtc::scoped_refptr& buffer = - _videoFrame->video_frame_buffer(); - return buffer ? buffer->StrideV() : 0; + return _videoFrame->GetVPitch(); } @end diff --git a/talk/app/webrtc/objc/public/RTCI420Frame.h b/talk/app/webrtc/objc/public/RTCI420Frame.h index 1d145ee72a..890d4aa629 100644 --- a/talk/app/webrtc/objc/public/RTCI420Frame.h +++ b/talk/app/webrtc/objc/public/RTCI420Frame.h @@ -28,8 +28,6 @@ #import // RTCI420Frame is an ObjectiveC version of cricket::VideoFrame. 
-// TODO(nisse): It appears it doesn't support any VideoFrame methods, -// so let it wrap an webrtc::VideoFrameBuffer instead? @interface RTCI420Frame : NSObject @property(nonatomic, readonly) NSUInteger width; diff --git a/webrtc/api/java/jni/peerconnection_jni.cc b/webrtc/api/java/jni/peerconnection_jni.cc index 38d2ff37e7..fdd2d44bae 100644 --- a/webrtc/api/java/jni/peerconnection_jni.cc +++ b/webrtc/api/java/jni/peerconnection_jni.cc @@ -763,23 +763,20 @@ class JavaVideoRendererWrapper jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) { jintArray strides = jni()->NewIntArray(3); jint* strides_array = jni()->GetIntArrayElements(strides, NULL); - strides_array[0] = frame->video_frame_buffer()->StrideY(); - strides_array[1] = frame->video_frame_buffer()->StrideU(); - strides_array[2] = frame->video_frame_buffer()->StrideV(); + strides_array[0] = frame->GetYPitch(); + strides_array[1] = frame->GetUPitch(); + strides_array[2] = frame->GetVPitch(); jni()->ReleaseIntArrayElements(strides, strides_array, 0); jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL); - jobject y_buffer = jni()->NewDirectByteBuffer( - const_cast(frame->video_frame_buffer()->DataY()), - frame->video_frame_buffer()->StrideY() * - frame->video_frame_buffer()->height()); + jobject y_buffer = + jni()->NewDirectByteBuffer(const_cast(frame->GetYPlane()), + frame->GetYPitch() * frame->GetHeight()); size_t chroma_size = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2); jobject u_buffer = jni()->NewDirectByteBuffer( - const_cast(frame->video_frame_buffer()->DataU()), - chroma_size); + const_cast(frame->GetUPlane()), chroma_size); jobject v_buffer = jni()->NewDirectByteBuffer( - const_cast(frame->video_frame_buffer()->DataV()), - chroma_size); + const_cast(frame->GetVPlane()), chroma_size); jni()->SetObjectArrayElement(planes, 0, y_buffer); jni()->SetObjectArrayElement(planes, 1, u_buffer); jni()->SetObjectArrayElement(planes, 2, v_buffer); diff --git a/webrtc/media/base/fakevideorenderer.h b/webrtc/media/base/fakevideorenderer.h index 7398bbaa8b..51f1f90ee8 100644 --- a/webrtc/media/base/fakevideorenderer.h +++ b/webrtc/media/base/fakevideorenderer.h @@ -82,15 +82,15 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface { uint8_t v_min, uint8_t v_max, const cricket::VideoFrame* frame) { - if (!frame || !frame->video_frame_buffer()) { + if (!frame) { return false; } // Y int y_width = frame->width(); int y_height = frame->height(); - const uint8_t* y_plane = frame->video_frame_buffer()->DataY(); + const uint8_t* y_plane = frame->GetYPlane(); const uint8_t* y_pos = y_plane; - int32_t y_pitch = frame->video_frame_buffer()->StrideY(); + int32_t y_pitch = frame->GetYPitch(); for (int i = 0; i < y_height; ++i) { for (int j = 0; j < y_width; ++j) { uint8_t y_value = *(y_pos + j); @@ -103,12 +103,12 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface { // U and V int chroma_width = (frame->width() + 1)/2; int chroma_height = (frame->height() + 1)/2; - const uint8_t* u_plane = frame->video_frame_buffer()->DataU(); - const uint8_t* v_plane = frame->video_frame_buffer()->DataV(); + const uint8_t* u_plane = frame->GetUPlane(); + const uint8_t* v_plane = frame->GetVPlane(); const uint8_t* u_pos = u_plane; const uint8_t* v_pos = v_plane; - int32_t u_pitch = frame->video_frame_buffer()->StrideU(); - int32_t v_pitch = frame->video_frame_buffer()->StrideV(); + int32_t u_pitch = frame->GetUPitch(); + int32_t v_pitch = frame->GetVPitch(); for (int i = 0; i < chroma_height; ++i) { for 
(int j = 0; j < chroma_width; ++j) { uint8_t u_value = *(u_pos + j); diff --git a/webrtc/media/base/videobroadcaster_unittest.cc b/webrtc/media/base/videobroadcaster_unittest.cc index e4e6d32564..0299d0ee36 100644 --- a/webrtc/media/base/videobroadcaster_unittest.cc +++ b/webrtc/media/base/videobroadcaster_unittest.cc @@ -137,7 +137,7 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) { cricket::WebRtcVideoFrame frame1; frame1.InitToBlack(100, 200, 10000 /*ts*/); // Make it not all-black - frame1.video_frame_buffer()->MutableDataU()[0] = 0; + frame1.GetUPlane()[0] = 0; broadcaster.OnFrame(frame1); EXPECT_TRUE(sink1.black_frame()); EXPECT_EQ(10000, sink1.timestamp()); @@ -153,7 +153,7 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) { cricket::WebRtcVideoFrame frame2; frame2.InitToBlack(100, 200, 30000 /*ts*/); // Make it not all-black - frame2.video_frame_buffer()->MutableDataU()[0] = 0; + frame2.GetUPlane()[0] = 0; broadcaster.OnFrame(frame2); EXPECT_FALSE(sink1.black_frame()); EXPECT_EQ(30000, sink1.timestamp()); diff --git a/webrtc/media/base/videoframe.cc b/webrtc/media/base/videoframe.cc index d5c24adf3c..be63157d7b 100644 --- a/webrtc/media/base/videoframe.cc +++ b/webrtc/media/base/videoframe.cc @@ -31,17 +31,15 @@ bool VideoFrame::CopyToPlanes(uint8_t* dst_y, int32_t dst_pitch_y, int32_t dst_pitch_u, int32_t dst_pitch_v) const { - const rtc::scoped_refptr& buffer = - video_frame_buffer(); - if (!buffer) { - LOG(LS_ERROR) << "NULL video buffer."; + if (!GetYPlane() || !GetUPlane() || !GetVPlane()) { + LOG(LS_ERROR) << "NULL plane pointer."; return false; } int32_t src_width = width(); int32_t src_height = height(); - return libyuv::I420Copy(buffer->DataY(), buffer->StrideY(), - buffer->DataU(), buffer->StrideU(), - buffer->DataV(), buffer->StrideV(), + return libyuv::I420Copy(GetYPlane(), GetYPitch(), + GetUPlane(), GetUPitch(), + GetVPlane(), GetVPitch(), dst_y, dst_pitch_y, dst_u, dst_pitch_u, dst_v, dst_pitch_v, @@ -58,11 +56,9 @@ size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc, return needed; } - if (libyuv::ConvertFromI420( - video_frame_buffer()->DataY(), video_frame_buffer()->StrideY(), - video_frame_buffer()->DataU(), video_frame_buffer()->StrideU(), - video_frame_buffer()->DataV(), video_frame_buffer()->StrideV(), - buffer, stride_rgb, width(), height(), to_fourcc)) { + if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(), + GetUPitch(), GetVPlane(), GetVPitch(), buffer, + stride_rgb, width(), height(), to_fourcc)) { LOG(LS_ERROR) << "RGB type not supported: " << to_fourcc; return 0; // 0 indicates error } @@ -82,8 +78,8 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y, size_t dst_height, bool interpolate, bool vert_crop) const { - if (!video_frame_buffer()) { - LOG(LS_ERROR) << "NULL frame buffer."; + if (!GetYPlane() || !GetUPlane() || !GetVPlane()) { + LOG(LS_ERROR) << "NULL plane pointer."; return; } @@ -93,9 +89,9 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y, CopyToPlanes(dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v); return; } - const uint8_t* src_y = video_frame_buffer()->DataY(); - const uint8_t* src_u = video_frame_buffer()->DataU(); - const uint8_t* src_v = video_frame_buffer()->DataV(); + const uint8_t* src_y = GetYPlane(); + const uint8_t* src_u = GetUPlane(); + const uint8_t* src_v = GetVPlane(); if (vert_crop) { // Adjust the input width:height ratio to be the same as the output ratio. 
@@ -112,16 +108,15 @@ void VideoFrame::StretchToPlanes(uint8_t* dst_y, int32_t iheight_offset = static_cast((height() - src_height) >> 2); iheight_offset <<= 1; // Ensure that iheight_offset is even. - src_y += iheight_offset * video_frame_buffer()->StrideY(); - src_u += iheight_offset / 2 * video_frame_buffer()->StrideU(); - src_v += iheight_offset / 2 * video_frame_buffer()->StrideV(); + src_y += iheight_offset * GetYPitch(); + src_u += iheight_offset / 2 * GetUPitch(); + src_v += iheight_offset / 2 * GetVPitch(); } } // Scale to the output I420 frame. - libyuv::Scale(src_y, src_u, src_v, video_frame_buffer()->StrideY(), - video_frame_buffer()->StrideU(), - video_frame_buffer()->StrideV(), + libyuv::Scale(src_y, src_u, src_v, + GetYPitch(), GetUPitch(), GetVPitch(), static_cast(src_width), static_cast(src_height), dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v, static_cast(dst_width), static_cast(dst_height), @@ -135,12 +130,8 @@ void VideoFrame::StretchToFrame(VideoFrame* dst, return; } - StretchToPlanes(dst->video_frame_buffer()->MutableDataY(), - dst->video_frame_buffer()->MutableDataU(), - dst->video_frame_buffer()->MutableDataV(), - dst->video_frame_buffer()->StrideY(), - dst->video_frame_buffer()->StrideU(), - dst->video_frame_buffer()->StrideV(), + StretchToPlanes(dst->GetYPlane(), dst->GetUPlane(), dst->GetVPlane(), + dst->GetYPitch(), dst->GetUPitch(), dst->GetVPitch(), dst->width(), dst->height(), interpolate, vert_crop); dst->SetTimeStamp(GetTimeStamp()); @@ -160,12 +151,9 @@ VideoFrame* VideoFrame::Stretch(size_t dst_width, size_t dst_height, } bool VideoFrame::SetToBlack() { - return libyuv::I420Rect(video_frame_buffer()->MutableDataY(), - video_frame_buffer()->StrideY(), - video_frame_buffer()->MutableDataU(), - video_frame_buffer()->StrideU(), - video_frame_buffer()->MutableDataV(), - video_frame_buffer()->StrideV(), + return libyuv::I420Rect(GetYPlane(), GetYPitch(), + GetUPlane(), GetUPitch(), + GetVPlane(), GetVPitch(), 0, 0, width(), height(), 16, 128, 128) == 0; diff --git a/webrtc/media/base/videoframe.h b/webrtc/media/base/videoframe.h index 4026c26b3b..9e0fbfd6d4 100644 --- a/webrtc/media/base/videoframe.h +++ b/webrtc/media/base/videoframe.h @@ -35,6 +35,18 @@ class VideoFrame { virtual size_t GetWidth() const final { return width(); } virtual size_t GetHeight() const final { return height(); } + // These can return NULL if the object is not backed by a buffer. + virtual const uint8_t* GetYPlane() const = 0; + virtual const uint8_t* GetUPlane() const = 0; + virtual const uint8_t* GetVPlane() const = 0; + virtual uint8_t* GetYPlane() = 0; + virtual uint8_t* GetUPlane() = 0; + virtual uint8_t* GetVPlane() = 0; + + virtual int32_t GetYPitch() const = 0; + virtual int32_t GetUPitch() const = 0; + virtual int32_t GetVPitch() const = 0; + // Returns the handle of the underlying video frame. This is used when the // frame is backed by a texture. The object should be destroyed when it is no // longer in use, so the underlying resource can be freed. @@ -42,8 +54,8 @@ class VideoFrame { // Returns the underlying video frame buffer. This function is ok to call // multiple times, but the returned object will refer to the same memory. - virtual const rtc::scoped_refptr& - video_frame_buffer() const = 0; + virtual rtc::scoped_refptr video_frame_buffer() + const = 0; // System monotonic clock, same timebase as rtc::TimeMicros(). 
virtual int64_t timestamp_us() const = 0; diff --git a/webrtc/media/base/videoframe_unittest.h b/webrtc/media/base/videoframe_unittest.h index 6ceba833b5..9e9b7dddb1 100644 --- a/webrtc/media/base/videoframe_unittest.h +++ b/webrtc/media/base/videoframe_unittest.h @@ -267,27 +267,24 @@ class VideoFrameTest : public testing::Test { const uint8_t* start = reinterpret_cast(ms->GetBuffer()); int awidth = (width + 1) & ~1; frame->InitToBlack(width, height, 0); - int stride_y = frame->video_frame_buffer()->StrideY(); - int stride_u = frame->video_frame_buffer()->StrideU(); - int stride_v = frame->video_frame_buffer()->StrideV(); - uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY(); - uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU(); - uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV(); + int stride_y = frame->GetYPitch(); + int stride_u = frame->GetUPitch(); + int stride_v = frame->GetVPitch(); for (uint32_t y = 0; y < height; ++y) { for (uint32_t x = 0; x < width; x += 2) { const uint8_t* quad1 = start + (y * awidth + x) * 2; - plane_y[stride_y * y + x] = quad1[y1_pos]; + frame->GetYPlane()[stride_y * y + x] = quad1[y1_pos]; if ((x + 1) < width) { - plane_y[stride_y * y + x + 1] = quad1[y2_pos]; + frame->GetYPlane()[stride_y * y + x + 1] = quad1[y2_pos]; } if ((y & 1) == 0) { const uint8_t* quad2 = quad1 + awidth * 2; if ((y + 1) >= height) { quad2 = quad1; } - plane_u[stride_u * (y / 2) + x / 2] = + frame->GetUPlane()[stride_u * (y / 2) + x / 2] = (quad1[u_pos] + quad2[u_pos] + 1) / 2; - plane_v[stride_v * (y / 2) + x / 2] = + frame->GetVPlane()[stride_v * (y / 2) + x / 2] = (quad1[v_pos] + quad2[v_pos] + 1) / 2; } } @@ -314,12 +311,9 @@ class VideoFrameTest : public testing::Test { pitch = -pitch; } frame->InitToBlack(width, height, 0); - int stride_y = frame->video_frame_buffer()->StrideY(); - int stride_u = frame->video_frame_buffer()->StrideU(); - int stride_v = frame->video_frame_buffer()->StrideV(); - uint8_t* plane_y = frame->video_frame_buffer()->MutableDataY(); - uint8_t* plane_u = frame->video_frame_buffer()->MutableDataU(); - uint8_t* plane_v = frame->video_frame_buffer()->MutableDataV(); + int stride_y = frame->GetYPitch(); + int stride_u = frame->GetUPitch(); + int stride_v = frame->GetVPitch(); for (int32_t y = 0; y < height; y += 2) { for (int32_t x = 0; x < width; x += 2) { const uint8_t* rgb[4]; @@ -332,19 +326,19 @@ class VideoFrameTest : public testing::Test { ConvertRgbPixel(rgb[i][r_pos], rgb[i][g_pos], rgb[i][b_pos], &yuv[i][0], &yuv[i][1], &yuv[i][2]); } - plane_y[stride_y * y + x] = yuv[0][0]; + frame->GetYPlane()[stride_y * y + x] = yuv[0][0]; if ((x + 1) < width) { - plane_y[stride_y * y + x + 1] = yuv[1][0]; + frame->GetYPlane()[stride_y * y + x + 1] = yuv[1][0]; } if ((y + 1) < height) { - plane_y[stride_y * (y + 1) + x] = yuv[2][0]; + frame->GetYPlane()[stride_y * (y + 1) + x] = yuv[2][0]; if ((x + 1) < width) { - plane_y[stride_y * (y + 1) + x + 1] = yuv[3][0]; + frame->GetYPlane()[stride_y * (y + 1) + x + 1] = yuv[3][0]; } } - plane_u[stride_u * (y / 2) + x / 2] = + frame->GetUPlane()[stride_u * (y / 2) + x / 2] = (yuv[0][1] + yuv[1][1] + yuv[2][1] + yuv[3][1] + 2) / 4; - plane_v[stride_v * (y / 2) + x / 2] = + frame->GetVPlane()[stride_v * (y / 2) + x / 2] = (yuv[0][2] + yuv[1][2] + yuv[2][2] + yuv[3][2] + 2) / 4; } } @@ -401,15 +395,15 @@ class VideoFrameTest : public testing::Test { // Comparison functions for testing. 
static bool IsNull(const cricket::VideoFrame& frame) { - return !frame.video_frame_buffer(); + return !frame.GetYPlane(); } static bool IsSize(const cricket::VideoFrame& frame, int width, int height) { - return !IsNull(frame) && frame.video_frame_buffer()->StrideY() >= width && - frame.video_frame_buffer()->StrideU() >= width / 2 && - frame.video_frame_buffer()->StrideV() >= width / 2 && + return !IsNull(frame) && frame.GetYPitch() >= width && + frame.GetUPitch() >= width / 2 && + frame.GetVPitch() >= width / 2 && frame.width() == width && frame.height() == height; } @@ -450,17 +444,15 @@ class VideoFrameTest : public testing::Test { const uint8_t* v, uint32_t vpitch, int max_error) { - return IsSize(frame, width, height) && frame.GetTimeStamp() == time_stamp && - IsPlaneEqual("y", frame.video_frame_buffer()->DataY(), - frame.video_frame_buffer()->StrideY(), y, ypitch, + return IsSize(frame, width, height) && + frame.GetTimeStamp() == time_stamp && + IsPlaneEqual("y", frame.GetYPlane(), frame.GetYPitch(), y, ypitch, static_cast(width), static_cast(height), max_error) && - IsPlaneEqual("u", frame.video_frame_buffer()->DataU(), - frame.video_frame_buffer()->StrideU(), u, upitch, + IsPlaneEqual("u", frame.GetUPlane(), frame.GetUPitch(), u, upitch, static_cast((width + 1) / 2), static_cast((height + 1) / 2), max_error) && - IsPlaneEqual("v", frame.video_frame_buffer()->DataV(), - frame.video_frame_buffer()->StrideV(), v, vpitch, + IsPlaneEqual("v", frame.GetVPlane(), frame.GetVPitch(), v, vpitch, static_cast((width + 1) / 2), static_cast((height + 1) / 2), max_error); } @@ -471,12 +463,9 @@ class VideoFrameTest : public testing::Test { return IsEqual(frame1, frame2.width(), frame2.height(), frame2.GetTimeStamp(), - frame2.video_frame_buffer()->DataY(), - frame2.video_frame_buffer()->StrideY(), - frame2.video_frame_buffer()->DataU(), - frame2.video_frame_buffer()->StrideU(), - frame2.video_frame_buffer()->DataV(), - frame2.video_frame_buffer()->StrideV(), + frame2.GetYPlane(), frame2.GetYPitch(), + frame2.GetUPlane(), frame2.GetUPitch(), + frame2.GetVPlane(), frame2.GetVPitch(), max_error); } @@ -489,26 +478,23 @@ class VideoFrameTest : public testing::Test { frame2.width() - hcrop * 2, frame2.height() - vcrop * 2, frame2.GetTimeStamp(), - frame2.video_frame_buffer()->DataY() - + vcrop * frame2.video_frame_buffer()->StrideY() + frame2.GetYPlane() + vcrop * frame2.GetYPitch() + hcrop, - frame2.video_frame_buffer()->StrideY(), - frame2.video_frame_buffer()->DataU() - + vcrop * frame2.video_frame_buffer()->StrideU() / 2 + frame2.GetYPitch(), + frame2.GetUPlane() + vcrop * frame2.GetUPitch() / 2 + hcrop / 2, - frame2.video_frame_buffer()->StrideU(), - frame2.video_frame_buffer()->DataV() - + vcrop * frame2.video_frame_buffer()->StrideV() / 2 + frame2.GetUPitch(), + frame2.GetVPlane() + vcrop * frame2.GetVPitch() / 2 + hcrop / 2, - frame2.video_frame_buffer()->StrideV(), + frame2.GetVPitch(), max_error); } static bool IsBlack(const cricket::VideoFrame& frame) { return !IsNull(frame) && - *frame.video_frame_buffer()->DataY() == 16 && - *frame.video_frame_buffer()->DataU() == 128 && - *frame.video_frame_buffer()->DataV() == 128; + *frame.GetYPlane() == 16 && + *frame.GetUPlane() == 128 && + *frame.GetVPlane() == 128; } //////////////////////// @@ -555,12 +541,9 @@ class VideoFrameTest : public testing::Test { uint8_t* y = ALIGNP(buf.get(), kAlignment); uint8_t* u = y + kWidth * kHeight; uint8_t* v = u + (kWidth / 2) * kHeight; - EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(), - 
frame1.video_frame_buffer()->StrideY(), - frame1.video_frame_buffer()->DataU(), - frame1.video_frame_buffer()->StrideU(), - frame1.video_frame_buffer()->DataV(), - frame1.video_frame_buffer()->StrideV(), + EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(), + frame1.GetUPlane(), frame1.GetUPitch(), + frame1.GetVPlane(), frame1.GetVPitch(), y, kWidth, u, kWidth / 2, v, kWidth / 2, @@ -577,12 +560,9 @@ class VideoFrameTest : public testing::Test { size_t buf_size = kWidth * kHeight * 2; std::unique_ptr buf(new uint8_t[buf_size + kAlignment]); uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment); - EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(), - frame1.video_frame_buffer()->StrideY(), - frame1.video_frame_buffer()->DataU(), - frame1.video_frame_buffer()->StrideU(), - frame1.video_frame_buffer()->DataV(), - frame1.video_frame_buffer()->StrideV(), + EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(), + frame1.GetUPlane(), frame1.GetUPitch(), + frame1.GetVPlane(), frame1.GetVPitch(), yuy2, kWidth * 2, kWidth, kHeight)); EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2, @@ -597,12 +577,9 @@ class VideoFrameTest : public testing::Test { size_t buf_size = kWidth * kHeight * 2; std::unique_ptr buf(new uint8_t[buf_size + kAlignment + 1]); uint8_t* yuy2 = ALIGNP(buf.get(), kAlignment) + 1; - EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.video_frame_buffer()->DataY(), - frame1.video_frame_buffer()->StrideY(), - frame1.video_frame_buffer()->DataU(), - frame1.video_frame_buffer()->StrideU(), - frame1.video_frame_buffer()->DataV(), - frame1.video_frame_buffer()->StrideV(), + EXPECT_EQ(0, libyuv::I420ToYUY2(frame1.GetYPlane(), frame1.GetYPitch(), + frame1.GetUPlane(), frame1.GetUPitch(), + frame1.GetVPlane(), frame1.GetVPitch(), yuy2, kWidth * 2, kWidth, kHeight)); EXPECT_TRUE(LoadFrame(yuy2, buf_size, cricket::FOURCC_YUY2, @@ -815,23 +792,16 @@ class VideoFrameTest : public testing::Test { EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \ kHeight, \ reinterpret_cast(ms->GetBuffer()), \ - data_size, 0, webrtc::kVideoRotation_0)); \ - int width_rotate = frame1.width(); \ - int height_rotate = frame1.height(); \ - EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \ - libyuv::I420Mirror(frame2.video_frame_buffer()->DataY(), \ - frame2.video_frame_buffer()->StrideY(), \ - frame2.video_frame_buffer()->DataU(), \ - frame2.video_frame_buffer()->StrideU(), \ - frame2.video_frame_buffer()->DataV(), \ - frame2.video_frame_buffer()->StrideV(), \ - frame3.video_frame_buffer()->MutableDataY(), \ - frame3.video_frame_buffer()->StrideY(), \ - frame3.video_frame_buffer()->MutableDataU(), \ - frame3.video_frame_buffer()->StrideU(), \ - frame3.video_frame_buffer()->MutableDataV(), \ - frame3.video_frame_buffer()->StrideV(), \ - kWidth, kHeight); \ + data_size, 0, webrtc::kVideoRotation_0)); \ + int width_rotate = frame1.width(); \ + int height_rotate = frame1.height(); \ + EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \ + libyuv::I420Mirror( \ + frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \ + frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \ + frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \ + frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \ + kHeight); \ EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \ } @@ -853,23 +823,16 @@ class VideoFrameTest : public testing::Test { EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \ 
kHeight, \ reinterpret_cast(ms->GetBuffer()), \ - data_size, 0, webrtc::kVideoRotation_0)); \ - int width_rotate = frame1.width(); \ - int height_rotate = frame1.height(); \ - EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \ - libyuv::I420Rotate(frame2.video_frame_buffer()->DataY(), \ - frame2.video_frame_buffer()->StrideY(), \ - frame2.video_frame_buffer()->DataU(), \ - frame2.video_frame_buffer()->StrideU(), \ - frame2.video_frame_buffer()->DataV(), \ - frame2.video_frame_buffer()->StrideV(), \ - frame3.video_frame_buffer()->MutableDataY(), \ - frame3.video_frame_buffer()->StrideY(), \ - frame3.video_frame_buffer()->MutableDataU(), \ - frame3.video_frame_buffer()->StrideU(), \ - frame3.video_frame_buffer()->MutableDataV(), \ - frame3.video_frame_buffer()->StrideV(), \ - kWidth, kHeight, libyuv::kRotate##ROTATE); \ + data_size, 0, webrtc::kVideoRotation_0)); \ + int width_rotate = frame1.width(); \ + int height_rotate = frame1.height(); \ + EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 0)); \ + libyuv::I420Rotate( \ + frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(), \ + frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(), \ + frame3.GetYPlane(), frame3.GetYPitch(), frame3.GetUPlane(), \ + frame3.GetUPitch(), frame3.GetVPlane(), frame3.GetVPitch(), kWidth, \ + kHeight, libyuv::kRotate##ROTATE); \ EXPECT_TRUE(IsEqual(frame1, frame3, 0)); \ } @@ -989,9 +952,9 @@ class VideoFrameTest : public testing::Test { } EXPECT_EQ(5, frame.width()); EXPECT_EQ(5, frame.height()); - EXPECT_EQ(5, frame.video_frame_buffer()->StrideY()); - EXPECT_EQ(3, frame.video_frame_buffer()->StrideU()); - EXPECT_EQ(3, frame.video_frame_buffer()->StrideV()); + EXPECT_EQ(5, frame.GetYPitch()); + EXPECT_EQ(3, frame.GetUPitch()); + EXPECT_EQ(3, frame.GetVPitch()); } // Test 1 pixel edge case image ARGB buffer. @@ -1158,10 +1121,8 @@ class VideoFrameTest : public testing::Test { ASSERT_TRUE(LoadFrameNoRepeat(&frame1)); ASSERT_TRUE(LoadFrame(kJpeg400Filename, cricket::FOURCC_MJPG, kWidth, kHeight, &frame2)); - EXPECT_TRUE(IsPlaneEqual("y", frame1.video_frame_buffer()->DataY(), - frame1.video_frame_buffer()->StrideY(), - frame2.video_frame_buffer()->DataY(), - frame2.video_frame_buffer()->StrideY(), + EXPECT_TRUE(IsPlaneEqual("y", frame1.GetYPlane(), frame1.GetYPitch(), + frame2.GetYPlane(), frame2.GetYPitch(), kWidth, kHeight, 32)); EXPECT_TRUE(IsEqual(frame1, frame2, 128)); } @@ -1343,7 +1304,9 @@ class VideoFrameTest : public testing::Test { EXPECT_TRUE(frame2.Init(frame1)); } EXPECT_TRUE(IsEqual(frame1, frame2, 0)); - EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer()); + EXPECT_EQ(frame1.GetYPlane(), frame2.GetYPlane()); + EXPECT_EQ(frame1.GetUPlane(), frame2.GetUPlane()); + EXPECT_EQ(frame1.GetVPlane(), frame2.GetVPlane()); } // Test creating an empty image and initing it to black. 
@@ -1456,12 +1419,9 @@ class VideoFrameTest : public testing::Test { EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 0)); for (int i = 0; i < repeat_from; ++i) { EXPECT_EQ(0, RGBToI420(out, stride, - frame2.video_frame_buffer()->MutableDataY(), - frame2.video_frame_buffer()->StrideY(), - frame2.video_frame_buffer()->MutableDataU(), - frame2.video_frame_buffer()->StrideU(), - frame2.video_frame_buffer()->MutableDataV(), - frame2.video_frame_buffer()->StrideV(), + frame2.GetYPlane(), frame2.GetYPitch(), + frame2.GetUPlane(), frame2.GetUPitch(), + frame2.GetVPlane(), frame2.GetVPitch(), kWidth, kHeight)); } if (rowpad) { @@ -1764,12 +1724,9 @@ class VideoFrameTest : public testing::Test { uint8_t* v = u + (kWidth / 2) * kHeight; ASSERT_TRUE(LoadFrameNoRepeat(&frame1)); for (int i = 0; i < repeat_; ++i) { - EXPECT_EQ(0, libyuv::I420ToI422(frame1.video_frame_buffer()->DataY(), - frame1.video_frame_buffer()->StrideY(), - frame1.video_frame_buffer()->DataU(), - frame1.video_frame_buffer()->StrideU(), - frame1.video_frame_buffer()->DataV(), - frame1.video_frame_buffer()->StrideV(), + EXPECT_EQ(0, libyuv::I420ToI422(frame1.GetYPlane(), frame1.GetYPitch(), + frame1.GetUPlane(), frame1.GetUPitch(), + frame1.GetVPlane(), frame1.GetVPitch(), y, kWidth, u, kWidth / 2, v, kWidth / 2, @@ -1792,8 +1749,7 @@ class VideoFrameTest : public testing::Test { target.reset(source->Copy()); EXPECT_TRUE(IsEqual(*source, *target, 0)); source.reset(); - ASSERT_TRUE(target->video_frame_buffer() != NULL); - EXPECT_TRUE(target->video_frame_buffer()->DataY() != NULL); + EXPECT_TRUE(target->GetYPlane() != NULL); } void CopyIsRef() { @@ -1803,7 +1759,9 @@ class VideoFrameTest : public testing::Test { target.reset(source->Copy()); EXPECT_TRUE(IsEqual(*source, *target, 0)); const T* const_source = source.get(); - EXPECT_EQ(const_source->video_frame_buffer(), target->video_frame_buffer()); + EXPECT_EQ(const_source->GetYPlane(), target->GetYPlane()); + EXPECT_EQ(const_source->GetUPlane(), target->GetUPlane()); + EXPECT_EQ(const_source->GetVPlane(), target->GetVPlane()); } void StretchToFrame() { diff --git a/webrtc/media/engine/webrtcvideoframe.cc b/webrtc/media/engine/webrtcvideoframe.cc index cda00275f7..145f265f28 100644 --- a/webrtc/media/engine/webrtcvideoframe.cc +++ b/webrtc/media/engine/webrtcvideoframe.cc @@ -79,6 +79,45 @@ int WebRtcVideoFrame::height() const { return video_frame_buffer_ ? video_frame_buffer_->height() : 0; } +const uint8_t* WebRtcVideoFrame::GetYPlane() const { + return video_frame_buffer_ ? video_frame_buffer_->DataY() : nullptr; +} + +const uint8_t* WebRtcVideoFrame::GetUPlane() const { + return video_frame_buffer_ ? video_frame_buffer_->DataU() : nullptr; +} + +const uint8_t* WebRtcVideoFrame::GetVPlane() const { + return video_frame_buffer_ ? video_frame_buffer_->DataV() : nullptr; +} + +uint8_t* WebRtcVideoFrame::GetYPlane() { + return video_frame_buffer_ ? video_frame_buffer_->MutableData(kYPlane) + : nullptr; +} + +uint8_t* WebRtcVideoFrame::GetUPlane() { + return video_frame_buffer_ ? video_frame_buffer_->MutableData(kUPlane) + : nullptr; +} + +uint8_t* WebRtcVideoFrame::GetVPlane() { + return video_frame_buffer_ ? video_frame_buffer_->MutableData(kVPlane) + : nullptr; +} + +int32_t WebRtcVideoFrame::GetYPitch() const { + return video_frame_buffer_ ? video_frame_buffer_->StrideY() : 0; +} + +int32_t WebRtcVideoFrame::GetUPitch() const { + return video_frame_buffer_ ? video_frame_buffer_->StrideU() : 0; +} + +int32_t WebRtcVideoFrame::GetVPitch() const { + return video_frame_buffer_ ? 
video_frame_buffer_->StrideV() : 0; +} + bool WebRtcVideoFrame::IsExclusive() const { return video_frame_buffer_->IsMutable(); } @@ -87,7 +126,7 @@ void* WebRtcVideoFrame::GetNativeHandle() const { return video_frame_buffer_ ? video_frame_buffer_->native_handle() : nullptr; } -const rtc::scoped_refptr& +rtc::scoped_refptr WebRtcVideoFrame::video_frame_buffer() const { return video_frame_buffer_; } @@ -142,12 +181,9 @@ bool WebRtcVideoFrame::Reset(uint32_t format, int idh = (h < 0) ? -dh : dh; int r = libyuv::ConvertToI420( sample, sample_size, - video_frame_buffer_->MutableDataY(), - video_frame_buffer_->StrideY(), - video_frame_buffer_->MutableDataU(), - video_frame_buffer_->StrideU(), - video_frame_buffer_->MutableDataV(), - video_frame_buffer_->StrideV(), + GetYPlane(), GetYPitch(), + GetUPlane(), GetUPitch(), + GetVPlane(), GetVPitch(), horiz_crop, vert_crop, w, h, dw, idh, @@ -216,15 +252,10 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const { // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from // VideoRotation to libyuv::RotationMode. int ret = libyuv::I420Rotate( - video_frame_buffer_->DataY(), video_frame_buffer_->StrideY(), - video_frame_buffer_->DataU(), video_frame_buffer_->StrideU(), - video_frame_buffer_->DataV(), video_frame_buffer_->StrideV(), - rotated_frame_->video_frame_buffer()->MutableDataY(), - rotated_frame_->video_frame_buffer()->StrideY(), - rotated_frame_->video_frame_buffer()->MutableDataU(), - rotated_frame_->video_frame_buffer()->StrideU(), - rotated_frame_->video_frame_buffer()->MutableDataV(), - rotated_frame_->video_frame_buffer()->StrideV(), + GetYPlane(), GetYPitch(), GetUPlane(), GetUPitch(), GetVPlane(), + GetVPitch(), rotated_frame_->GetYPlane(), rotated_frame_->GetYPitch(), + rotated_frame_->GetUPlane(), rotated_frame_->GetUPitch(), + rotated_frame_->GetVPlane(), rotated_frame_->GetVPitch(), orig_width, orig_height, static_cast(rotation())); if (ret == 0) { diff --git a/webrtc/media/engine/webrtcvideoframe.h b/webrtc/media/engine/webrtcvideoframe.h index ee34c414a4..a2034ec77d 100644 --- a/webrtc/media/engine/webrtcvideoframe.h +++ b/webrtc/media/engine/webrtcvideoframe.h @@ -68,8 +68,17 @@ class WebRtcVideoFrame : public VideoFrame { int width() const override; int height() const override; + const uint8_t* GetYPlane() const override; + const uint8_t* GetUPlane() const override; + const uint8_t* GetVPlane() const override; + uint8_t* GetYPlane() override; + uint8_t* GetUPlane() override; + uint8_t* GetVPlane() override; + int32_t GetYPitch() const override; + int32_t GetUPitch() const override; + int32_t GetVPitch() const override; void* GetNativeHandle() const override; - const rtc::scoped_refptr& video_frame_buffer() + rtc::scoped_refptr video_frame_buffer() const override; /* System monotonic clock */
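
Illustrative usage (not part of the patch): a minimal sketch of how downstream code, such as the JNI and Objective-C wrappers touched above, can read pixel data through the plane accessors this revert restores. It assumes only the cricket::VideoFrame methods visible in the hunks (GetYPlane()/GetYPitch(), width()/height()); the helper name CopyYPlane is hypothetical.

    // Illustrative only -- not from the patch. Copies the Y plane of a
    // cricket::VideoFrame into a tightly packed buffer using the restored
    // accessors. Returns an empty vector when the frame is not backed by a
    // buffer (e.g. a texture frame), in which case the plane getters can
    // return NULL per the videoframe.h comment above.
    #include <cstdint>
    #include <cstring>
    #include <vector>

    #include "webrtc/media/base/videoframe.h"

    std::vector<uint8_t> CopyYPlane(const cricket::VideoFrame& frame) {
      const uint8_t* y = frame.GetYPlane();
      if (!y)
        return {};  // Texture-backed frame; no CPU-accessible planes.
      const int width = frame.width();
      const int height = frame.height();
      const int32_t pitch = frame.GetYPitch();  // Stride may exceed width.
      std::vector<uint8_t> out(static_cast<size_t>(width) * height);
      for (int row = 0; row < height; ++row) {
        std::memcpy(&out[static_cast<size_t>(row) * width],
                    y + row * pitch,
                    static_cast<size_t>(width));
      }
      return out;
    }

The chroma planes follow the same pattern with GetUPlane()/GetUPitch() and GetVPlane()/GetVPitch(), but are quarter-sized: ((width + 1) / 2) by ((height + 1) / 2), matching the chroma_size computation in the peerconnection_jni.cc hunk.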