diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
index bad3dc8473..20e8f927e2 100644
--- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
+++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCVideoFrame.mm
@@ -31,27 +31,27 @@
 }
 
 - (const uint8_t *)dataY {
-  return _videoBuffer->DataY();
+  return _videoBuffer->GetI420()->DataY();
 }
 
 - (const uint8_t *)dataU {
-  return _videoBuffer->DataU();
+  return _videoBuffer->GetI420()->DataU();
 }
 
 - (const uint8_t *)dataV {
-  return _videoBuffer->DataV();
+  return _videoBuffer->GetI420()->DataV();
 }
 
 - (int)strideY {
-  return _videoBuffer->StrideY();
+  return _videoBuffer->GetI420()->StrideY();
 }
 
 - (int)strideU {
-  return _videoBuffer->StrideU();
+  return _videoBuffer->GetI420()->StrideU();
 }
 
 - (int)strideV {
-  return _videoBuffer->StrideV();
+  return _videoBuffer->GetI420()->StrideV();
 }
 
 - (int64_t)timeStampNs {
@@ -59,12 +59,14 @@
 }
 
 - (CVPixelBufferRef)nativeHandle {
-  return static_cast<CVPixelBufferRef>(_videoBuffer->native_handle());
+  return (_videoBuffer->type() == webrtc::VideoFrameBuffer::Type::kNative) ?
+      static_cast<webrtc::CoreVideoFrameBuffer *>(_videoBuffer.get())->pixel_buffer() :
+      nil;
 }
 
 - (RTCVideoFrame *)newI420VideoFrame {
   return [[RTCVideoFrame alloc]
-      initWithVideoBuffer:_videoBuffer->NativeToI420Buffer()
+      initWithVideoBuffer:_videoBuffer->ToI420()
                  rotation:_rotation
              timeStampNs:_timeStampNs];
 }
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.cc b/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.cc
index 9593d13b34..660b2aab9e 100644
--- a/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.cc
+++ b/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.cc
@@ -25,24 +25,29 @@ CoreVideoFrameBuffer::CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer,
                                            int crop_height,
                                            int crop_x,
                                            int crop_y)
-    : NativeHandleBuffer(pixel_buffer, adapted_width, adapted_height),
-      pixel_buffer_(pixel_buffer),
+    : pixel_buffer_(pixel_buffer),
+      width_(adapted_width),
+      height_(adapted_height),
       buffer_width_(CVPixelBufferGetWidth(pixel_buffer)),
       buffer_height_(CVPixelBufferGetHeight(pixel_buffer)),
-      crop_width_(crop_width), crop_height_(crop_height),
+      crop_width_(crop_width),
+      crop_height_(crop_height),
       // Can only crop at even pixels.
-      crop_x_(crop_x & ~1), crop_y_(crop_y & ~1) {
+      crop_x_(crop_x & ~1),
+      crop_y_(crop_y & ~1) {
   CVBufferRetain(pixel_buffer_);
 }
 
 CoreVideoFrameBuffer::CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer)
-    : NativeHandleBuffer(pixel_buffer,
-                         CVPixelBufferGetWidth(pixel_buffer),
-                         CVPixelBufferGetHeight(pixel_buffer)),
-      pixel_buffer_(pixel_buffer),
-      buffer_width_(width_), buffer_height_(height_),
-      crop_width_(width_), crop_height_(height_),
-      crop_x_(0), crop_y_(0) {
+    : pixel_buffer_(pixel_buffer),
+      width_(CVPixelBufferGetWidth(pixel_buffer)),
+      height_(CVPixelBufferGetHeight(pixel_buffer)),
+      buffer_width_(width_),
+      buffer_height_(height_),
+      crop_width_(width_),
+      crop_height_(height_),
+      crop_x_(0),
+      crop_y_(0) {
   CVBufferRetain(pixel_buffer_);
 }
 
@@ -50,8 +55,19 @@ CoreVideoFrameBuffer::~CoreVideoFrameBuffer() {
   CVBufferRelease(pixel_buffer_);
 }
 
-rtc::scoped_refptr<VideoFrameBuffer>
-CoreVideoFrameBuffer::NativeToI420Buffer() {
+VideoFrameBuffer::Type CoreVideoFrameBuffer::type() const {
+  return Type::kNative;
+}
+
+int CoreVideoFrameBuffer::width() const {
+  return width_;
+}
+
+int CoreVideoFrameBuffer::height() const {
+  return height_;
+}
+
+rtc::scoped_refptr<I420BufferInterface> CoreVideoFrameBuffer::ToI420() {
   const OSType pixel_format = CVPixelBufferGetPixelFormatType(pixel_buffer_);
   RTC_DCHECK(pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
              pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
diff --git a/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h b/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h
index fc8c171a0b..603cbc01c4 100644
--- a/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h
+++ b/webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h
@@ -19,7 +19,7 @@
 
 namespace webrtc {
 
-class CoreVideoFrameBuffer : public NativeHandleBuffer {
+class CoreVideoFrameBuffer : public VideoFrameBuffer {
  public:
   explicit CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer);
   CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer,
@@ -31,7 +31,8 @@ class CoreVideoFrameBuffer : public NativeHandleBuffer {
                        int crop_y);
   ~CoreVideoFrameBuffer() override;
 
-  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+  CVPixelBufferRef pixel_buffer() { return pixel_buffer_; }
+
   // Returns true if the internal pixel buffer needs to be cropped.
   bool RequiresCropping() const;
   // Crop and scales the internal pixel buffer to the output pixel buffer. The
@@ -41,11 +42,17 @@ class CoreVideoFrameBuffer : public NativeHandleBuffer {
                      CVPixelBufferRef output_pixel_buffer) const;
 
  private:
+  Type type() const override;
+  int width() const override;
+  int height() const override;
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
   CVPixelBufferRef pixel_buffer_;
-  // buffer_width/height is the actual pixel buffer resolution. The width/height
-  // in NativeHandleBuffer, i.e. width()/height(), is the resolution we will
-  // scale to in NativeToI420Buffer(). Cropping happens before scaling, so:
-  // buffer_width >= crop_width >= width().
+  // buffer_width/height is the actual pixel buffer resolution. The
+  // width_/height_ is the resolution we will scale to in ToI420(). Cropping
+  // happens before scaling, so: buffer_width >= crop_width >= width().
+  const int width_;
+  const int height_;
   const int buffer_width_;
   const int buffer_height_;
   const int crop_width_;
diff --git a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm
index 8ff27f8d3a..bc6672b9ac 100644
--- a/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm
+++ b/webrtc/sdk/objc/Framework/Classes/VideoToolbox/encoder.mm
@@ -155,9 +155,8 @@ struct FrameEncodeParams {
 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the
 // encoder. This performs the copy and format conversion.
 // TODO(tkchin): See if encoder will accept i420 frames and compare performance.
-bool CopyVideoFrameToPixelBuffer(
-    const rtc::scoped_refptr<VideoFrameBuffer>& frame,
-    CVPixelBufferRef pixel_buffer) {
+bool CopyVideoFrameToPixelBuffer(const rtc::scoped_refptr<I420BufferInterface>& frame,
+                                 CVPixelBufferRef pixel_buffer) {
   RTC_DCHECK(pixel_buffer);
   RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixel_buffer),
                 kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
@@ -412,13 +411,12 @@ int H264VideoToolboxEncoder::Encode(
   }
 #endif
 
-  CVPixelBufferRef pixel_buffer = static_cast<CVPixelBufferRef>(
-      frame.video_frame_buffer()->native_handle());
-  if (pixel_buffer) {
-    // Native frame.
+  CVPixelBufferRef pixel_buffer;
+  if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative) {
     rtc::scoped_refptr<CoreVideoFrameBuffer> core_video_frame_buffer(
         static_cast<CoreVideoFrameBuffer*>(frame.video_frame_buffer().get()));
     if (!core_video_frame_buffer->RequiresCropping()) {
+      pixel_buffer = core_video_frame_buffer->pixel_buffer();
       // This pixel buffer might have a higher resolution than what the
       // compression session is configured to. The compression session can
       // handle that and will output encoded frames in the configured
@@ -441,7 +439,7 @@ int H264VideoToolboxEncoder::Encode(
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
     RTC_DCHECK(pixel_buffer);
-    if (!internal::CopyVideoFrameToPixelBuffer(frame.video_frame_buffer(),
+    if (!internal::CopyVideoFrameToPixelBuffer(frame.video_frame_buffer()->ToI420(),
                                                pixel_buffer)) {
      LOG(LS_ERROR) << "Failed to copy frame data.";
      CVBufferRelease(pixel_buffer);
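
For readers unfamiliar with the `VideoFrameBuffer` interface this patch migrates to, the dispatch pattern the changed call sites now follow is: check `type()`, downcast to the concrete native buffer class only when it reports `kNative` (to reach the platform handle), and fall back to `ToI420()` otherwise. The sketch below is a minimal, self-contained illustration of that pattern under simplified assumptions, not WebRTC code: `FakePixelBufferRef`, `NativeBuffer`, `WrappedI420Buffer`, and `EncodeFrame` are made-up stand-ins for `CVPixelBufferRef`, `CoreVideoFrameBuffer`, an I420 buffer, and the encoder's `Encode()` path.

```cpp
#include <cstdio>
#include <memory>

// Hypothetical stand-in for CVPixelBufferRef (an opaque platform handle).
using FakePixelBufferRef = void*;

// Hypothetical stand-in for an I420 pixel buffer.
struct I420Buffer {
  int width = 0;
  int height = 0;
};

// Simplified version of the interface the patch targets: no native_handle(),
// only a type tag and a conversion to I420.
class VideoFrameBuffer {
 public:
  enum class Type { kNative, kI420 };
  virtual ~VideoFrameBuffer() = default;
  virtual Type type() const = 0;
  virtual std::shared_ptr<I420Buffer> ToI420() = 0;
};

// Native buffer wrapping a platform handle, analogous to CoreVideoFrameBuffer.
class NativeBuffer : public VideoFrameBuffer {
 public:
  explicit NativeBuffer(FakePixelBufferRef handle) : handle_(handle) {}
  Type type() const override { return Type::kNative; }
  FakePixelBufferRef pixel_buffer() const { return handle_; }
  std::shared_ptr<I420Buffer> ToI420() override {
    // A real implementation would map the pixel buffer and convert NV12 to I420.
    return std::make_shared<I420Buffer>();
  }

 private:
  FakePixelBufferRef handle_;
};

// Plain I420 buffer that simply reports itself as such.
class WrappedI420Buffer : public VideoFrameBuffer {
 public:
  Type type() const override { return Type::kI420; }
  std::shared_ptr<I420Buffer> ToI420() override {
    return std::make_shared<I420Buffer>();
  }
};

// Caller-side dispatch mirroring the encoder.mm change: use the platform
// handle directly when the buffer is native, otherwise convert to I420.
void EncodeFrame(VideoFrameBuffer& buffer) {
  if (buffer.type() == VideoFrameBuffer::Type::kNative) {
    FakePixelBufferRef handle =
        static_cast<NativeBuffer&>(buffer).pixel_buffer();
    std::printf("feeding native handle %p to the encoder\n", handle);
  } else {
    std::shared_ptr<I420Buffer> i420 = buffer.ToI420();
    std::printf("copying %dx%d I420 frame into a pixel buffer\n",
                i420->width, i420->height);
  }
}

int main() {
  int dummy = 0;
  NativeBuffer native(&dummy);
  WrappedI420Buffer software;
  EncodeFrame(native);    // takes the zero-copy native path
  EncodeFrame(software);  // takes the ToI420() copy path
  return 0;
}
```

The design point visible in the diff is the same as in the sketch: the generic interface no longer exposes a `native_handle()`, so only code that has already confirmed `Type::kNative` touches the platform-specific buffer, and every other consumer goes through `ToI420()`.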