From 7d06a8cfe420772aa0bb5658f7b28396ab9b1e26 Mon Sep 17 00:00:00 2001
From: tkchin
Date: Mon, 4 Apr 2016 14:10:43 -0700
Subject: [PATCH] Add CoreVideoFrameBuffer.

- Makes vt h264 decoder output CoreVideoFrameBuffer
- Makes iOS renderer convert frame buffer if it is not i420

BUG=

Review URL: https://codereview.webrtc.org/1853503003
Cr-Commit-Position: refs/heads/master@{#12224}
---
 webrtc/api/objc/RTCEAGLVideoView.m            | 10 ++-
 webrtc/api/objc/RTCVideoFrame+Private.h       |  3 +
 webrtc/api/objc/RTCVideoFrame.h               | 12 ++++
 webrtc/api/objc/RTCVideoFrame.mm              | 59 +++++++++++++++---
 webrtc/api/statscollector_unittest.cc         | 15 ++---
 webrtc/common_video/BUILD.gn                  |  8 +++
 webrtc/common_video/common_video.gyp          | 13 ++++
 webrtc/common_video/corevideo_frame_buffer.cc | 61 +++++++++++++++++++
 .../include/corevideo_frame_buffer.h          | 34 +++++++++++
 .../codecs/h264/h264_video_toolbox_decoder.cc | 45 ++------------
 10 files changed, 202 insertions(+), 58 deletions(-)
 create mode 100644 webrtc/common_video/corevideo_frame_buffer.cc
 create mode 100644 webrtc/common_video/include/corevideo_frame_buffer.h

diff --git a/webrtc/api/objc/RTCEAGLVideoView.m b/webrtc/api/objc/RTCEAGLVideoView.m
index e664ede455..58fd108c4c 100644
--- a/webrtc/api/objc/RTCEAGLVideoView.m
+++ b/webrtc/api/objc/RTCEAGLVideoView.m
@@ -8,12 +8,12 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#import "RTCEAGLVideoView.h"
+#import "webrtc/api/objc/RTCEAGLVideoView.h"
 
 #import <GLKit/GLKit.h>
 
-#import "RTCVideoFrame.h"
-#import "RTCOpenGLVideoRenderer.h"
+#import "webrtc/api/objc/RTCOpenGLVideoRenderer.h"
+#import "webrtc/api/objc/RTCVideoFrame.h"
 
 // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
 // refreshes, which should be 30fps. We wrap the display link in order to avoid
@@ -210,6 +210,10 @@
 }
 
 - (void)renderFrame:(RTCVideoFrame *)frame {
+  // Generate the i420 frame on video send thread instead of main thread.
+  // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
+  // CVPixelBuffer textures.
+  [frame convertBufferIfNeeded];
   self.videoFrame = frame;
 }
 
diff --git a/webrtc/api/objc/RTCVideoFrame+Private.h b/webrtc/api/objc/RTCVideoFrame+Private.h
index 873d3ebbab..52f532ce88 100644
--- a/webrtc/api/objc/RTCVideoFrame+Private.h
+++ b/webrtc/api/objc/RTCVideoFrame+Private.h
@@ -16,6 +16,9 @@ NS_ASSUME_NONNULL_BEGIN
 
 @interface RTCVideoFrame ()
 
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
+
 - (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame
     NS_DESIGNATED_INITIALIZER;
 
diff --git a/webrtc/api/objc/RTCVideoFrame.h b/webrtc/api/objc/RTCVideoFrame.h
index 791e41c5ec..b44bf7331d 100644
--- a/webrtc/api/objc/RTCVideoFrame.h
+++ b/webrtc/api/objc/RTCVideoFrame.h
@@ -8,6 +8,7 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
+#import <AVFoundation/AVFoundation.h>
 #import <Foundation/Foundation.h>
 
 NS_ASSUME_NONNULL_BEGIN
@@ -30,8 +31,19 @@ NS_ASSUME_NONNULL_BEGIN
 @property(nonatomic, readonly) int32_t uPitch;
 @property(nonatomic, readonly) int32_t vPitch;
 
+/** Timestamp in nanoseconds. */
+@property(nonatomic, readonly) int64_t timeStamp;
+
+/** The native handle should be a pixel buffer on iOS. */
+@property(nonatomic, readonly) CVPixelBufferRef nativeHandle;
+
 - (instancetype)init NS_UNAVAILABLE;
 
+/** If the frame is backed by a CVPixelBuffer, creates a backing i420 frame.
+ *  Calling the yuv plane properties will call this method if needed.
+ */
+- (void)convertBufferIfNeeded;
+
 @end
 
 NS_ASSUME_NONNULL_END
diff --git a/webrtc/api/objc/RTCVideoFrame.mm b/webrtc/api/objc/RTCVideoFrame.mm
index d70ab372ac..95f4ac82bf 100644
--- a/webrtc/api/objc/RTCVideoFrame.mm
+++ b/webrtc/api/objc/RTCVideoFrame.mm
@@ -16,6 +16,7 @@
 
 @implementation RTCVideoFrame {
   rtc::scoped_ptr<cricket::VideoFrame> _videoFrame;
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
 }
 
 - (size_t)width {
@@ -38,30 +39,65 @@
 }
 
 - (const uint8_t *)yPlane {
-  const cricket::VideoFrame *const_frame = _videoFrame.get();
-  return const_frame->GetYPlane();
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kYPlane);
 }
 
 - (const uint8_t *)uPlane {
-  const cricket::VideoFrame *const_frame = _videoFrame.get();
-  return const_frame->GetUPlane();
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kUPlane);
 }
 
 - (const uint8_t *)vPlane {
-  const cricket::VideoFrame *const_frame = _videoFrame.get();
-  return const_frame->GetVPlane();
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kVPlane);
 }
 
 - (int32_t)yPitch {
-  return _videoFrame->GetYPitch();
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kYPlane);
 }
 
 - (int32_t)uPitch {
-  return _videoFrame->GetUPitch();
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kUPlane);
 }
 
 - (int32_t)vPitch {
-  return _videoFrame->GetVPitch();
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kVPlane);
+}
+
+- (int64_t)timeStamp {
+  return _videoFrame->GetTimeStamp();
+}
+
+- (CVPixelBufferRef)nativeHandle {
+  return static_cast<CVPixelBufferRef>(_videoFrame->GetNativeHandle());
+}
+
+- (void)convertBufferIfNeeded {
+  if (!_i420Buffer) {
+    if (_videoFrame->GetNativeHandle()) {
+      // Convert to I420.
+      _i420Buffer = _videoFrame->GetVideoFrameBuffer()->NativeToI420Buffer();
+    } else {
+      // Should already be I420.
+      _i420Buffer = _videoFrame->GetVideoFrameBuffer();
+    }
+  }
 }
 
 #pragma mark - Private
@@ -75,4 +111,9 @@
   return self;
 }
 
+- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
+  [self convertBufferIfNeeded];
+  return _i420Buffer;
+}
+
 @end
diff --git a/webrtc/api/statscollector_unittest.cc b/webrtc/api/statscollector_unittest.cc
index a1cd1e820f..3b04383a85 100644
--- a/webrtc/api/statscollector_unittest.cc
+++ b/webrtc/api/statscollector_unittest.cc
@@ -44,11 +44,6 @@ using webrtc::PeerConnectionInterface;
 using webrtc::StatsReport;
 using webrtc::StatsReports;
 
-namespace {
-// This value comes from openssl/tls1.h
-const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014;
-}  // namespace
-
 namespace cricket {
 
 class ChannelManager;
@@ -57,6 +52,11 @@ class ChannelManager;
 
 namespace webrtc {
 
+namespace internal {
+// This value comes from openssl/tls1.h
+static const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014;
+}  // namespace internal
+
 // Error return values
 const char kNotFound[] = "NOT FOUND";
 
@@ -674,7 +674,8 @@ class StatsCollectorTest : public testing::Test {
     cricket::TransportChannelStats channel_stats;
     channel_stats.component = 1;
     channel_stats.srtp_crypto_suite = rtc::SRTP_AES128_CM_SHA1_80;
-    channel_stats.ssl_cipher_suite = TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
+    channel_stats.ssl_cipher_suite =
+        internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
 
     cricket::TransportStats transport_stats;
     transport_stats.transport_name = "audio";
@@ -740,7 +741,7 @@ class StatsCollectorTest : public testing::Test {
         ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
                           StatsReport::kStatsValueNameDtlsCipher);
     EXPECT_EQ(rtc::SSLStreamAdapter::SslCipherSuiteToName(
-                  TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
+                  internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
              dtls_cipher_suite);
     std::string srtp_crypto_suite =
         ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
diff --git a/webrtc/common_video/BUILD.gn b/webrtc/common_video/BUILD.gn
index 4ef968d60f..ed877bd344 100644
--- a/webrtc/common_video/BUILD.gn
+++ b/webrtc/common_video/BUILD.gn
@@ -60,4 +60,12 @@ source_set("common_video") {
     # Need to add a directory normally exported by libyuv.
     include_dirs += [ "$rtc_libyuv_dir/include" ]
   }
+
+  if (is_ios || is_mac) {
+    sources += [
+      "corevideo_frame_buffer.cc",
+      "include/corevideo_frame_buffer.h",
+    ]
+    libs = [ "CoreVideo.framework" ]
+  }
 }
diff --git a/webrtc/common_video/common_video.gyp b/webrtc/common_video/common_video.gyp
index fe14da1d2e..b392bd2aa4 100644
--- a/webrtc/common_video/common_video.gyp
+++ b/webrtc/common_video/common_video.gyp
@@ -37,6 +37,19 @@
           # Need to add a directory normally exported by libyuv.gyp.
           'include_dirs': ['<(libyuv_dir)/include',],
         }],
+        ['OS=="ios" or OS=="mac"', {
+          'sources': [
+            'corevideo_frame_buffer.cc',
+            'include/corevideo_frame_buffer.h',
+          ],
+          'link_settings': {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-framework CoreVideo',
+              ],
+            },
+          },
+        }],
       ],
       'sources': [
        'i420_buffer_pool.cc',
diff --git a/webrtc/common_video/corevideo_frame_buffer.cc b/webrtc/common_video/corevideo_frame_buffer.cc
new file mode 100644
index 0000000000..55dc00da85
--- /dev/null
+++ b/webrtc/common_video/corevideo_frame_buffer.cc
@@ -0,0 +1,61 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/include/corevideo_frame_buffer.h"
+
+#include "libyuv/convert.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+CoreVideoFrameBuffer::CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer)
+    : NativeHandleBuffer(pixel_buffer,
+                         CVPixelBufferGetWidth(pixel_buffer),
+                         CVPixelBufferGetHeight(pixel_buffer)),
+      pixel_buffer_(pixel_buffer) {
+  CVBufferRetain(pixel_buffer_);
+}
+
+CoreVideoFrameBuffer::~CoreVideoFrameBuffer() {
+  CVBufferRelease(pixel_buffer_);
+}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+CoreVideoFrameBuffer::NativeToI420Buffer() {
+  RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer_) ==
+             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
+  size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer_, 0);
+  size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer_, 0);
+  // TODO(tkchin): Use a frame buffer pool.
+  rtc::scoped_refptr<VideoFrameBuffer> buffer =
+      new rtc::RefCountedObject<I420Buffer>(width, height);
+  CVPixelBufferLockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
+  const uint8_t* src_y = static_cast<const uint8_t*>(
+      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 0));
+  int src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 0);
+  const uint8_t* src_uv = static_cast<const uint8_t*>(
+      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 1));
+  int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
+  int ret = libyuv::NV12ToI420(
+      src_y, src_y_stride, src_uv, src_uv_stride,
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      width, height);
+  CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
+  if (ret) {
+    LOG(LS_ERROR) << "Error converting NV12 to I420: " << ret;
+    return nullptr;
+  }
+  return buffer;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/common_video/include/corevideo_frame_buffer.h b/webrtc/common_video/include/corevideo_frame_buffer.h
new file mode 100644
index 0000000000..ed5361d5cf
--- /dev/null
+++ b/webrtc/common_video/include/corevideo_frame_buffer.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_
+
+#include <CoreVideo/CoreVideo.h>
+
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+namespace webrtc {
+
+class CoreVideoFrameBuffer : public NativeHandleBuffer {
+ public:
+  explicit CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer);
+  ~CoreVideoFrameBuffer() override;
+
+  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+ private:
+  CVPixelBufferRef pixel_buffer_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_
+
diff --git a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
index 5db2cd1e9e..0ea2600197 100644
--- a/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
+++ b/webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc
@@ -21,12 +21,14 @@
 #if defined(WEBRTC_IOS)
 #include "webrtc/base/objc/RTCUIApplication.h"
 #endif
-#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/include/corevideo_frame_buffer.h"
 #include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
 #include "webrtc/video_frame.h"
 
 namespace internal {
 
+static const int64_t kMsPerSec = 1000;
+
 // Convenience function for creating a dictionary.
 inline CFDictionaryRef CreateCFDictionary(CFTypeRef* keys,
                                           CFTypeRef* values,
@@ -45,42 +47,6 @@ struct FrameDecodeParams {
   int64_t timestamp;
 };
 
-// On decode we receive a CVPixelBuffer, which we need to convert to a frame
-// buffer for use in the rest of WebRTC. Unfortunately this involves a frame
-// copy.
-// TODO(tkchin): Stuff CVPixelBuffer into a TextureBuffer and pass that along
-// instead once the pipeline supports it.
-rtc::scoped_refptr<webrtc::VideoFrameBuffer> VideoFrameBufferForPixelBuffer(
-    CVPixelBufferRef pixel_buffer) {
-  RTC_DCHECK(pixel_buffer);
-  RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer) ==
-             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
-  size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer, 0);
-  size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer, 0);
-  // TODO(tkchin): Use a frame buffer pool.
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
-  CVPixelBufferLockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
-  const uint8_t* src_y = reinterpret_cast<const uint8_t*>(
-      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
-  int src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
-  const uint8_t* src_uv = reinterpret_cast<const uint8_t*>(
-      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
-  int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
-  int ret = libyuv::NV12ToI420(
-      src_y, src_y_stride, src_uv, src_uv_stride,
-      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
-      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
-      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
-      width, height);
-  CVPixelBufferUnlockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
-  if (ret) {
-    LOG(LS_ERROR) << "Error converting NV12 to I420: " << ret;
-    return nullptr;
-  }
-  return buffer;
-}
-
 // This is the callback function that VideoToolbox calls when decode is
 // complete.
 void VTDecompressionOutputCallback(void* decoder,
@@ -98,8 +64,9 @@
   }
   // TODO(tkchin): Handle CVO properly.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      VideoFrameBufferForPixelBuffer(image_buffer);
-  webrtc::VideoFrame decoded_frame(buffer, decode_params->timestamp, 0,
+      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);
+  webrtc::VideoFrame decoded_frame(buffer, decode_params->timestamp,
+                                   CMTimeGetSeconds(timestamp) * kMsPerSec,
                                    webrtc::kVideoRotation_0);
   decode_params->callback->Decoded(decoded_frame);
 }
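
Usage sketch (illustrative only, not part of the patch): the snippet below shows how a CVPixelBuffer-backed buffer introduced by this change could be wrapped without a copy and lazily converted to I420, mirroring what -[RTCVideoFrame convertBufferIfNeeded] does above. The helper names WrapPixelBuffer and EnsureI420 are hypothetical, the ref-count header paths are assumed from the 2016-era tree layout, and native_handle()/NativeToI420Buffer() are assumed to be the webrtc::VideoFrameBuffer accessors of that vintage.

// Hypothetical illustration, not part of the commit above.
#include <CoreVideo/CoreVideo.h>

#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/common_video/include/video_frame_buffer.h"

// Wrap a decoded CVPixelBuffer without copying it. CoreVideoFrameBuffer
// retains the pixel buffer for the lifetime of the frame buffer.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> WrapPixelBuffer(
    CVPixelBufferRef pixel_buffer) {
  return new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixel_buffer);
}

// Convert to I420 only when a consumer actually needs CPU-accessible planes,
// e.g. a renderer that cannot upload CVPixelBuffer textures yet. Buffers that
// are already I420 are returned unchanged.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> EnsureI420(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  if (buffer->native_handle()) {
    // NV12 -> I420 conversion: this is the single frame copy the patch
    // defers until the planes are accessed.
    return buffer->NativeToI420Buffer();
  }
  return buffer;
}

The point of the design is that the decoder output stays zero-copy (the pixel buffer is merely retained), and the one unavoidable copy happens only where and when plane data is actually read.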