Add CoreVideoFrameBuffer.

- Makes the VideoToolbox H264 decoder output CoreVideoFrameBuffer
- Makes the iOS renderer convert the frame buffer if it is not I420

BUG=

Review URL: https://codereview.webrtc.org/1853503003

Cr-Commit-Position: refs/heads/master@{#12224}
Authored by tkchin on 2016-04-04 14:10:43 -07:00; committed by Commit bot
parent 73023a9e72
commit 7d06a8cfe4
10 changed files with 202 additions and 58 deletions
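To make the new data flow concrete before the per-file diffs: the VideoToolbox decoder now hands WebRTC a frame whose buffer wraps the decoder's CVPixelBufferRef instead of copying it, and the NV12-to-I420 copy happens at most once, only when a consumer asks for CPU-accessible planes. A minimal C++ sketch of that flow (illustration only, not part of the commit; WrapPixelBuffer and EnsureI420 are hypothetical helpers, while native_handle() and NativeToI420Buffer() are existing webrtc::VideoFrameBuffer virtuals):

#include <CoreVideo/CoreVideo.h>

#include "webrtc/common_video/include/corevideo_frame_buffer.h"

// Decoder side: wrap the decoded pixel buffer without copying. The wrapper
// retains the CVPixelBufferRef for its own lifetime (see the constructor in
// corevideo_frame_buffer.cc below).
rtc::scoped_refptr<webrtc::VideoFrameBuffer> WrapPixelBuffer(
    CVPixelBufferRef pixel_buffer) {
  return new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixel_buffer);
}

// Consumer side: only CVPixelBuffer-backed frames pay for the NV12 -> I420
// copy; frames that are already I420 pass through unchanged.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> EnsureI420(
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer) {
  return buffer->native_handle() ? buffer->NativeToI420Buffer() : buffer;
}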

webrtc/api/objc/RTCEAGLVideoView.m

@@ -8,12 +8,12 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#import "RTCEAGLVideoView.h"
+#import "webrtc/api/objc/RTCEAGLVideoView.h"
 
 #import <GLKit/GLKit.h>
 
-#import "RTCVideoFrame.h"
-#import "RTCOpenGLVideoRenderer.h"
+#import "webrtc/api/objc/RTCOpenGLVideoRenderer.h"
+#import "webrtc/api/objc/RTCVideoFrame.h"
 
 // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
 // refreshes, which should be 30fps. We wrap the display link in order to avoid
@@ -210,6 +210,10 @@
 }
 
 - (void)renderFrame:(RTCVideoFrame *)frame {
+  // Generate the i420 frame on video send thread instead of main thread.
+  // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
+  // CVPixelBuffer textures.
+  [frame convertBufferIfNeeded];
   self.videoFrame = frame;
 }

webrtc/api/objc/RTCVideoFrame+Private.h

@@ -16,6 +16,9 @@ NS_ASSUME_NONNULL_BEGIN
 @interface RTCVideoFrame ()
 
+@property(nonatomic, readonly)
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
+
 - (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame
     NS_DESIGNATED_INITIALIZER;

webrtc/api/objc/RTCVideoFrame.h

@@ -8,6 +8,7 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
+#import <AVFoundation/AVFoundation.h>
 #import <Foundation/Foundation.h>
 
 NS_ASSUME_NONNULL_BEGIN
@@ -30,8 +31,19 @@ NS_ASSUME_NONNULL_BEGIN
 @property(nonatomic, readonly) int32_t uPitch;
 @property(nonatomic, readonly) int32_t vPitch;
 
 /** Timestamp in nanoseconds. */
 @property(nonatomic, readonly) int64_t timeStamp;
 
+/** The native handle should be a pixel buffer on iOS. */
+@property(nonatomic, readonly) CVPixelBufferRef nativeHandle;
+
 - (instancetype)init NS_UNAVAILABLE;
+
+/** If the frame is backed by a CVPixelBuffer, creates a backing i420 frame.
+ *  Calling the yuv plane properties will call this method if needed.
+ */
+- (void)convertBufferIfNeeded;
 
 @end
 
 NS_ASSUME_NONNULL_END

webrtc/api/objc/RTCVideoFrame.mm

@@ -16,6 +16,7 @@
 @implementation RTCVideoFrame {
   rtc::scoped_ptr<cricket::VideoFrame> _videoFrame;
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
 }
 
 - (size_t)width {
@@ -38,30 +39,65 @@
 }
 
 - (const uint8_t *)yPlane {
-  const cricket::VideoFrame *const_frame = _videoFrame.get();
-  return const_frame->GetYPlane();
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kYPlane);
 }
 
 - (const uint8_t *)uPlane {
-  const cricket::VideoFrame *const_frame = _videoFrame.get();
-  return const_frame->GetUPlane();
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kUPlane);
 }
 
 - (const uint8_t *)vPlane {
-  const cricket::VideoFrame *const_frame = _videoFrame.get();
-  return const_frame->GetVPlane();
+  if (!self.i420Buffer) {
+    return nullptr;
+  }
+  return self.i420Buffer->data(webrtc::kVPlane);
 }
 
 - (int32_t)yPitch {
-  return _videoFrame->GetYPitch();
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kYPlane);
 }
 
 - (int32_t)uPitch {
-  return _videoFrame->GetUPitch();
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kUPlane);
 }
 
 - (int32_t)vPitch {
-  return _videoFrame->GetVPitch();
+  if (!self.i420Buffer) {
+    return 0;
+  }
+  return self.i420Buffer->stride(webrtc::kVPlane);
 }
 
 - (int64_t)timeStamp {
   return _videoFrame->GetTimeStamp();
 }
 
+- (CVPixelBufferRef)nativeHandle {
+  return static_cast<CVPixelBufferRef>(_videoFrame->GetNativeHandle());
+}
+
+- (void)convertBufferIfNeeded {
+  if (!_i420Buffer) {
+    if (_videoFrame->GetNativeHandle()) {
+      // Convert to I420.
+      _i420Buffer = _videoFrame->GetVideoFrameBuffer()->NativeToI420Buffer();
+    } else {
+      // Should already be I420.
+      _i420Buffer = _videoFrame->GetVideoFrameBuffer();
+    }
+  }
+}
+
 #pragma mark - Private
@@ -75,4 +111,9 @@
   return self;
 }
 
+- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
+  [self convertBufferIfNeeded];
+  return _i420Buffer;
+}
+
 @end
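The plane and pitch accessors above memoize the conversion: the first access pays for the NV12-to-I420 copy, and every later access reuses _i420Buffer. A C++ analogue of the pattern (hypothetical class, illustration only; like the Objective-C original it is unsynchronized, so a given frame should be used from one thread at a time):

#include "webrtc/common_video/include/video_frame_buffer.h"

class LazyI420Frame {
 public:
  explicit LazyI420Frame(rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer)
      : buffer_(buffer) {}

  const uint8_t* YPlane() {
    ConvertIfNeeded();
    return i420_.get() ? i420_->data(webrtc::kYPlane) : nullptr;
  }

  int32_t YPitch() {
    ConvertIfNeeded();
    return i420_.get() ? i420_->stride(webrtc::kYPlane) : 0;
  }

 private:
  void ConvertIfNeeded() {
    if (i420_.get())
      return;
    // A native handle means the pixels live in a CVPixelBuffer and need the
    // one-time conversion; otherwise the buffer should already be I420.
    i420_ = buffer_->native_handle() ? buffer_->NativeToI420Buffer() : buffer_;
  }

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420_;
};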

webrtc/api/statscollector_unittest.cc

@@ -44,11 +44,6 @@ using webrtc::PeerConnectionInterface;
 using webrtc::StatsReport;
 using webrtc::StatsReports;
 
-namespace {
-// This value comes from openssl/tls1.h
-const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014;
-}  // namespace
-
 namespace cricket {
 class ChannelManager;
@@ -57,6 +52,11 @@ class ChannelManager;
 
 namespace webrtc {
+namespace internal {
+// This value comes from openssl/tls1.h
+static const int TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014;
+}  // namespace internal
+
 // Error return values
 const char kNotFound[] = "NOT FOUND";
@@ -674,7 +674,8 @@ class StatsCollectorTest : public testing::Test {
     cricket::TransportChannelStats channel_stats;
     channel_stats.component = 1;
     channel_stats.srtp_crypto_suite = rtc::SRTP_AES128_CM_SHA1_80;
-    channel_stats.ssl_cipher_suite = TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
+    channel_stats.ssl_cipher_suite =
+        internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA;
 
     cricket::TransportStats transport_stats;
     transport_stats.transport_name = "audio";
@@ -740,7 +741,7 @@ class StatsCollectorTest : public testing::Test {
         ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,
                           StatsReport::kStatsValueNameDtlsCipher);
     EXPECT_EQ(rtc::SSLStreamAdapter::SslCipherSuiteToName(
-                  TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
+                  internal::TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA),
               dtls_cipher_suite);
     std::string srtp_crypto_suite =
         ExtractStatsValue(StatsReport::kStatsReportTypeComponent, reports,

webrtc/common_video/BUILD.gn

@@ -60,4 +60,12 @@ source_set("common_video") {
     # Need to add a directory normally exported by libyuv.
     include_dirs += [ "$rtc_libyuv_dir/include" ]
   }
+
+  if (is_ios || is_mac) {
+    sources += [
+      "corevideo_frame_buffer.cc",
+      "include/corevideo_frame_buffer.h",
+    ]
+    libs = [ "CoreVideo.framework" ]
+  }
 }

webrtc/common_video/common_video.gyp

@@ -37,6 +37,19 @@
           # Need to add a directory normally exported by libyuv.gyp.
           'include_dirs': ['<(libyuv_dir)/include',],
         }],
+        ['OS=="ios" or OS=="mac"', {
+          'sources': [
+            'corevideo_frame_buffer.cc',
+            'include/corevideo_frame_buffer.h',
+          ],
+          'link_settings': {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-framework CoreVideo',
+              ],
+            },
+          },
+        }],
       ],
       'sources': [
         'i420_buffer_pool.cc',

webrtc/common_video/corevideo_frame_buffer.cc (new file)

@@ -0,0 +1,61 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/include/corevideo_frame_buffer.h"
+
+#include "libyuv/convert.h"
+
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc {
+
+CoreVideoFrameBuffer::CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer)
+    : NativeHandleBuffer(pixel_buffer,
+                         CVPixelBufferGetWidth(pixel_buffer),
+                         CVPixelBufferGetHeight(pixel_buffer)),
+      pixel_buffer_(pixel_buffer) {
+  CVBufferRetain(pixel_buffer_);
+}
+
+CoreVideoFrameBuffer::~CoreVideoFrameBuffer() {
+  CVBufferRelease(pixel_buffer_);
+}
+
+rtc::scoped_refptr<VideoFrameBuffer>
+CoreVideoFrameBuffer::NativeToI420Buffer() {
+  RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer_) ==
+             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
+  size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer_, 0);
+  size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer_, 0);
+  // TODO(tkchin): Use a frame buffer pool.
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+      new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
+  CVPixelBufferLockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
+  const uint8_t* src_y = static_cast<const uint8_t*>(
+      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 0));
+  int src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 0);
+  const uint8_t* src_uv = static_cast<const uint8_t*>(
+      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer_, 1));
+  int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer_, 1);
+  int ret = libyuv::NV12ToI420(
+      src_y, src_y_stride, src_uv, src_uv_stride,
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      width, height);
+  CVPixelBufferUnlockBaseAddress(pixel_buffer_, kCVPixelBufferLock_ReadOnly);
+  if (ret) {
+    LOG(LS_ERROR) << "Error converting NV12 to I420: " << ret;
+    return nullptr;
+  }
+  return buffer;
+}
+
+}  // namespace webrtc
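A usage sketch for the new class (hypothetical, not from the commit), mainly to show the CoreVideo ownership rules: CVPixelBufferCreate returns a +1 reference, and the wrapper's constructor takes its own retain, so the creator can release right after wrapping. It assumes an NV12 pixel buffer, as the RTC_DCHECK above requires:

#include <CoreVideo/CoreVideo.h>

#include "webrtc/common_video/include/corevideo_frame_buffer.h"

rtc::scoped_refptr<webrtc::VideoFrameBuffer> MakeWrappedFrame() {
  CVPixelBufferRef pixel_buffer = nullptr;
  CVPixelBufferCreate(kCFAllocatorDefault, 640, 360,
                      kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                      nullptr, &pixel_buffer);
  // A real producer (the decoder) would fill the Y and UV planes here.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> wrapped(
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixel_buffer));
  // The wrapper retained the buffer; balance the create call's +1 reference.
  CVBufferRelease(pixel_buffer);
  return wrapped;
}

Calling wrapped->NativeToI420Buffer() later performs the one-time copy; the CVPixelBufferRef itself stays alive until the last wrapper reference goes away.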

webrtc/common_video/include/corevideo_frame_buffer.h (new file)

@@ -0,0 +1,34 @@
+/*
+ *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_
+#define WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_
+
+#include <CoreVideo/CoreVideo.h>
+
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+namespace webrtc {
+
+class CoreVideoFrameBuffer : public NativeHandleBuffer {
+ public:
+  explicit CoreVideoFrameBuffer(CVPixelBufferRef pixel_buffer);
+  ~CoreVideoFrameBuffer() override;
+
+  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+ private:
+  CVPixelBufferRef pixel_buffer_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_COMMON_VIDEO_INCLUDE_COREVIDEO_FRAME_BUFFER_H_

webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.cc

@@ -21,12 +21,14 @@
 #if defined(WEBRTC_IOS)
 #include "webrtc/base/objc/RTCUIApplication.h"
 #endif
-#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/include/corevideo_frame_buffer.h"
 #include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
 #include "webrtc/video_frame.h"
 
 namespace internal {
 
+static const int64_t kMsPerSec = 1000;
+
 // Convenience function for creating a dictionary.
 inline CFDictionaryRef CreateCFDictionary(CFTypeRef* keys,
                                           CFTypeRef* values,
@@ -45,42 +47,6 @@ struct FrameDecodeParams {
   int64_t timestamp;
 };
 
-// On decode we receive a CVPixelBuffer, which we need to convert to a frame
-// buffer for use in the rest of WebRTC. Unfortunately this involves a frame
-// copy.
-// TODO(tkchin): Stuff CVPixelBuffer into a TextureBuffer and pass that along
-// instead once the pipeline supports it.
-rtc::scoped_refptr<webrtc::VideoFrameBuffer> VideoFrameBufferForPixelBuffer(
-    CVPixelBufferRef pixel_buffer) {
-  RTC_DCHECK(pixel_buffer);
-  RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer) ==
-             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
-  size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer, 0);
-  size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer, 0);
-  // TODO(tkchin): Use a frame buffer pool.
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
-  CVPixelBufferLockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
-  const uint8_t* src_y = reinterpret_cast<const uint8_t*>(
-      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
-  int src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
-  const uint8_t* src_uv = reinterpret_cast<const uint8_t*>(
-      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
-  int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
-  int ret = libyuv::NV12ToI420(
-      src_y, src_y_stride, src_uv, src_uv_stride,
-      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
-      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
-      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
-      width, height);
-  CVPixelBufferUnlockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
-  if (ret) {
-    LOG(LS_ERROR) << "Error converting NV12 to I420: " << ret;
-    return nullptr;
-  }
-  return buffer;
-}
-
 // This is the callback function that VideoToolbox calls when decode is
 // complete.
 void VTDecompressionOutputCallback(void* decoder,
@@ -98,8 +64,9 @@ void VTDecompressionOutputCallback(void* decoder,
   }
   // TODO(tkchin): Handle CVO properly.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      VideoFrameBufferForPixelBuffer(image_buffer);
-  webrtc::VideoFrame decoded_frame(buffer, decode_params->timestamp, 0,
+      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);
+  webrtc::VideoFrame decoded_frame(buffer, decode_params->timestamp,
+                                   CMTimeGetSeconds(timestamp) * kMsPerSec,
                                    webrtc::kVideoRotation_0);
   decode_params->callback->Decoded(decoded_frame);
 }