diff --git a/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m b/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
index d39c598764..537397f378 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
+++ b/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
@@ -215,12 +215,6 @@
 }
 
 - (void)renderFrame:(RTCVideoFrame *)frame {
-#if !TARGET_OS_IPHONE
-  // Generate the i420 frame on video send thread instead of main thread.
-  // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
-  // CVPixelBuffer textures on OSX.
-  [frame convertBufferIfNeeded];
-#endif
   self.videoFrame = frame;
 }
 
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm b/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
index d325840cf5..7a09c41a84 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
@@ -15,7 +15,6 @@
 #import "RTCShader+Private.h"
 #import "WebRTC/RTCVideoFrame.h"
 
-#include "webrtc/api/video/video_rotation.h"
 #include "webrtc/base/optional.h"
 
 // |kNumTextures| must not exceed 8, which is the limit in OpenGLES2. Two sets
@@ -62,7 +61,7 @@ static const char kI420FragmentShaderSource[] =
   GLint _vSampler;
   // Store current rotation and only upload new vertex data when rotation
   // changes.
-  rtc::Optional<webrtc::VideoRotation> _currentRotation;
+  rtc::Optional<RTCVideoRotation> _currentRotation;
   // Used to create a non-padded plane for GPU upload when we receive padded
   // frames.
   std::vector<uint8_t> _planeBuffer;
@@ -126,8 +125,7 @@ static const char kI420FragmentShaderSource[] =
 #endif
   glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
   if (!_currentRotation || frame.rotation != *_currentRotation) {
-    _currentRotation = rtc::Optional<webrtc::VideoRotation>(
-        static_cast<webrtc::VideoRotation>(frame.rotation));
+    _currentRotation = rtc::Optional<RTCVideoRotation>(frame.rotation);
     RTCSetVertexData(*_currentRotation);
   }
   glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
@@ -188,32 +186,34 @@ static const char kI420FragmentShaderSource[] =
   GLint textureOffset = _currentTextureSet * 3;
   NSAssert(textureOffset + 3 <= kNumTextures, @"invalid offset");
 
-  if (frame.yPitch != static_cast<int32_t>(frame.width) ||
-      frame.uPitch != static_cast<int32_t>(frame.chromaWidth) ||
-      frame.vPitch != static_cast<int32_t>(frame.chromaWidth)) {
+  const int chromaWidth = (frame.width + 1) / 2;
+  const int chromaHeight = (frame.height + 1) / 2;
+  if (frame.strideY != frame.width ||
+      frame.strideU != chromaWidth ||
+      frame.strideV != chromaWidth) {
     _planeBuffer.resize(frame.width * frame.height);
   }
 
-  [self uploadPlane:frame.yPlane
+  [self uploadPlane:frame.dataY
             sampler:_ySampler
              offset:textureOffset
               width:frame.width
              height:frame.height
-             stride:frame.yPitch];
+             stride:frame.strideY];
 
-  [self uploadPlane:frame.uPlane
+  [self uploadPlane:frame.dataU
             sampler:_uSampler
              offset:textureOffset + 1
-              width:frame.chromaWidth
-             height:frame.chromaHeight
-             stride:frame.uPitch];
+              width:chromaWidth
+             height:chromaHeight
+             stride:frame.strideU];
 
-  [self uploadPlane:frame.vPlane
+  [self uploadPlane:frame.dataV
             sampler:_vSampler
              offset:textureOffset + 2
-              width:frame.chromaWidth
-             height:frame.chromaHeight
-             stride:frame.vPitch];
+              width:chromaWidth
+             height:chromaHeight
+             stride:frame.strideV];
 
   _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets;
   return YES;
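Note on the chroma arithmetic introduced above: with the chromaWidth/chromaHeight properties gone, the shader computes the I420 (4:2:0) chroma plane size locally. Each chroma dimension is the luma dimension halved and rounded up, so odd-sized frames still get full chroma coverage. A minimal sketch of the computation (the ChromaSize helper is illustrative, not part of the patch):

    // I420 (4:2:0) chroma sizing: round up so odd luma sizes are covered.
    // E.g. a 641x481 frame carries 321x241 chroma planes.
    static int ChromaSize(int lumaSize) {
      return (lumaSize + 1) / 2;
    }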
#import "WebRTC/RTCVideoFrame.h" -#include "webrtc/api/video/video_rotation.h" #include "webrtc/base/checks.h" #include "webrtc/base/optional.h" @@ -50,7 +49,7 @@ static const char kNV12FragmentShaderSource[] = CVOpenGLESTextureCacheRef _textureCache; // Store current rotation and only upload new vertex data when rotation // changes. - rtc::Optional _currentRotation; + rtc::Optional _currentRotation; } - (instancetype)initWithContext:(GlContextType *)context { @@ -155,8 +154,7 @@ static const char kNV12FragmentShaderSource[] = glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer); if (!_currentRotation || frame.rotation != *_currentRotation) { - _currentRotation = rtc::Optional( - static_cast(frame.rotation)); + _currentRotation = rtc::Optional(frame.rotation); RTCSetVertexData(*_currentRotation); } glDrawArrays(GL_TRIANGLE_FAN, 0, 4); diff --git a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm index aaf7e71296..bfd6eebe7e 100644 --- a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm +++ b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm @@ -48,16 +48,20 @@ _nv12Shader = [[RTCNativeNV12Shader alloc] initWithContext:_context]; } shader = _nv12Shader; -#else - // Rendering native CVPixelBuffer is not supported on OS X. - if (false) { -#endif } else { if (!_i420Shader) { _i420Shader = [[RTCI420Shader alloc] initWithContext:_context]; } shader = _i420Shader; } +#else + // Rendering native CVPixelBuffer is not supported on OS X. + frame = [frame newI420VideoFrame]; + if (!_i420Shader) { + _i420Shader = [[RTCI420Shader alloc] initWithContext:_context]; + } + shader = _i420Shader; +#endif if (!shader || ![shader drawFrame:frame]) { return NO; } diff --git a/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h index ea0f787fc8..a9931bdd51 100644 --- a/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h +++ b/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h @@ -11,6 +11,7 @@ #import "RTCShader.h" #import "WebRTC/RTCMacros.h" +#import "WebRTC/RTCVideoFrame.h" #if TARGET_OS_IPHONE #import @@ -27,4 +28,4 @@ RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader); RTC_EXTERN GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]); RTC_EXTERN BOOL RTCSetupVerticesForProgram( GLuint program, GLuint* vertexBuffer, GLuint* vertexArray); -RTC_EXTERN void RTCSetVertexData(webrtc::VideoRotation rotation); +RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation); diff --git a/webrtc/sdk/objc/Framework/Classes/RTCShader.mm b/webrtc/sdk/objc/Framework/Classes/RTCShader.mm index 26dc64f92b..155a0165a1 100644 --- a/webrtc/sdk/objc/Framework/Classes/RTCShader.mm +++ b/webrtc/sdk/objc/Framework/Classes/RTCShader.mm @@ -138,7 +138,7 @@ BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* ve } // Set vertex data to the currently bound vertex buffer. -void RTCSetVertexData(webrtc::VideoRotation rotation) { +void RTCSetVertexData(RTCVideoRotation rotation) { // When modelview and projection matrices are identity (default) the world is // contained in the square around origin with unit size 2. Drawing to these // coordinates is equivalent to drawing to the entire screen. The texture is @@ -156,16 +156,16 @@ void RTCSetVertexData(webrtc::VideoRotation rotation) { // Rotate the UV coordinates. 
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCShader.mm b/webrtc/sdk/objc/Framework/Classes/RTCShader.mm
index 26dc64f92b..155a0165a1 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCShader.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCShader.mm
@@ -138,7 +138,7 @@ BOOL RTCSetupVerticesForProgram(GLuint program, GLuint* vertexBuffer, GLuint* ve
 }
 
 // Set vertex data to the currently bound vertex buffer.
-void RTCSetVertexData(webrtc::VideoRotation rotation) {
+void RTCSetVertexData(RTCVideoRotation rotation) {
   // When modelview and projection matrices are identity (default) the world is
   // contained in the square around origin with unit size 2. Drawing to these
   // coordinates is equivalent to drawing to the entire screen. The texture is
@@ -156,16 +156,16 @@ void RTCSetVertexData(webrtc::VideoRotation rotation) {
   // Rotate the UV coordinates.
   int rotation_offset;
   switch (rotation) {
-    case webrtc::kVideoRotation_0:
+    case RTCVideoRotation_0:
       rotation_offset = 0;
       break;
-    case webrtc::kVideoRotation_90:
+    case RTCVideoRotation_90:
       rotation_offset = 1;
       break;
-    case webrtc::kVideoRotation_180:
+    case RTCVideoRotation_180:
       rotation_offset = 2;
       break;
-    case webrtc::kVideoRotation_270:
+    case RTCVideoRotation_270:
       rotation_offset = 3;
       break;
   }
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h
index e844d64790..a7a40386cc 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame+Private.h
@@ -11,18 +11,14 @@
 #import "WebRTC/RTCVideoFrame.h"
 
 #include "webrtc/api/video/video_frame_buffer.h"
-#include "webrtc/api/video/video_rotation.h"
 
 NS_ASSUME_NONNULL_BEGIN
 
 @interface RTCVideoFrame ()
 
-@property(nonatomic, readonly)
-    rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
-
 - (instancetype)initWithVideoBuffer:
                     (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-                           rotation:(webrtc::VideoRotation)rotation
+                           rotation:(RTCVideoRotation)rotation
                         timeStampNs:(int64_t)timeStampNs NS_DESIGNATED_INITIALIZER;
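Note on the switch in RTCSetVertexData above: |rotation_offset| selects where in the circular list of texture corners vertex 0 starts sampling, so a 90-degree rotation amounts to shifting by one corner. An illustrative sketch of that technique (the function and array names are assumptions, not the patch's code):

    // Each vertex row is {X, Y, U, V}; rotation only rewrites the UV slots.
    static void SetUVForRotation(GLfloat vertices[4][4], int rotation_offset) {
      // One texture corner per vertex; offsets 0..3 correspond to
      // rotations of 0, 90, 180 and 270 degrees.
      static const GLfloat kUVCoords[4][2] = {{0, 1}, {1, 1}, {1, 0}, {0, 0}};
      for (int i = 0; i < 4; ++i) {
        vertices[i][2] = kUVCoords[(i + rotation_offset) % 4][0];
        vertices[i][3] = kUVCoords[(i + rotation_offset) % 4][1];
      }
    }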
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
index 5805e30cb5..0403557f75 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoFrame.mm
@@ -10,80 +10,46 @@
 
 #import "RTCVideoFrame+Private.h"
 
-#include <memory>
-
-#include "webrtc/api/video/video_rotation.h"
-
 @implementation RTCVideoFrame {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
-  webrtc::VideoRotation _rotation;
+  RTCVideoRotation _rotation;
   int64_t _timeStampNs;
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
 }
 
-- (size_t)width {
+- (int)width {
   return _videoBuffer->width();
 }
 
-- (size_t)height {
+- (int)height {
   return _videoBuffer->height();
 }
 
-- (int)rotation {
-  return static_cast<int>(_rotation);
+- (RTCVideoRotation)rotation {
+  return _rotation;
 }
 
-// TODO(nisse): chromaWidth and chromaHeight are used only in
-// RTCOpenGLVideoRenderer.mm. Update, and then delete these
-// properties.
-- (size_t)chromaWidth {
-  return (self.width + 1) / 2;
+- (const uint8_t *)dataY {
+  return _videoBuffer->DataY();
 }
 
-- (size_t)chromaHeight {
-  return (self.height + 1) / 2;
+- (const uint8_t *)dataU {
+  return _videoBuffer->DataU();
 }
 
-- (const uint8_t *)yPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataY();
+- (const uint8_t *)dataV {
+  return _videoBuffer->DataV();
 }
 
-- (const uint8_t *)uPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataU();
+- (int)strideY {
+  return _videoBuffer->StrideY();
 }
 
-- (const uint8_t *)vPlane {
-  if (!self.i420Buffer) {
-    return nullptr;
-  }
-  return self.i420Buffer->DataV();
+- (int)strideU {
+  return _videoBuffer->StrideU();
 }
 
-- (int32_t)yPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideY();
-}
-
-- (int32_t)uPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideU();
-}
-
-- (int32_t)vPitch {
-  if (!self.i420Buffer) {
-    return 0;
-  }
-  return self.i420Buffer->StrideV();
+- (int)strideV {
+  return _videoBuffer->StrideV();
 }
 
 - (int64_t)timeStampNs {
@@ -94,19 +60,18 @@
   return static_cast<CVPixelBufferRef>(_videoBuffer->native_handle());
 }
 
-- (void)convertBufferIfNeeded {
-  if (!_i420Buffer) {
-    _i420Buffer = _videoBuffer->native_handle()
-                      ? _videoBuffer->NativeToI420Buffer()
-                      : _videoBuffer;
-  }
+- (RTCVideoFrame *)newI420VideoFrame {
+  return [[RTCVideoFrame alloc]
+      initWithVideoBuffer:_videoBuffer->NativeToI420Buffer()
+                 rotation:_rotation
+              timeStampNs:_timeStampNs];
 }
 
 #pragma mark - Private
 
 - (instancetype)initWithVideoBuffer:
                     (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
-                           rotation:(webrtc::VideoRotation)rotation
+                           rotation:(RTCVideoRotation)rotation
                         timeStampNs:(int64_t)timeStampNs {
   if (self = [super init]) {
     _videoBuffer = videoBuffer;
@@ -116,9 +81,4 @@
   return self;
 }
 
-- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
-  [self convertBufferIfNeeded];
-  return _i420Buffer;
-}
-
 @end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm
index 8bc01b1ce8..1d5107b1b0 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCVideoRendererAdapter.mm
@@ -27,9 +27,10 @@ class VideoRendererAdapter
   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
     RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
         initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
-                   rotation:nativeVideoFrame.rotation()
+                   rotation:static_cast<RTCVideoRotation>(
+                                nativeVideoFrame.rotation())
                 timeStampNs:nativeVideoFrame.timestamp_us() *
-                    rtc::kNumNanosecsPerMicrosec];
+                            rtc::kNumNanosecsPerMicrosec];
     CGSize current_size = (videoFrame.rotation % 180 == 0)
                               ? CGSizeMake(videoFrame.width, videoFrame.height)
                               : CGSizeMake(videoFrame.height, videoFrame.width);
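Note on the static_cast in OnFrame above: the conversion is value-preserving because both enums use the rotation angle in degrees as the underlying value (webrtc::kVideoRotation_90 == 90, just like RTCVideoRotation_90 == 90 in the new public header); this is also why `videoFrame.rotation % 180 == 0` works as a portrait/landscape test. The assumption could be pinned down with a compile-time check along these lines (not part of the patch):

    static_assert(
        webrtc::kVideoRotation_0 == static_cast<int>(RTCVideoRotation_0) &&
            webrtc::kVideoRotation_90 == static_cast<int>(RTCVideoRotation_90) &&
            webrtc::kVideoRotation_180 == static_cast<int>(RTCVideoRotation_180) &&
            webrtc::kVideoRotation_270 == static_cast<int>(RTCVideoRotation_270),
        "RTCVideoRotation must stay in sync with webrtc::VideoRotation");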
diff --git a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
index bfad608897..cbd8cc74e8 100644
--- a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
+++ b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCVideoFrame.h
@@ -15,25 +15,33 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
+typedef NS_ENUM(NSInteger, RTCVideoRotation) {
+  RTCVideoRotation_0 = 0,
+  RTCVideoRotation_90 = 90,
+  RTCVideoRotation_180 = 180,
+  RTCVideoRotation_270 = 270,
+};
+
 // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
 RTC_EXPORT
 @interface RTCVideoFrame : NSObject
 
 /** Width without rotation applied. */
-@property(nonatomic, readonly) size_t width;
+@property(nonatomic, readonly) int width;
 
 /** Height without rotation applied. */
-@property(nonatomic, readonly) size_t height;
-@property(nonatomic, readonly) int rotation;
-@property(nonatomic, readonly) size_t chromaWidth;
-@property(nonatomic, readonly) size_t chromaHeight;
-// These can return NULL if the object is not backed by a buffer.
-@property(nonatomic, readonly, nullable) const uint8_t *yPlane;
-@property(nonatomic, readonly, nullable) const uint8_t *uPlane;
-@property(nonatomic, readonly, nullable) const uint8_t *vPlane;
-@property(nonatomic, readonly) int32_t yPitch;
-@property(nonatomic, readonly) int32_t uPitch;
-@property(nonatomic, readonly) int32_t vPitch;
+@property(nonatomic, readonly) int height;
+@property(nonatomic, readonly) RTCVideoRotation rotation;
+/** Accessing YUV data should only be done for I420 frames, i.e. if nativeHandle
+ *  is null. It is always possible to get such a frame by calling
+ *  newI420VideoFrame.
+ */
+@property(nonatomic, readonly, nullable) const uint8_t *dataY;
+@property(nonatomic, readonly, nullable) const uint8_t *dataU;
+@property(nonatomic, readonly, nullable) const uint8_t *dataV;
+@property(nonatomic, readonly) int strideY;
+@property(nonatomic, readonly) int strideU;
+@property(nonatomic, readonly) int strideV;
 
 /** Timestamp in nanoseconds. */
 @property(nonatomic, readonly) int64_t timeStampNs;
@@ -43,10 +51,10 @@ RTC_EXPORT
 
 - (instancetype)init NS_UNAVAILABLE;
 
-/** If the frame is backed by a CVPixelBuffer, creates a backing i420 frame.
- *  Calling the yuv plane properties will call this method if needed.
+/** Return a frame that is guaranteed to be I420, i.e. it is possible to access
+ *  the YUV data on it.
  */
-- (void)convertBufferIfNeeded;
+- (RTCVideoFrame *)newI420VideoFrame;
 
 @end
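Usage note for the new public API above: a CVPixelBuffer-backed frame (nativeHandle non-null) does not expose usable YUV data, so callers convert explicitly with newI420VideoFrame and must honor the strides, since rows may be padded. A hedged consumer sketch (the CopyLumaPlane helper is hypothetical, not part of the patch):

    #import <WebRTC/RTCVideoFrame.h>
    #include <string.h>

    // Copies the (possibly padded) Y plane into a tightly packed buffer.
    static NSData *CopyLumaPlane(RTCVideoFrame *frame) {
      if (frame.nativeHandle) {
        // Native (CVPixelBuffer) frame: YUV accessors are not usable; convert.
        frame = [frame newI420VideoFrame];
      }
      NSMutableData *data =
          [NSMutableData dataWithLength:frame.width * frame.height];
      uint8_t *dst = (uint8_t *)data.mutableBytes;
      for (int row = 0; row < frame.height; ++row) {
        memcpy(dst + row * frame.width, frame.dataY + row * frame.strideY,
               frame.width);
      }
      return data;
    }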