diff --git a/webrtc/sdk/BUILD.gn b/webrtc/sdk/BUILD.gn
index d8e745c83f..38c5b68de6 100644
--- a/webrtc/sdk/BUILD.gn
+++ b/webrtc/sdk/BUILD.gn
@@ -126,8 +126,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/RTCMediaStreamTrack+Private.h",
       "objc/Framework/Classes/RTCMediaStreamTrack.mm",
       "objc/Framework/Classes/RTCOpenGLDefines.h",
-      "objc/Framework/Classes/RTCOpenGLVideoRenderer.h",
-      "objc/Framework/Classes/RTCOpenGLVideoRenderer.mm",
       "objc/Framework/Classes/RTCPeerConnection+DataChannel.mm",
       "objc/Framework/Classes/RTCPeerConnection+Private.h",
       "objc/Framework/Classes/RTCPeerConnection+Stats.mm",
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m b/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
index 537397f378..1fb03bc909 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
+++ b/webrtc/sdk/objc/Framework/Classes/RTCEAGLVideoView.m
@@ -12,7 +12,8 @@
 
 #import <GLKit/GLKit.h>
 
-#import "RTCOpenGLVideoRenderer.h"
+#import "RTCShader+Private.h"
+#import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
 
 // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
@@ -88,7 +89,6 @@
 // from the display link callback so atomicity is required.
 @property(atomic, strong) RTCVideoFrame *videoFrame;
 @property(nonatomic, readonly) GLKView *glkView;
-@property(nonatomic, readonly) RTCOpenGLVideoRenderer *glRenderer;
 @end
 
 @implementation RTCEAGLVideoView {
@@ -97,12 +97,14 @@
   // This flag should only be set and read on the main thread (e.g. by
   // setNeedsDisplay)
   BOOL _isDirty;
+  id<RTCShader> _i420Shader;
+  id<RTCShader> _nv12Shader;
+  RTCVideoFrame *_lastDrawnFrame;
 }
 
 @synthesize delegate = _delegate;
 @synthesize videoFrame = _videoFrame;
 @synthesize glkView = _glkView;
-@synthesize glRenderer = _glRenderer;
 
 - (instancetype)initWithFrame:(CGRect)frame {
   if (self = [super initWithFrame:frame]) {
@@ -125,7 +127,6 @@
       glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
     }
     _glContext = glContext;
-    _glRenderer = [[RTCOpenGLVideoRenderer alloc] initWithContext:_glContext];
 
     // GLKView manages a framebuffer for us.
     _glkView = [[GLKView alloc] initWithFrame:CGRectZero
@@ -200,7 +201,29 @@
 - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect {
   // The renderer will draw the frame to the framebuffer corresponding to the
   // one used by |view|.
-  [_glRenderer drawFrame:self.videoFrame];
+  RTCVideoFrame *frame = self.videoFrame;
+  if (!frame || frame == _lastDrawnFrame) {
+    return;
+  }
+  [self ensureGLContext];
+  glClear(GL_COLOR_BUFFER_BIT);
+  id<RTCShader> shader = nil;
+  if (frame.nativeHandle) {
+    if (!_nv12Shader) {
+      _nv12Shader = [[RTCNativeNV12Shader alloc] initWithContext:_glContext];
+    }
+    shader = _nv12Shader;
+  } else {
+    if (!_i420Shader) {
+      _i420Shader = [[RTCI420Shader alloc] initWithContext:_glContext];
+    }
+    shader = _i420Shader;
+  }
+  if (shader && [shader drawFrame:frame]) {
+    _lastDrawnFrame = frame;
+  } else {
+    RTCLog(@"Failed to draw frame.");
+  }
 }
 
 #pragma mark - RTCVideoRenderer
@@ -223,7 +246,7 @@
 - (void)displayLinkTimerDidFire {
   // Don't render unless video frame have changed or the view content
   // has explicitly been marked dirty.
-  if (!_isDirty && _glRenderer.lastDrawnFrame == self.videoFrame) {
+  if (!_isDirty && _lastDrawnFrame == self.videoFrame) {
     return;
   }
 
@@ -242,7 +265,8 @@
 
 - (void)setupGL {
   self.videoFrame = nil;
-  [_glRenderer setupGL];
+  [self ensureGLContext];
+  glDisable(GL_DITHER);
   _timer.isPaused = NO;
 }
 
@@ -250,7 +274,9 @@
   self.videoFrame = nil;
   _timer.isPaused = YES;
   [_glkView deleteDrawable];
-  [_glRenderer teardownGL];
+  [self ensureGLContext];
+  _i420Shader = nil;
+  _nv12Shader = nil;
 }
 
 - (void)didBecomeActive {
@@ -261,4 +287,11 @@
   [self teardownGL];
 }
 
+- (void)ensureGLContext {
+  NSAssert(_glContext, @"context shouldn't be nil");
+  if ([EAGLContext currentContext] != _glContext) {
+    [EAGLContext setCurrentContext:_glContext];
+  }
+}
+
 @end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm b/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
index 7a09c41a84..3c3ffa72f0 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCI420Shader.mm
@@ -13,6 +13,7 @@
 #include <vector>
 
 #import "RTCShader+Private.h"
+#import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
 
 #include "webrtc/base/optional.h"
@@ -77,6 +78,7 @@ static const char kI420FragmentShaderSource[] =
     glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
     if (![self setupI420Program] || ![self setupTextures] ||
         !RTCSetupVerticesForProgram(_i420Program, &_vertexBuffer, &_vertexArray)) {
+      RTCLog(@"Failed to initialize RTCI420Shader.");
       self = nil;
     }
   }
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m b/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m
index 18dc4d1315..530d9a7e6a 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m
+++ b/webrtc/sdk/objc/Framework/Classes/RTCNSGLVideoView.m
@@ -14,17 +14,20 @@
 
 #import "WebRTC/RTCNSGLVideoView.h"
 
+#import <AppKit/NSOpenGL.h>
 #import <CoreVideo/CVDisplayLink.h>
 #import <OpenGL/gl3.h>
 
-#import "RTCOpenGLVideoRenderer.h"
+#import "RTCShader+Private.h"
+#import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
 
 @interface RTCNSGLVideoView ()
 // |videoFrame| is set when we receive a frame from a worker thread and is read
 // from the display link callback so atomicity is required.
 @property(atomic, strong) RTCVideoFrame *videoFrame;
-@property(atomic, strong) RTCOpenGLVideoRenderer *glRenderer;
+@property(atomic, strong) id<RTCShader> i420Shader;
+
 - (void)drawFrame;
 @end
 
@@ -41,11 +44,12 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
 
 @implementation RTCNSGLVideoView {
   CVDisplayLinkRef _displayLink;
+  RTCVideoFrame *_lastDrawnFrame;
 }
 
 @synthesize delegate = _delegate;
 @synthesize videoFrame = _videoFrame;
-@synthesize glRenderer = _glRenderer;
+@synthesize i420Shader = _i420Shader;
 
 - (void)dealloc {
   [self teardownDisplayLink];
@@ -74,17 +78,14 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
 
 - (void)prepareOpenGL {
   [super prepareOpenGL];
-  if (!self.glRenderer) {
-    self.glRenderer =
-        [[RTCOpenGLVideoRenderer alloc] initWithContext:[self openGLContext]];
-  }
-  [self.glRenderer setupGL];
+  [self ensureGLContext];
+  glDisable(GL_DITHER);
   [self setupDisplayLink];
 }
 
 - (void)clearGLContext {
-  [self.glRenderer teardownGL];
-  self.glRenderer = nil;
+  [self ensureGLContext];
+  self.i420Shader = nil;
   [super clearGLContext];
 }
 
@@ -104,14 +105,30 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
 #pragma mark - Private
 
 - (void)drawFrame {
-  RTCVideoFrame *videoFrame = self.videoFrame;
-  if (self.glRenderer.lastDrawnFrame != videoFrame) {
-    // This method may be called from CVDisplayLink callback which isn't on the
-    // main thread so we have to lock the GL context before drawing.
-    CGLLockContext([[self openGLContext] CGLContextObj]);
-    [self.glRenderer drawFrame:videoFrame];
-    CGLUnlockContext([[self openGLContext] CGLContextObj]);
+  RTCVideoFrame *frame = self.videoFrame;
+  if (!frame || frame == _lastDrawnFrame) {
+    return;
   }
+  // This method may be called from CVDisplayLink callback which isn't on the
+  // main thread so we have to lock the GL context before drawing.
+  NSOpenGLContext *context = [self openGLContext];
+  CGLLockContext([context CGLContextObj]);
+
+  [self ensureGLContext];
+  glClear(GL_COLOR_BUFFER_BIT);
+
+  // Rendering native CVPixelBuffer is not supported on OS X.
+  frame = [frame newI420VideoFrame];
+  if (!self.i420Shader) {
+    self.i420Shader = [[RTCI420Shader alloc] initWithContext:context];
+  }
+  if (self.i420Shader && [self.i420Shader drawFrame:frame]) {
+    [context flushBuffer];
+    _lastDrawnFrame = frame;
+  } else {
+    RTCLog(@"Failed to draw frame.");
+  }
+  CGLUnlockContext([context CGLContextObj]);
 }
 
 - (void)setupDisplayLink {
@@ -143,6 +160,14 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
   _displayLink = NULL;
 }
 
+- (void)ensureGLContext {
+  NSOpenGLContext* context = [self openGLContext];
+  NSAssert(context, @"context shouldn't be nil");
+  if ([NSOpenGLContext currentContext] != context) {
+    [context makeCurrentContext];
+  }
+}
+
 @end
 
 #endif  // !TARGET_OS_IPHONE
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm b/webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm
index 2d402eef14..0e221bfd11 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCNativeNV12Shader.mm
@@ -17,6 +17,7 @@
 #import <GLKit/GLKit.h>
 
 #import "RTCShader+Private.h"
+#import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCVideoFrame.h"
 
 #include "webrtc/base/checks.h"
@@ -56,6 +57,7 @@ static const char kNV12FragmentShaderSource[] =
   if (self = [super init]) {
     if (![self setupNV12Program] || ![self setupTextureCacheWithContext:context] ||
         !RTCSetupVerticesForProgram(_nv12Program, &_vertexBuffer, nullptr)) {
+      RTCLog(@"Failed to initialize RTCNativeNV12Shader.");
      self = nil;
    }
  }
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h
deleted file mode 100644
index 7041861014..0000000000
--- a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import <Foundation/Foundation.h>
-#if TARGET_OS_IPHONE
-#import <GLKit/GLKit.h>
-#else
-#import <AppKit/NSOpenGL.h>
-#endif
-
-#import "WebRTC/RTCMacros.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@class RTCVideoFrame;
-
-// RTCOpenGLVideoRenderer issues appropriate OpenGL commands to draw a frame to
-// the currently bound framebuffer. Supports OpenGL 3.2 and OpenGLES 2.0. OpenGL
-// framebuffer creation and management should be handled elsewhere using the
-// same context used to initialize this class.
-RTC_EXPORT
-@interface RTCOpenGLVideoRenderer : NSObject
-
-// The last successfully drawn frame. Used to avoid drawing frames unnecessarily
-// hence saving battery life by reducing load.
-@property(nonatomic, readonly) RTCVideoFrame *lastDrawnFrame;
-
-#if TARGET_OS_IPHONE
-- (instancetype)initWithContext:(EAGLContext *)context
-    NS_DESIGNATED_INITIALIZER;
-#else
-- (instancetype)initWithContext:(NSOpenGLContext *)context
-    NS_DESIGNATED_INITIALIZER;
-#endif
-
-// Draws |frame| onto the currently bound OpenGL framebuffer. |setupGL| must be
-// called before this function will succeed.
-- (BOOL)drawFrame:(RTCVideoFrame *)frame;
-
-// The following methods are used to manage OpenGL resources. On iOS
-// applications should release resources when placed in background for use in
-// the foreground application. In fact, attempting to call OpenGLES commands
-// while in background will result in application termination.
-
-// Sets up the OpenGL state needed for rendering.
-- (void)setupGL;
-// Tears down the OpenGL state created by |setupGL|.
-- (void)teardownGL;
-
-- (instancetype)init NS_UNAVAILABLE;
-
-@end
-
-NS_ASSUME_NONNULL_END
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm b/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
deleted file mode 100644
index bfd6eebe7e..0000000000
--- a/webrtc/sdk/objc/Framework/Classes/RTCOpenGLVideoRenderer.mm
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright 2015 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCOpenGLVideoRenderer.h"
-
-#import "RTCShader+Private.h"
-#import "WebRTC/RTCVideoFrame.h"
-
-@implementation RTCOpenGLVideoRenderer {
-  GlContextType *_context;
-  BOOL _isInitialized;
-  id<RTCShader> _i420Shader;
-  id<RTCShader> _nv12Shader;
-}
-
-@synthesize lastDrawnFrame = _lastDrawnFrame;
-
-+ (void)initialize {
-  // Disable dithering for performance.
-  glDisable(GL_DITHER);
-}
-
-- (instancetype)initWithContext:(GlContextType *)context {
-  NSAssert(context != nil, @"context cannot be nil");
-  if (self = [super init]) {
-    _context = context;
-  }
-  return self;
-}
-
-- (BOOL)drawFrame:(RTCVideoFrame *)frame {
-  if (!_isInitialized || !frame || frame == _lastDrawnFrame) {
-    return NO;
-  }
-  [self ensureGLContext];
-  glClear(GL_COLOR_BUFFER_BIT);
-  id<RTCShader> shader = nil;
-#if TARGET_OS_IPHONE
-  if (frame.nativeHandle) {
-    if (!_nv12Shader) {
-      _nv12Shader = [[RTCNativeNV12Shader alloc] initWithContext:_context];
-    }
-    shader = _nv12Shader;
-  } else {
-    if (!_i420Shader) {
-      _i420Shader = [[RTCI420Shader alloc] initWithContext:_context];
-    }
-    shader = _i420Shader;
-  }
-#else
-  // Rendering native CVPixelBuffer is not supported on OS X.
-  frame = [frame newI420VideoFrame];
-  if (!_i420Shader) {
-    _i420Shader = [[RTCI420Shader alloc] initWithContext:_context];
-  }
-  shader = _i420Shader;
-#endif
-  if (!shader || ![shader drawFrame:frame]) {
-    return NO;
-  }
-
-#if !TARGET_OS_IPHONE
-  [_context flushBuffer];
-#endif
-  _lastDrawnFrame = frame;
-
-  return YES;
-}
-
-- (void)setupGL {
-  if (_isInitialized) {
-    return;
-  }
-  [self ensureGLContext];
-  _isInitialized = YES;
-}
-
-- (void)teardownGL {
-  if (!_isInitialized) {
-    return;
-  }
-  [self ensureGLContext];
-  _i420Shader = nil;
-  _nv12Shader = nil;
-  _isInitialized = NO;
-}
-
-#pragma mark - Private
-
-- (void)ensureGLContext {
-  NSAssert(_context, @"context shouldn't be nil");
-#if TARGET_OS_IPHONE
-  if ([EAGLContext currentContext] != _context) {
-    [EAGLContext setCurrentContext:_context];
-  }
-#else
-  if ([NSOpenGLContext currentContext] != _context) {
-    [_context makeCurrentContext];
-  }
-#endif
-}
-
-@end
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h b/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h
index a9931bdd51..62da2b89fd 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h
+++ b/webrtc/sdk/objc/Framework/Classes/RTCShader+Private.h
@@ -19,8 +19,6 @@
 #import <OpenGL/gl3.h>
 #endif
 
-#include "webrtc/api/video/video_rotation.h"
-
 RTC_EXTERN const char kRTCVertexShaderSource[];
 
 RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar *source);
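
Note (illustrative, not part of the patch): the public rendering API is unchanged by this change. RTCEAGLVideoView and RTCNSGLVideoView still conform to RTCVideoRenderer; only the internal RTCOpenGLVideoRenderer indirection is gone, with each view now owning its RTCShader instances directly. A minimal usage sketch on iOS follows, assuming a hypothetical view controller and an RTCVideoTrack (here named remoteVideoTrack) supplied by the application's peer connection code.

// Sketch only: attaching an RTCEAGLVideoView to a video track.
// The track and controller names are assumptions for illustration.
#import <UIKit/UIKit.h>
#import <WebRTC/RTCEAGLVideoView.h>
#import <WebRTC/RTCVideoTrack.h>

@interface RendererViewController : UIViewController <RTCEAGLVideoViewDelegate>
@property(nonatomic, strong) RTCEAGLVideoView *videoView;
@property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack;  // assumed to be set by signaling code
@end

@implementation RendererViewController

- (void)viewDidLoad {
  [super viewDidLoad];
  self.videoView = [[RTCEAGLVideoView alloc] initWithFrame:self.view.bounds];
  self.videoView.delegate = self;
  [self.view addSubview:self.videoView];
  // RTCEAGLVideoView conforms to RTCVideoRenderer; frames are delivered via renderFrame:.
  [self.remoteVideoTrack addRenderer:self.videoView];
}

- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size {
  // React to aspect-ratio changes, e.g. update layout constraints.
  NSLog(@"Video size changed to %@", NSStringFromCGSize(size));
}

@end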