diff --git a/examples/BUILD.gn b/examples/BUILD.gn
index 6ae2b71d85..9fa5d6f618 100644
--- a/examples/BUILD.gn
+++ b/examples/BUILD.gn
@@ -352,7 +352,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
       "../sdk:videocapture_objc",
       "../sdk:videocodec_objc",
     ]
-    if (rtc_ios_use_opengl_rendering) {
+    if (rtc_ios_macos_use_opengl_rendering) {
      deps += [ "../sdk:opengl_ui_objc" ]
     }
 
@@ -509,7 +509,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
        "../sdk:videotoolbox_objc",
      ]
 
-     if (rtc_ios_use_opengl_rendering) {
+     if (rtc_ios_macos_use_opengl_rendering) {
        deps += [ "../sdk:opengl_ui_objc" ]
      }
    }
@@ -548,6 +548,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
        "../sdk:helpers_objc",
        "../sdk:mediaconstraints_objc",
        "../sdk:metal_objc",
+       "../sdk:opengl_ui_objc",
        "../sdk:peerconnectionfactory_base_objc",
        "../sdk:peerconnectionfactory_base_objc",
        "../sdk:videocapture_objc",
diff --git a/examples/objc/AppRTCMobile/mac/APPRTCViewController.m b/examples/objc/AppRTCMobile/mac/APPRTCViewController.m
index 7b65d4f058..8904187215 100644
--- a/examples/objc/AppRTCMobile/mac/APPRTCViewController.m
+++ b/examples/objc/AppRTCMobile/mac/APPRTCViewController.m
@@ -14,6 +14,7 @@
 
 #import "sdk/objc/api/peerconnection/RTCVideoTrack.h"
 #import "sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h"
+#import "sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h"
 
 #import "ARDAppClient.h"
 #import "ARDCaptureController.h"
@@ -44,7 +45,7 @@ static NSUInteger const kBottomViewHeight = 200;
 
 @end
 
-@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCVideoViewDelegate)>
+@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCNSGLVideoViewDelegate)>
 @end
 @implementation APPRTCMainView {
   NSScrollView* _scrollView;
@@ -177,9 +178,9 @@ static NSUInteger const kBottomViewHeight = 200;
   [self setNeedsUpdateConstraints:YES];
 }
 
-#pragma mark - RTCVideoViewDelegate
+#pragma mark - RTC_OBJC_TYPE(RTCNSGLVideoViewDelegate)
 
-- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size {
+- (void)videoView:(RTC_OBJC_TYPE(RTCNSGLVideoView) *)videoView didChangeVideoSize:(NSSize)size {
   if (videoView == _remoteVideoView) {
     _remoteVideoSize = size;
   } else if (videoView == _localVideoView) {
@@ -215,8 +216,38 @@ static NSUInteger const kBottomViewHeight = 200;
   [_scrollView setDocumentView:_logView];
   [self addSubview:_scrollView];
 
-  _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
-  _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+  // NOTE (daniela): Ignoring the Clang diagnostic here.
+  // We perform a runtime check to make sure the class is available at runtime.
+  // If it is not, we fall back to a sensible default.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wpartial-availability"
+  if ([RTC_OBJC_TYPE(RTCMTLNSVideoView) class] &&
+      [RTC_OBJC_TYPE(RTCMTLNSVideoView) isMetalAvailable]) {
+    _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+    _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+  }
+#pragma clang diagnostic pop
+  if (_remoteVideoView == nil) {
+    NSOpenGLPixelFormatAttribute attributes[] = {
+      NSOpenGLPFADoubleBuffer,
+      NSOpenGLPFADepthSize, 24,
+      NSOpenGLPFAOpenGLProfile,
+      NSOpenGLProfileVersion3_2Core,
+      0
+    };
+    NSOpenGLPixelFormat* pixelFormat =
+        [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
+
+    RTC_OBJC_TYPE(RTCNSGLVideoView)* remote =
+        [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
+    remote.delegate = self;
+    _remoteVideoView = remote;
+
+    RTC_OBJC_TYPE(RTCNSGLVideoView)* local =
+        [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
+    local.delegate = self;
+    _localVideoView = local;
+  }
 
   [_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
   [self addSubview:_remoteVideoView];
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn
index 0e9ce250e9..ff89b21721 100644
--- a/sdk/BUILD.gn
+++ b/sdk/BUILD.gn
@@ -230,6 +230,9 @@ if (is_ios || is_mac) {
      "objc/native/api/audio_device_module.h",
      "objc/native/api/audio_device_module.mm",
    ]
+   if (is_mac) {
+     frameworks = [ "AudioUnit.framework" ]
+   }
 
    deps = [
      ":audio_device",
@@ -250,33 +253,6 @@ if (is_ios || is_mac) {
    deps = [ "../rtc_base:threading" ]
  }
 
-  rtc_library("opengl_ui_objc") {
-    visibility = [ "*" ]
-    allow_poison = [
-      "audio_codecs",  # TODO(bugs.webrtc.org/8396): Remove.
-      "default_task_queue",
-    ]
-    sources = [
-      "objc/components/renderer/opengl/RTCDisplayLinkTimer.h",
-      "objc/components/renderer/opengl/RTCDisplayLinkTimer.m",
-      "objc/components/renderer/opengl/RTCEAGLVideoView.h",
-      "objc/components/renderer/opengl/RTCEAGLVideoView.m",
-    ]
-
-    # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning
-    # workaround.
-    defines = [ "GLES_SILENCE_DEPRECATION" ]
-    configs += [ "..:common_objc" ]
-    deps = [
-      ":base_objc",
-      ":helpers_objc",
-      ":metal_objc",
-      ":opengl_objc",
-      ":videocapture_objc",
-      ":videoframebuffer_objc",
-    ]
-  }
-
  rtc_library("audio_device") {
    visibility = [ "*" ]
@@ -602,6 +578,41 @@ if (is_ios || is_mac) {
    ]
  }
 
+  rtc_library("opengl_ui_objc") {
+    visibility = [ "*" ]
+    allow_poison = [
+      "audio_codecs",  # TODO(bugs.webrtc.org/8396): Remove.
+      "default_task_queue",
+    ]
+    if (is_ios) {
+      sources = [
+        "objc/components/renderer/opengl/RTCDisplayLinkTimer.h",
+        "objc/components/renderer/opengl/RTCDisplayLinkTimer.m",
+        "objc/components/renderer/opengl/RTCEAGLVideoView.h",
+        "objc/components/renderer/opengl/RTCEAGLVideoView.m",
+      ]
+
+      # TODO(bugs.webrtc.org/12937): Remove OpenGL deprecation warning
+      # workaround.
+      defines = [ "GLES_SILENCE_DEPRECATION" ]
+    }
+    if (is_mac) {
+      sources = [
+        "objc/components/renderer/opengl/RTCNSGLVideoView.h",
+        "objc/components/renderer/opengl/RTCNSGLVideoView.m",
+      ]
+    }
+    configs += [ "..:common_objc" ]
+    deps = [
+      ":base_objc",
+      ":helpers_objc",
+      ":metal_objc",
+      ":opengl_objc",
+      ":videocapture_objc",
+      ":videoframebuffer_objc",
+    ]
+  }
+
  rtc_library("metal_objc") {
    visibility = [ "*" ]
    allow_poison = [
@@ -1191,7 +1202,7 @@ if (is_ios || is_mac) {
      "//third_party/libyuv",
    ]
 
-    if (rtc_ios_use_opengl_rendering) {
+    if (rtc_ios_macos_use_opengl_rendering) {
      deps += [ ":opengl_objc" ]
    }
 
@@ -1372,6 +1383,9 @@ if (is_ios || is_mac) {
      ":videocodec_objc",
      ":videotoolbox_objc",
    ]
+   if (rtc_ios_macos_use_opengl_rendering) {
+     deps += [ ":opengl_ui_objc" ]
+   }
    if (!build_with_chromium) {
      deps += [
        ":callback_logger_objc",
@@ -1475,6 +1489,7 @@ if (is_ios || is_mac) {
      "objc/components/capturer/RTCCameraVideoCapturer.h",
      "objc/components/capturer/RTCFileVideoCapturer.h",
      "objc/components/renderer/metal/RTCMTLNSVideoView.h",
+     "objc/components/renderer/opengl/RTCNSGLVideoView.h",
      "objc/components/renderer/opengl/RTCVideoViewShading.h",
      "objc/components/video_codec/RTCCodecSpecificInfoH264.h",
      "objc/components/video_codec/RTCDefaultVideoDecoderFactory.h",
@@ -1499,6 +1514,7 @@ if (is_ios || is_mac) {
      ":default_codec_factory_objc",
      ":native_api",
      ":native_video",
+     ":opengl_ui_objc",
      ":peerconnectionfactory_base_objc",
      ":videocapture_objc",
      ":videocodec_objc",
diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
index 5a2e7d380f..f70e2ad5ee 100644
--- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
+++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h
@@ -12,6 +12,8 @@
 
 #import "RTCVideoRenderer.h"
 
+NS_AVAILABLE_MAC(10.11)
+
 RTC_OBJC_EXPORT
 @interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView <RTC_OBJC_TYPE(RTCVideoRenderer)>
diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
new file mode 100644
index 0000000000..c9ee986f88
--- /dev/null
+++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <TargetConditionals.h>
+
+#if !TARGET_OS_IPHONE
+
+#import <AppKit/NSOpenGLView.h>
+
+#import "RTCVideoRenderer.h"
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCNSGLVideoView);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCNSGLVideoViewDelegate)<RTC_OBJC_TYPE(RTCVideoViewDelegate)> @end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView <RTC_OBJC_TYPE(RTCVideoRenderer)>
+
+@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCNSGLVideoViewDelegate)> delegate;
+
+- (instancetype)initWithFrame:(NSRect)frameRect
+                  pixelFormat:(NSOpenGLPixelFormat *)format
+                       shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader
+    NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#endif
diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
new file mode 100644
index 0000000000..168c73126f
--- /dev/null
+++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <TargetConditionals.h>
+
+#if !TARGET_OS_IPHONE
+
+#import "RTCNSGLVideoView.h"
+
+#import <AppKit/NSOpenGL.h>
+#import <CoreVideo/CVDisplayLink.h>
+#import <OpenGL/gl3.h>
+
+#import "RTCDefaultShader.h"
+#import "RTCI420TextureCache.h"
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrame.h"
+
+@interface RTC_OBJC_TYPE (RTCNSGLVideoView)
+()
+
+    // `videoFrame` is set when we receive a frame from a worker thread and is read
+    // from the display link callback so atomicity is required.
+    @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame;
+@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
+
+- (void)drawFrame;
+@end
+
+static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
+                                   const CVTimeStamp *now,
+                                   const CVTimeStamp *outputTime,
+                                   CVOptionFlags flagsIn,
+                                   CVOptionFlags *flagsOut,
+                                   void *displayLinkContext) {
+  RTC_OBJC_TYPE(RTCNSGLVideoView) *view =
+      (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext;
+  [view drawFrame];
+  return kCVReturnSuccess;
+}
+
+@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) {
+  CVDisplayLinkRef _displayLink;
+  RTC_OBJC_TYPE(RTCVideoFrame) *_lastDrawnFrame;
+  id<RTC_OBJC_TYPE(RTCVideoViewShading)> _shader;
+}
+
+@synthesize delegate = _delegate;
+@synthesize videoFrame = _videoFrame;
+@synthesize i420TextureCache = _i420TextureCache;
+
+- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
+  return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
+}
+
+- (instancetype)initWithFrame:(NSRect)frame
+                  pixelFormat:(NSOpenGLPixelFormat *)format
+                       shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+  if (self = [super initWithFrame:frame pixelFormat:format]) {
+    _shader = shader;
+  }
+  return self;
+}
+
+- (void)dealloc {
+  [self teardownDisplayLink];
+}
+
+- (void)drawRect:(NSRect)rect {
+  [self drawFrame];
+}
+
+- (void)reshape {
+  [super reshape];
+  NSRect frame = [self frame];
+  [self ensureGLContext];
+  CGLLockContext([[self openGLContext] CGLContextObj]);
+  glViewport(0, 0, frame.size.width, frame.size.height);
+  CGLUnlockContext([[self openGLContext] CGLContextObj]);
+}
+
+- (void)lockFocus {
+  NSOpenGLContext *context = [self openGLContext];
+  [super lockFocus];
+  if ([context view] != self) {
+    [context setView:self];
+  }
+  [context makeCurrentContext];
+}
+
+- (void)prepareOpenGL {
+  [super prepareOpenGL];
+  [self ensureGLContext];
+  glDisable(GL_DITHER);
+  [self setupDisplayLink];
+}
+
+- (void)clearGLContext {
+  [self ensureGLContext];
+  self.i420TextureCache = nil;
+  [super clearGLContext];
+}
+
+#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer)
+
+// These methods may be called on non-main thread.
+- (void)setSize:(CGSize)size {
+  dispatch_async(dispatch_get_main_queue(), ^{
+    [self.delegate videoView:self didChangeVideoSize:size];
+  });
+}
+
+- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  self.videoFrame = frame;
+}
+
+#pragma mark - Private
+
+- (void)drawFrame {
+  RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame;
+  if (!frame || frame == _lastDrawnFrame) {
+    return;
+  }
+  // This method may be called from CVDisplayLink callback which isn't on the
+  // main thread so we have to lock the GL context before drawing.
+  NSOpenGLContext *context = [self openGLContext];
+  CGLLockContext([context CGLContextObj]);
+
+  [self ensureGLContext];
+  glClear(GL_COLOR_BUFFER_BIT);
+
+  // Rendering native CVPixelBuffer is not supported on OS X.
+  // TODO(magjed): Add support for NV12 texture cache on OS X.
+  frame = [frame newI420VideoFrame];
+  if (!self.i420TextureCache) {
+    self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
+  }
+  RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
+  if (i420TextureCache) {
+    [i420TextureCache uploadFrameToTextures:frame];
+    [_shader applyShadingForFrameWithWidth:frame.width
+                                    height:frame.height
+                                  rotation:frame.rotation
+                                    yPlane:i420TextureCache.yTexture
+                                    uPlane:i420TextureCache.uTexture
+                                    vPlane:i420TextureCache.vTexture];
+    [context flushBuffer];
+    _lastDrawnFrame = frame;
+  }
+  CGLUnlockContext([context CGLContextObj]);
+}
+
+- (void)setupDisplayLink {
+  if (_displayLink) {
+    return;
+  }
+  // Synchronize buffer swaps with vertical refresh rate.
+  GLint swapInt = 1;
+  [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+  // Create display link.
+  CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+  CVDisplayLinkSetOutputCallback(_displayLink,
+                                 &OnDisplayLinkFired,
+                                 (__bridge void *)self);
+  // Set the display link for the current renderer.
+  CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
+  CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
+  CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
+      _displayLink, cglContext, cglPixelFormat);
+  CVDisplayLinkStart(_displayLink);
+}
+
+- (void)teardownDisplayLink {
+  if (!_displayLink) {
+    return;
+  }
+  CVDisplayLinkRelease(_displayLink);
+  _displayLink = NULL;
+}
+
+- (void)ensureGLContext {
+  NSOpenGLContext* context = [self openGLContext];
+  NSAssert(context, @"context shouldn't be nil");
+  if ([NSOpenGLContext currentContext] != context) {
+    [context makeCurrentContext];
+  }
+}
+
+@end
+
+#endif  // !TARGET_OS_IPHONE
diff --git a/webrtc.gni b/webrtc.gni
index 8ef21f21b2..5e4a5d3c81 100644
--- a/webrtc.gni
+++ b/webrtc.gni
@@ -204,8 +204,9 @@ declare_args() {
    rtc_apprtcmobile_broadcast_extension = false
  }
 
-  # Determines whether OpenGL is available on iOS.
-  rtc_ios_use_opengl_rendering = is_ios && target_environment != "catalyst"
+  # Determines whether OpenGL is available on iOS/macOS.
+  rtc_ios_macos_use_opengl_rendering =
+      !(is_ios && target_environment == "catalyst")
 
  # When set to false, builtin audio encoder/decoder factories and all the
  # audio codecs they depend on will not be included in libwebrtc.{a|lib}
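
Usage note (not part of the patch): a minimal sketch of how an embedding macOS app could mirror the AppRTCMobile fallback introduced above, preferring the Metal view when it is available and otherwise creating the OpenGL view. The RTC_OBJC_TYPE classes, +isMetalAvailable, the delegate protocol, and the initializer come from the change itself; the helper function name and its NSView-typed return are illustrative only.

// Sketch, assuming ARC and the headers added in this change are importable.
static NSView<RTC_OBJC_TYPE(RTCVideoRenderer)> *CreateRendererView(
    id<RTC_OBJC_TYPE(RTCNSGLVideoViewDelegate)> delegate) {
  // Prefer Metal where the class exists and reports availability, as
  // APPRTCViewController.m does (it additionally silences
  // -Wpartial-availability around this check).
  if ([RTC_OBJC_TYPE(RTCMTLNSVideoView) class] &&
      [RTC_OBJC_TYPE(RTCMTLNSVideoView) isMetalAvailable]) {
    return [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
  }
  // Fall back to the relanded NSOpenGLView-based renderer.
  NSOpenGLPixelFormatAttribute attributes[] = {
      NSOpenGLPFADoubleBuffer,
      NSOpenGLPFADepthSize, 24,
      NSOpenGLPFAOpenGLProfile, NSOpenGLProfileVersion3_2Core,
      0};
  NSOpenGLPixelFormat *format =
      [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
  RTC_OBJC_TYPE(RTCNSGLVideoView) *glView =
      [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect
                                                 pixelFormat:format];
  glView.delegate = delegate;
  return glView;
}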