diff --git a/webrtc/sdk/BUILD.gn b/webrtc/sdk/BUILD.gn
index ea816782dc..644a94f444 100644
--- a/webrtc/sdk/BUILD.gn
+++ b/webrtc/sdk/BUILD.gn
@@ -103,6 +103,7 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/RTCAudioSource.mm",
       "objc/Framework/Classes/RTCAudioTrack+Private.h",
       "objc/Framework/Classes/RTCAudioTrack.mm",
+      "objc/Framework/Classes/RTCCameraVideoCapturer.m",
       "objc/Framework/Classes/RTCConfiguration+Private.h",
       "objc/Framework/Classes/RTCConfiguration.mm",
       "objc/Framework/Classes/RTCDataChannel+Private.h",
@@ -169,6 +170,7 @@ if (is_ios || is_mac) {
       "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
+      "objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h",
       "objc/Framework/Headers/WebRTC/RTCConfiguration.h",
       "objc/Framework/Headers/WebRTC/RTCDataChannel.h",
       "objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h",
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCCameraVideoCapturer.m b/webrtc/sdk/objc/Framework/Classes/RTCCameraVideoCapturer.m
new file mode 100644
index 0000000000..6545b2f0d9
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Classes/RTCCameraVideoCapturer.m
@@ -0,0 +1,417 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "WebRTC/RTCCameraVideoCapturer.h"
+#import "WebRTC/RTCLogging.h"
+
+#if TARGET_OS_IPHONE
+#import "WebRTC/UIDevice+RTCDevice.h"
+#endif
+
+#import "RTCDispatcher+Private.h"
+
+const int64_t kNanosecondsPerSecond = 1000000000;
+
+static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
+  return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
+          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
+}
+
+@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
+@property(nonatomic, readonly) dispatch_queue_t frameQueue;
+@end
+
+@implementation RTCCameraVideoCapturer {
+  AVCaptureVideoDataOutput *_videoDataOutput;
+  AVCaptureSession *_captureSession;
+  AVCaptureDevice *_currentDevice;
+  RTCVideoRotation _rotation;
+  BOOL _hasRetriedOnFatalError;
+  BOOL _isRunning;
+  // Will the session be running once all asynchronous operations have been completed?
+  BOOL _willBeRunning;
+}
+
+@synthesize frameQueue = _frameQueue;
+@synthesize captureSession = _captureSession;
+
+- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
+  if (self = [super initWithDelegate:delegate]) {
+    // Create the capture session and all relevant inputs and outputs. We need
+    // to do this in init because the application may want the capture session
+    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects
+    // created here are retained until dealloc and never recreated.
+    if (![self setupCaptureSession]) {
+      return nil;
+    }
+    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+#if TARGET_OS_IPHONE
+    [center addObserver:self
+               selector:@selector(deviceOrientationDidChange:)
+                   name:UIDeviceOrientationDidChangeNotification
+                 object:nil];
+    [center addObserver:self
+               selector:@selector(handleCaptureSessionInterruption:)
+                   name:AVCaptureSessionWasInterruptedNotification
+                 object:_captureSession];
+    [center addObserver:self
+               selector:@selector(handleCaptureSessionInterruptionEnded:)
+                   name:AVCaptureSessionInterruptionEndedNotification
+                 object:_captureSession];
+    [center addObserver:self
+               selector:@selector(handleApplicationDidBecomeActive:)
+                   name:UIApplicationDidBecomeActiveNotification
+                 object:[UIApplication sharedApplication]];
+#endif
+    [center addObserver:self
+               selector:@selector(handleCaptureSessionRuntimeError:)
+                   name:AVCaptureSessionRuntimeErrorNotification
+                 object:_captureSession];
+    [center addObserver:self
+               selector:@selector(handleCaptureSessionDidStartRunning:)
+                   name:AVCaptureSessionDidStartRunningNotification
+                 object:_captureSession];
+    [center addObserver:self
+               selector:@selector(handleCaptureSessionDidStopRunning:)
+                   name:AVCaptureSessionDidStopRunningNotification
+                 object:_captureSession];
+  }
+  return self;
+}
+
+- (void)dealloc {
+  NSAssert(
+      !_willBeRunning,
+      @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
+  [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
++ (NSArray<AVCaptureDevice *> *)captureDevices {
+  return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+}
+
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
+  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = [NSMutableArray array];
+
+  for (AVCaptureDeviceFormat *format in device.formats) {
+    // Filter out subTypes that we currently don't support in the stack.
+    FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+    if (IsMediaSubTypeSupported(mediaSubType)) {
+      [eligibleDeviceFormats addObject:format];
+    }
+  }
+
+  return eligibleDeviceFormats;
+}
+
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+                        format:(AVCaptureDeviceFormat *)format
+                           fps:(int)fps {
+  _willBeRunning = true;
+  [RTCDispatcher
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                     block:^{
+                       RTCLogInfo("startCaptureWithDevice %@ @ %d fps", format, fps);
+
+#if TARGET_OS_IPHONE
+                       [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+#endif
+
+                       _currentDevice = device;
+
+                       NSError *error = nil;
+                       if ([_currentDevice lockForConfiguration:&error]) {
+                         [self updateDeviceCaptureFormat:format fps:fps];
+                       } else {
+                         RTCLogError(@"Failed to lock device %@. Error: %@", _currentDevice,
+                                     error.userInfo);
+                         return;
+                       }
+
+                       [self reconfigureCaptureSessionInput];
+                       [self updateOrientation];
+                       [_captureSession startRunning];
+
+                       [_currentDevice unlockForConfiguration];
+                       _isRunning = true;
+                     }];
+}
+
+- (void)stopCapture {
+  _willBeRunning = false;
+  [RTCDispatcher
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                     block:^{
+                       RTCLogInfo("Stop");
+                       _currentDevice = nil;
+                       for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
+                         [_captureSession removeInput:oldInput];
+                       }
+                       [_captureSession stopRunning];
+
+#if TARGET_OS_IPHONE
+                       [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+#endif
+                       _isRunning = false;
+                     }];
+}
+
+#pragma mark iOS notifications
+
+#if TARGET_OS_IPHONE
+- (void)deviceOrientationDidChange:(NSNotification *)notification {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+                                 [self updateOrientation];
+                               }];
+}
+#endif
+
+#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+           fromConnection:(AVCaptureConnection *)connection {
+  NSParameterAssert(captureOutput == _videoDataOutput);
+
+  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+      !CMSampleBufferDataIsReady(sampleBuffer)) {
+    return;
+  }
+
+  CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (pixelBuffer == nil) {
+    return;
+  }
+
+  int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
+      kNanosecondsPerSecond;
+  RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
+                                                                rotation:_rotation
+                                                             timeStampNs:timeStampNs];
+  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
+         fromConnection:(AVCaptureConnection *)connection {
+  RTCLogError(@"Dropped sample buffer.");
+}
+
+#pragma mark - AVCaptureSession notifications
+
+- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
+  NSString *reasonString = nil;
+#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
+    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
+  if ([UIDevice isIOS9OrLater]) {
+    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
+    if (reason) {
+      switch (reason.intValue) {
+        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
+          reasonString = @"VideoDeviceNotAvailableInBackground";
+          break;
+        case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
+          reasonString = @"AudioDeviceInUseByAnotherClient";
+          break;
+        case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
+          reasonString = @"VideoDeviceInUseByAnotherClient";
+          break;
+        case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
+          reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
+          break;
+      }
+    }
+  }
+#endif
+  RTCLog(@"Capture session interrupted: %@", reasonString);
+}
+
+- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
+  RTCLog(@"Capture session interruption ended.");
+}
+
+- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
+  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
+  RTCLogError(@"Capture session runtime error: %@", error);
+
+  [RTCDispatcher
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                     block:^{
+#if TARGET_OS_IPHONE
+                       if (error.code == AVErrorMediaServicesWereReset) {
+                         [self handleNonFatalError];
+                       } else {
+                         [self handleFatalError];
+                       }
+#else
+                       [self handleFatalError];
+#endif
+                     }];
+}
+
+- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
+  RTCLog(@"Capture session started.");
+
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+                                 // If we successfully restarted after an unknown error,
+                                 // allow future retries on fatal errors.
+                                 _hasRetriedOnFatalError = NO;
+                               }];
+}
+
+- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
+  RTCLog(@"Capture session stopped.");
+}
+
+- (void)handleFatalError {
+  [RTCDispatcher
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                     block:^{
+                       if (!_hasRetriedOnFatalError) {
+                         RTCLogWarning(@"Attempting to recover from fatal capture error.");
+                         [self handleNonFatalError];
+                         _hasRetriedOnFatalError = YES;
+                       } else {
+                         RTCLogError(@"Previous fatal error recovery failed.");
+                       }
+                     }];
+}
+
+- (void)handleNonFatalError {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+                                 RTCLog(@"Restarting capture session after error.");
+                                 if (_isRunning) {
+                                   [_captureSession startRunning];
+                                 }
+                               }];
+}
+
+#if TARGET_OS_IPHONE
+
+#pragma mark - UIApplication notifications
+
+- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+                                 if (_isRunning && !_captureSession.isRunning) {
+                                   RTCLog(@"Restarting capture session on active.");
+                                   [_captureSession startRunning];
+                                 }
+                               }];
+}
+
+#endif  // TARGET_OS_IPHONE
+
+#pragma mark - Private
+
+- (dispatch_queue_t)frameQueue {
+  if (!_frameQueue) {
+    _frameQueue =
+        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
+    dispatch_set_target_queue(_frameQueue,
+                              dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
+  }
+  return _frameQueue;
+}
+
+- (BOOL)setupCaptureSession {
+  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
+  _captureSession = [[AVCaptureSession alloc] init];
+#if defined(WEBRTC_IOS)
+  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
+  _captureSession.usesApplicationAudioSession = NO;
+#endif
+  [self setupVideoDataOutput];
+  // Add the output.
+  if (![_captureSession canAddOutput:_videoDataOutput]) {
+    RTCLogError(@"Video data output unsupported.");
+    return NO;
+  }
+  [_captureSession addOutput:_videoDataOutput];
+
+  return YES;
+}
+
+- (void)setupVideoDataOutput {
+  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
+  // Make the capturer output NV12. Ideally we want I420 but that's not
+  // currently supported on iPhone / iPad.
+  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+  videoDataOutput.videoSettings = @{
+    (NSString *)
+    // TODO(denicija): Remove this color conversion and use the original capture format directly.
+    kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+  };
+  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
+  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
+  _videoDataOutput = videoDataOutput;
+}
+
+#pragma mark - Private, called inside capture queue
+
+- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(int)fps {
+  @try {
+    _currentDevice.activeFormat = format;
+    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
+  } @catch (NSException *exception) {
+    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
+    return;
+  }
+}
+
+- (void)reconfigureCaptureSessionInput {
+  NSError *error = nil;
+  AVCaptureDeviceInput *input =
+      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
+  if (!input) {
+    RTCLogError(@"Failed to create camera input: %@", error.localizedDescription);
+    return;
+  }
+  for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
+    [_captureSession removeInput:oldInput];
+  }
+  if ([_captureSession canAddInput:input]) {
+    [_captureSession addInput:input];
+  } else {
+    RTCLogError(@"Cannot add camera as an input to the session.");
+    return;
+  }
+}
+
+- (void)updateOrientation {
+#if TARGET_OS_IPHONE
+  BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront;
+  switch ([UIDevice currentDevice].orientation) {
+    case UIDeviceOrientationPortrait:
+      _rotation = RTCVideoRotation_90;
+      break;
+    case UIDeviceOrientationPortraitUpsideDown:
+      _rotation = RTCVideoRotation_270;
+      break;
+    case UIDeviceOrientationLandscapeLeft:
+      _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
+      break;
+    case UIDeviceOrientationLandscapeRight:
+      _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
+      break;
+    case UIDeviceOrientationFaceUp:
+    case UIDeviceOrientationFaceDown:
+    case UIDeviceOrientationUnknown:
+      // Ignore.
+      break;
+  }
+#endif
+}
+
+@end
diff --git a/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h
new file mode 100644
index 0000000000..3a6cf0e558
--- /dev/null
+++ b/webrtc/sdk/objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import <AVFoundation/AVFoundation.h>
+#import <WebRTC/RTCVideoCapturer.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_EXPORT
+// Camera capture that implements RTCVideoCapturer. Delivers frames to an RTCVideoCapturerDelegate
+// (usually RTCVideoSource).
+@interface RTCCameraVideoCapturer : RTCVideoCapturer
+
+// Returns the list of available capture devices that support video capture.
++ (NSArray<AVCaptureDevice *> *)captureDevices;
+// Returns the list of formats that are supported by this class for this device.
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
+
+// Starts the capture session asynchronously.
+- (void)startCaptureWithDevice:(AVCaptureDevice *)device
+                        format:(AVCaptureDeviceFormat *)format
+                           fps:(int)fps;
+// Stops the capture session asynchronously.
+- (void)stopCapture;
+
+// Capture session that is used for capturing. Valid from initialization to dealloc.
+@property(readonly, nonatomic) AVCaptureSession *captureSession;
+
+@end
+
+NS_ASSUME_NONNULL_END
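For reviewers who want to exercise the new capturer, here is a minimal usage sketch written against the header above. CameraController is a hypothetical wrapper, the 30 fps value is an arbitrary choice, and the delegate is assumed to be an RTCVideoSource (or anything else conforming to RTCVideoCapturerDelegate); only the RTCCameraVideoCapturer calls themselves come from this patch.

#import <WebRTC/RTCCameraVideoCapturer.h>

// Hypothetical wrapper object that owns the capturer for this example.
@interface CameraController : NSObject
@property(nonatomic, strong) RTCCameraVideoCapturer *capturer;
- (void)startWithDelegate:(id<RTCVideoCapturerDelegate>)delegate;
- (void)stop;
@end

@implementation CameraController

- (void)startWithDelegate:(id<RTCVideoCapturerDelegate>)delegate {
  // Prefer the front camera; fall back to whatever device comes first.
  AVCaptureDevice *device = [RTCCameraVideoCapturer captureDevices].firstObject;
  for (AVCaptureDevice *candidate in [RTCCameraVideoCapturer captureDevices]) {
    if (candidate.position == AVCaptureDevicePositionFront) {
      device = candidate;
      break;
    }
  }
  if (!device) {
    return;  // No capture hardware, e.g. the simulator.
  }

  // Pick the highest-resolution format the capturer accepts for this device.
  AVCaptureDeviceFormat *format = nil;
  int32_t bestWidth = 0;
  for (AVCaptureDeviceFormat *candidate in
       [RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
    CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(candidate.formatDescription);
    if (dims.width > bestWidth) {
      bestWidth = dims.width;
      format = candidate;
    }
  }
  if (!format) {
    return;  // No format with a pixel format the stack supports.
  }

  // Frames are delivered to the delegate (usually an RTCVideoSource).
  self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:delegate];
  [self.capturer startCaptureWithDevice:device format:format fps:30];
}

- (void)stop {
  // Asynchronous, like startCaptureWithDevice:format:fps:. Note the capturer
  // asserts in dealloc if it is still running, so always stop before release.
  [self.capturer stopCapture];
}

@end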