From 3b673c66a4dde1d1d8e1737ef6b2ff93f415d6f4 Mon Sep 17 00:00:00 2001 From: jtteh Date: Tue, 25 Jul 2017 15:48:39 -0700 Subject: [PATCH] Removed file RTCCameraVideoCapturer.mm that isn't needed Also added post commit review changes. BUG=webrtc:7898 Review-Url: https://codereview.webrtc.org/2988783002 Cr-Commit-Position: refs/heads/master@{#19145} --- webrtc/sdk/BUILD.gn | 4 +- .../PeerConnection/RTCCameraVideoCapturer.m | 15 +- .../PeerConnection/RTCCameraVideoCapturer.mm | 449 ------------------ ...ce.h => AVCaptureSession+DevicePosition.h} | 2 +- ....mm => AVCaptureSession+DevicePosition.mm} | 4 +- .../RTCAVFoundationVideoCapturerInternal.mm | 4 +- .../UnitTests/RTCCameraVideoCapturerTests.mm | 2 +- 7 files changed, 16 insertions(+), 464 deletions(-) delete mode 100644 webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.mm rename webrtc/sdk/objc/Framework/Classes/Video/{AVCaptureSession+Device.h => AVCaptureSession+DevicePosition.h} (93%) rename webrtc/sdk/objc/Framework/Classes/Video/{AVCaptureSession+Device.mm => AVCaptureSession+DevicePosition.mm} (95%) diff --git a/webrtc/sdk/BUILD.gn b/webrtc/sdk/BUILD.gn index 39f48626fb..e80a2da960 100644 --- a/webrtc/sdk/BUILD.gn +++ b/webrtc/sdk/BUILD.gn @@ -142,8 +142,8 @@ if (is_ios || is_mac) { rtc_static_library("objc_video") { sources = [ - "objc/Framework/Classes/Video/AVCaptureSession+Device.h", - "objc/Framework/Classes/Video/AVCaptureSession+Device.mm", + "objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h", + "objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm", "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h", "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm", "objc/Framework/Classes/Video/RTCDefaultShader.h", diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m index 2d1aedd110..bb1c035834 100644 --- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m +++ b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m @@ -18,7 +18,7 @@ #import "WebRTC/UIDevice+RTCDevice.h" #endif -#import "AVCaptureSession+Device.h" +#import "AVCaptureSession+DevicePosition.h" #import "RTCDispatcher+Private.h" const int64_t kNanosecondsPerSecond = 1000000000; @@ -198,16 +198,17 @@ static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { #if TARGET_OS_IPHONE // Default to portrait orientation on iPhone. RTCVideoRotation rotation = RTCVideoRotation_90; - // Check here, which camera this frame is from, to avoid any race conditions. - AVCaptureDeviceInput *deviceInput = - (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; - BOOL usingFrontCamera = deviceInput.device.position == AVCaptureDevicePositionFront; - // Check the image's EXIF for the actual camera the image came as the image could have been + BOOL usingFrontCamera = NO; + // Check the image's EXIF for the camera the image came from as the image could have been // delayed as we set alwaysDiscardsLateVideoFrames to NO. 
AVCaptureDevicePosition cameraPosition = [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer]; if (cameraPosition != AVCaptureDevicePositionUnspecified) { - usingFrontCamera = cameraPosition == AVCaptureDevicePositionFront; + usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition; + } else { + AVCaptureDeviceInput *deviceInput = + (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; + usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position; } switch (_orientation) { case UIDeviceOrientationPortrait: diff --git a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.mm b/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.mm deleted file mode 100644 index 82db6700de..0000000000 --- a/webrtc/sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.mm +++ /dev/null @@ -1,449 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import - -#import "WebRTC/RTCCameraVideoCapturer.h" -#import "WebRTC/RTCLogging.h" -#import "WebRTC/RTCVideoFrameBuffer.h" - -#if TARGET_OS_IPHONE -#import "WebRTC/UIDevice+RTCDevice.h" -#endif - -#import "AVCaptureSession+Device.h" -#import "RTCDispatcher+Private.h" - -const int64_t kNanosecondsPerSecond = 1000000000; - -static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) { - return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange || - mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); -} - -@interface RTCCameraVideoCapturer () -@property(nonatomic, readonly) dispatch_queue_t frameQueue; -@end - -@implementation RTCCameraVideoCapturer { - AVCaptureVideoDataOutput *_videoDataOutput; - AVCaptureSession *_captureSession; - AVCaptureDevice *_currentDevice; - BOOL _hasRetriedOnFatalError; - BOOL _isRunning; - // Will the session be running once all asynchronous operations have been completed? - BOOL _willBeRunning; -#if TARGET_OS_IPHONE - UIDeviceOrientation _orientation; -#endif -} - -@synthesize frameQueue = _frameQueue; -@synthesize captureSession = _captureSession; - -- (instancetype)initWithDelegate:(__weak id)delegate { - if (self = [super initWithDelegate:delegate]) { - // Create the capture session and all relevant inputs and outputs. We need - // to do this in init because the application may want the capture session - // before we start the capturer for e.g. AVCapturePreviewLayer. All objects - // created here are retained until dealloc and never recreated. 
- if (![self setupCaptureSession]) { - return nil; - } - NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; -#if TARGET_OS_IPHONE - _orientation = UIDeviceOrientationPortrait; - [center addObserver:self - selector:@selector(deviceOrientationDidChange:) - name:UIDeviceOrientationDidChangeNotification - object:nil]; - [center addObserver:self - selector:@selector(handleCaptureSessionInterruption:) - name:AVCaptureSessionWasInterruptedNotification - object:_captureSession]; - [center addObserver:self - selector:@selector(handleCaptureSessionInterruptionEnded:) - name:AVCaptureSessionInterruptionEndedNotification - object:_captureSession]; - [center addObserver:self - selector:@selector(handleApplicationDidBecomeActive:) - name:UIApplicationDidBecomeActiveNotification - object:[UIApplication sharedApplication]]; -#endif - [center addObserver:self - selector:@selector(handleCaptureSessionRuntimeError:) - name:AVCaptureSessionRuntimeErrorNotification - object:_captureSession]; - [center addObserver:self - selector:@selector(handleCaptureSessionDidStartRunning:) - name:AVCaptureSessionDidStartRunningNotification - object:_captureSession]; - [center addObserver:self - selector:@selector(handleCaptureSessionDidStopRunning:) - name:AVCaptureSessionDidStopRunningNotification - object:_captureSession]; - } - return self; -} - -- (void)dealloc { - NSAssert( - !_willBeRunning, - @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?"); - [[NSNotificationCenter defaultCenter] removeObserver:self]; -} - -+ (NSArray *)captureDevices { - return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; -} - -+ (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device { - NSMutableArray *eligibleDeviceFormats = [NSMutableArray array]; - - for (AVCaptureDeviceFormat *format in device.formats) { - // Filter out subTypes that we currently don't support in the stack - FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - if (IsMediaSubTypeSupported(mediaSubType)) { - [eligibleDeviceFormats addObject:format]; - } - } - - return eligibleDeviceFormats; -} - -- (void)startCaptureWithDevice:(AVCaptureDevice *)device - format:(AVCaptureDeviceFormat *)format - fps:(NSInteger)fps { - _willBeRunning = YES; - [RTCDispatcher - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps); - -#if TARGET_OS_IPHONE - [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; -#endif - - _currentDevice = device; - - NSError *error = nil; - if (![_currentDevice lockForConfiguration:&error]) { - RTCLogError( - @"Failed to lock device %@. 
Error: %@", _currentDevice, error.userInfo); - return; - } - [self reconfigureCaptureSessionInput]; - [self updateOrientation]; - [_captureSession startRunning]; - [self updateDeviceCaptureFormat:format fps:fps]; - [_currentDevice unlockForConfiguration]; - _isRunning = YES; - }]; -} - -- (void)stopCapture { - _willBeRunning = NO; - [RTCDispatcher - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - RTCLogInfo("Stop"); - _currentDevice = nil; - for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) { - [_captureSession removeInput:oldInput]; - } - [_captureSession stopRunning]; - -#if TARGET_OS_IPHONE - [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; -#endif - _isRunning = NO; - }]; -} - -#pragma mark iOS notifications - -#if TARGET_OS_IPHONE -- (void)deviceOrientationDidChange:(NSNotification *)notification { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - [self updateOrientation]; - }]; -} -#endif - -#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate - -- (void)captureOutput:(AVCaptureOutput *)captureOutput - didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection { - NSParameterAssert(captureOutput == _videoDataOutput); - - if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || - !CMSampleBufferDataIsReady(sampleBuffer)) { - return; - } - - CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); - if (pixelBuffer == nil) { - return; - } - -#if TARGET_OS_IPHONE - // Default to portrait orientation on iPhone. - RTCVideoRotation rotation = RTCVideoRotation_90; - BOOL usingFrontCamera; - // Check the image's EXIF for the camera the image came from as the image could have been - // delayed as we set alwaysDiscardsLateVideoFrames to NO. - AVCaptureDevicePosition cameraPosition = - [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer]; - if (cameraPosition != AVCaptureDevicePositionUnspecified) { - usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition; - } else { - AVCaptureDeviceInput *deviceInput = - (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input; - usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position; - } - - switch (_orientation) { - case UIDeviceOrientationPortrait: - rotation = RTCVideoRotation_90; - break; - case UIDeviceOrientationPortraitUpsideDown: - rotation = RTCVideoRotation_270; - break; - case UIDeviceOrientationLandscapeLeft: - rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; - break; - case UIDeviceOrientationLandscapeRight: - rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; - break; - case UIDeviceOrientationFaceUp: - case UIDeviceOrientationFaceDown: - case UIDeviceOrientationUnknown: - // Ignore. - break; - } -#else - // No rotation on Mac. 
- RTCVideoRotation rotation = RTCVideoRotation_0; -#endif - - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; - int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * - kNanosecondsPerSecond; - RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer - rotation:rotation - timeStampNs:timeStampNs]; - [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; -} - -- (void)captureOutput:(AVCaptureOutput *)captureOutput - didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection { - RTCLogError(@"Dropped sample buffer."); -} - -#pragma mark - AVCaptureSession notifications - -- (void)handleCaptureSessionInterruption:(NSNotification *)notification { - NSString *reasonString = nil; -#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \ - __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0 - if ([UIDevice isIOS9OrLater]) { - NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey]; - if (reason) { - switch (reason.intValue) { - case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground: - reasonString = @"VideoDeviceNotAvailableInBackground"; - break; - case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient: - reasonString = @"AudioDeviceInUseByAnotherClient"; - break; - case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient: - reasonString = @"VideoDeviceInUseByAnotherClient"; - break; - case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps: - reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps"; - break; - } - } - } -#endif - RTCLog(@"Capture session interrupted: %@", reasonString); -} - -- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification { - RTCLog(@"Capture session interruption ended."); -} - -- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { - NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; - RTCLogError(@"Capture session runtime error: %@", error); - - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ -#if TARGET_OS_IPHONE - if (error.code == AVErrorMediaServicesWereReset) { - [self handleNonFatalError]; - } else { - [self handleFatalError]; - } -#else - [self handleFatalError]; -#endif - }]; -} - -- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { - RTCLog(@"Capture session started."); - - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - // If we successfully restarted after an unknown error, - // allow future retries on fatal errors. 
- _hasRetriedOnFatalError = NO; - }]; -} - -- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { - RTCLog(@"Capture session stopped."); -} - -- (void)handleFatalError { - [RTCDispatcher - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - if (!_hasRetriedOnFatalError) { - RTCLogWarning(@"Attempting to recover from fatal capture error."); - [self handleNonFatalError]; - _hasRetriedOnFatalError = YES; - } else { - RTCLogError(@"Previous fatal error recovery failed."); - } - }]; -} - -- (void)handleNonFatalError { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - RTCLog(@"Restarting capture session after error."); - if (_isRunning) { - [_captureSession startRunning]; - } - }]; -} - -#if TARGET_OS_IPHONE - -#pragma mark - UIApplication notifications - -- (void)handleApplicationDidBecomeActive:(NSNotification *)notification { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - if (_isRunning && !_captureSession.isRunning) { - RTCLog(@"Restarting capture session on active."); - [_captureSession startRunning]; - } - }]; -} - -#endif // TARGET_OS_IPHONE - -#pragma mark - Private - -- (dispatch_queue_t)frameQueue { - if (!_frameQueue) { - _frameQueue = - dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL); - dispatch_set_target_queue(_frameQueue, - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); - } - return _frameQueue; -} - -- (BOOL)setupCaptureSession { - NSAssert(_captureSession == nil, @"Setup capture session called twice."); - _captureSession = [[AVCaptureSession alloc] init]; -#if defined(WEBRTC_IOS) - _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority; - _captureSession.usesApplicationAudioSession = NO; -#endif - [self setupVideoDataOutput]; - // Add the output. - if (![_captureSession canAddOutput:_videoDataOutput]) { - RTCLogError(@"Video data output unsupported."); - return NO; - } - [_captureSession addOutput:_videoDataOutput]; - - return YES; -} - -- (void)setupVideoDataOutput { - NSAssert(_videoDataOutput == nil, @"Setup video data output called twice."); - // Make the capturer output NV12. Ideally we want I420 but that's not - // currently supported on iPhone / iPad. - AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; - videoDataOutput.videoSettings = @{ - (NSString *) - // TODO(denicija): Remove this color conversion and use the original capture format directly. 
- kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) - }; - videoDataOutput.alwaysDiscardsLateVideoFrames = NO; - [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue]; - _videoDataOutput = videoDataOutput; -} - -#pragma mark - Private, called inside capture queue - -- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], - @"updateDeviceCaptureFormat must be called on the capture queue."); - @try { - _currentDevice.activeFormat = format; - _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); - _currentDevice.activeVideoMaxFrameDuration = CMTimeMake(1, fps); - } @catch (NSException *exception) { - RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo); - return; - } -} - -- (void)reconfigureCaptureSessionInput { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], - @"reconfigureCaptureSessionInput must be called on the capture queue."); - NSError *error = nil; - AVCaptureDeviceInput *input = - [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error]; - if (!input) { - RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription); - return; - } - [_captureSession beginConfiguration]; - for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) { - [_captureSession removeInput:oldInput]; - } - if ([_captureSession canAddInput:input]) { - [_captureSession addInput:input]; - } else { - RTCLogError(@"Cannot add camera as an input to the session."); - } - [_captureSession commitConfiguration]; -} - -- (void)updateOrientation { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], - @"updateOrientation must be called on the capture queue."); -#if TARGET_OS_IPHONE - _orientation = [UIDevice currentDevice].orientation; -#endif -} - -@end diff --git a/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+Device.h b/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h similarity index 93% rename from webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+Device.h rename to webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h index 960ca3e44b..32ab6877f0 100644 --- a/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+Device.h +++ b/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h @@ -13,7 +13,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface AVCaptureSession (Device) +@interface AVCaptureSession (DevicePosition) // Check the image's EXIF for the camera the image came from. + (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer; diff --git a/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+Device.mm b/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm similarity index 95% rename from webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+Device.mm rename to webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm index 3b053dfe52..0814ecc6c5 100644 --- a/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+Device.mm +++ b/webrtc/sdk/objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#import "AVCaptureSession+Device.h" +#import "AVCaptureSession+DevicePosition.h" BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) { return CFStringFindWithOptions(theString, @@ -18,7 +18,7 @@ BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) { nil); } -@implementation AVCaptureSession (Device) +@implementation AVCaptureSession (DevicePosition) + (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer { // Check the image's EXIF for the camera the image came from. diff --git a/webrtc/sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm b/webrtc/sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm index 111f199e56..287ecdd71d 100644 --- a/webrtc/sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm +++ b/webrtc/sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm @@ -16,7 +16,7 @@ #import "WebRTC/UIDevice+RTCDevice.h" #endif -#import "AVCaptureSession+Device.h" +#import "AVCaptureSession+DevicePosition.h" #import "RTCDispatcher+Private.h" #import "WebRTC/RTCLogging.h" @@ -218,7 +218,7 @@ #if TARGET_OS_IPHONE // Default to portrait orientation on iPhone. webrtc::VideoRotation rotation = webrtc::kVideoRotation_90; - BOOL usingFrontCamera; + BOOL usingFrontCamera = NO; // Check the image's EXIF for the camera the image came from as the image could have been // delayed as we set alwaysDiscardsLateVideoFrames to NO. AVCaptureDevicePosition cameraPosition = diff --git a/webrtc/sdk/objc/Framework/UnitTests/RTCCameraVideoCapturerTests.mm b/webrtc/sdk/objc/Framework/UnitTests/RTCCameraVideoCapturerTests.mm index fc5dfe3639..66ac117bd2 100644 --- a/webrtc/sdk/objc/Framework/UnitTests/RTCCameraVideoCapturerTests.mm +++ b/webrtc/sdk/objc/Framework/UnitTests/RTCCameraVideoCapturerTests.mm @@ -19,7 +19,7 @@ #import #import #import -#import "AVCaptureSession+Device.h" +#import "AVCaptureSession+DevicePosition.h" #if TARGET_OS_IPHONE // Helper method.
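
For reference: with this change the capturer prefers +[AVCaptureSession devicePositionForSampleBuffer:] and only falls back to the connection's input port when the EXIF data does not identify the camera. The body of that category method is not shown in this diff; the sketch below illustrates how such an EXIF-based lookup might be implemented, assuming the sample buffer's attachments carry an "{Exif}" dictionary whose "LensModel" string names the camera. The helper name and the exact keys are illustrative assumptions, not necessarily what AVCaptureSession+DevicePosition.mm does.

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Sketch only: derive the camera position from a sample buffer's EXIF
// attachments. Returns Unspecified when the metadata does not identify
// the camera, which is when the caller falls back to the capture input.
static AVCaptureDevicePosition DevicePositionForSampleBuffer(CMSampleBufferRef sampleBuffer) {
  AVCaptureDevicePosition position = AVCaptureDevicePositionUnspecified;
  CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
      kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
  if (!attachments) {
    return position;
  }
  CFDictionaryRef exif = NULL;
  if (CFDictionaryGetValueIfPresent(attachments, CFSTR("{Exif}"), (const void **)&exif)) {
    CFStringRef lensModel = NULL;
    if (CFDictionaryGetValueIfPresent(exif, CFSTR("LensModel"), (const void **)&lensModel)) {
      // A lens model string typically reads like "iPhone 7 front camera 2.87mm f/2.2".
      if (CFStringFind(lensModel, CFSTR("front"), kCFCompareCaseInsensitive).location !=
          kCFNotFound) {
        position = AVCaptureDevicePositionFront;
      } else if (CFStringFind(lensModel, CFSTR("back"), kCFCompareCaseInsensitive).location !=
                 kCFNotFound) {
        position = AVCaptureDevicePositionBack;
      }
    }
  }
  CFRelease(attachments);
  return position;
}

Reading the position from the buffer itself matters because alwaysDiscardsLateVideoFrames is set to NO: a delayed frame can reach the delegate after the session has already switched cameras, and the EXIF data travels with the frame while connection.inputPorts only reflects the current input.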