Delete RTCAVFoundationVideoSource and related classes.

Bug: webrtc:8852
Change-Id: Ie073fe3f7bafc3d22fafef51f659e340d5a9250f
Reviewed-on: https://webrtc-review.googlesource.com/48620
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Reviewed-by: Anders Carlsson <andersc@webrtc.org>
Commit-Queue: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21985}
Kári Tristan Helgason authored 2018-02-08 20:17:34 +01:00, committed by Commit Bot
parent a55bdc2406
commit 0d3c9a3f2b
17 changed files with 2 additions and 1155 deletions

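The diffs below remove the deprecated capturer path wholesale; the replacement this commit keeps is the RTCCameraVideoCapturer + RTCVideoSource pair. For orientation, a minimal migration sketch — not part of this commit, and assuming the RTCCameraVideoCapturer API of this SDK generation (initWithDelegate:, captureDevices, supportedFormatsForDevice:, startCaptureWithDevice:format:fps:) plus a caller that keeps the capturer alive:

#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCCameraVideoCapturer.h>
#import <WebRTC/RTCPeerConnectionFactory.h>
#import <WebRTC/RTCVideoSource.h>
#import <WebRTC/RTCVideoTrack.h>

// Sketch of what callers of the removed
// -[RTCPeerConnectionFactory avFoundationVideoSourceWithConstraints:] switch
// to. RTCVideoSource conforms to RTCVideoCapturerDelegate, so the capturer
// pushes frames straight into it; the caller must retain the capturer.
static RTCVideoTrack *CreateLocalVideoTrack(RTCPeerConnectionFactory *factory,
                                            RTCCameraVideoCapturer **outCapturer) {
  RTCVideoSource *source = [factory videoSource];
  RTCCameraVideoCapturer *capturer =
      [[RTCCameraVideoCapturer alloc] initWithDelegate:source];

  // Pick a device and one of its supported formats. The deleted classes
  // defaulted to the front camera at 30 fps.
  AVCaptureDevice *device = [RTCCameraVideoCapturer captureDevices].firstObject;
  AVCaptureDeviceFormat *format =
      [RTCCameraVideoCapturer supportedFormatsForDevice:device].firstObject;
  [capturer startCaptureWithDevice:device format:format fps:30];

  *outCapturer = capturer;
  return [factory videoTrackWithSource:source trackId:@"video0"];
}

The AppRTCMobile changes below make exactly this swap: the RTCAVFoundationVideoSource import disappears and an RTCVideoSource import takes its place, next to the RTCCameraVideoCapturer import the app already had.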
examples/objc/AppRTCMobile/ARDAppClient.m

@@ -10,7 +10,6 @@
 #import "ARDAppClient+Internal.h"
-#import "WebRTC/RTCAVFoundationVideoSource.h"
 #import "WebRTC/RTCAudioTrack.h"
 #import "WebRTC/RTCCameraVideoCapturer.h"
 #import "WebRTC/RTCConfiguration.h"
@@ -24,6 +23,7 @@
 #import "WebRTC/RTCRtpSender.h"
 #import "WebRTC/RTCTracing.h"
 #import "WebRTC/RTCVideoCodecFactory.h"
+#import "WebRTC/RTCVideoSource.h"
 #import "WebRTC/RTCVideoTrack.h"
 #import "ARDAppEngineClient.h"

examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m

@@ -18,7 +18,6 @@
 #import "ARDFileCaptureController.h"
 #import "ARDSettingsModel.h"
 #import "ARDVideoCallView.h"
-#import "WebRTC/RTCAVFoundationVideoSource.h"
 #import "WebRTC/RTCDispatcher.h"
 #import "WebRTC/RTCLogging.h"
 #import "WebRTC/RTCMediaConstraints.h"

sdk/BUILD.gn

@@ -157,8 +157,6 @@ if (is_ios || is_mac) {
     sources = [
       "objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.h",
       "objc/Framework/Classes/Video/AVCaptureSession+DevicePosition.mm",
-      "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h",
-      "objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm",
       "objc/Framework/Classes/Video/RTCDefaultShader.h",
       "objc/Framework/Classes/Video/RTCDefaultShader.mm",
       "objc/Framework/Classes/Video/RTCI420TextureCache.h",
@@ -166,10 +164,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/Video/RTCOpenGLDefines.h",
       "objc/Framework/Classes/Video/RTCShader.h",
       "objc/Framework/Classes/Video/RTCShader.mm",
-      "objc/Framework/Classes/Video/avfoundationformatmapper.h",
-      "objc/Framework/Classes/Video/avfoundationformatmapper.mm",
-      "objc/Framework/Classes/Video/avfoundationvideocapturer.h",
-      "objc/Framework/Classes/Video/avfoundationvideocapturer.mm",
     ]
     libs = []
     if (is_ios) {
@@ -278,8 +272,6 @@ if (is_ios || is_mac) {
   rtc_static_library("peerconnection_objc") {
     visibility = [ "*" ]
     sources = [
-      "objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource+Private.h",
-      "objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm",
       "objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m",
       "objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h",
     ]
@@ -525,7 +517,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoDecoder.mm",
       "objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.h",
       "objc/Framework/Classes/PeerConnection/RTCWrappedNativeVideoEncoder.mm",
-      "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
       "objc/Framework/Headers/WebRTC/RTCConfiguration.h",
@@ -676,7 +667,6 @@ if (is_ios || is_mac) {
       "objc/Framework/UnitTests/RTCPeerConnectionTest.mm",
       "objc/Framework/UnitTests/RTCSessionDescriptionTest.mm",
       "objc/Framework/UnitTests/RTCTracingTest.mm",
-      "objc/Framework/UnitTests/avformatmappertests.mm",
       "objc/Framework/UnitTests/objc_video_decoder_factory_tests.mm",
       "objc/Framework/UnitTests/objc_video_encoder_factory_tests.mm",
       "objc/Framework/UnitTests/scoped_cftyperef_tests.mm",
@@ -730,7 +720,6 @@ if (is_ios || is_mac) {
       "objc/Framework/Headers/WebRTC/RTCVideoCodec.h",
       "objc/Framework/Headers/WebRTC/RTCVideoCodecFactory.h",
       "objc/Framework/Headers/WebRTC/RTCAudioSessionConfiguration.h",
-      "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
       "objc/Framework/Headers/WebRTC/RTCCameraVideoCapturer.h",

sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource+Private.h (deleted)

@@ -1,27 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAVFoundationVideoSource.h"
#include "avfoundationvideocapturer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCAVFoundationVideoSource ()
@property(nonatomic, readonly) webrtc::AVFoundationVideoCapturer *capturer;
/** Initialize an RTCAVFoundationVideoSource with constraints. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
constraints:(nullable RTCMediaConstraints *)constraints;
@end
NS_ASSUME_NONNULL_END

sdk/objc/Framework/Classes/PeerConnection/RTCAVFoundationVideoSource.mm (deleted)

@@ -1,62 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAVFoundationVideoSource+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCVideoSource+Private.h"
@implementation RTCAVFoundationVideoSource {
webrtc::AVFoundationVideoCapturer *_capturer;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
constraints:(RTCMediaConstraints *)constraints {
NSParameterAssert(factory);
// We pass ownership of the capturer to the source, but since we own
// the source, it should be ok to keep a raw pointer to the
// capturer.
_capturer = new webrtc::AVFoundationVideoCapturer();
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
factory.nativeFactory->CreateVideoSource(
std::unique_ptr<cricket::VideoCapturer>(_capturer),
constraints.nativeConstraints.get());
return [super initWithNativeVideoSource:source];
}
- (void)adaptOutputFormatToWidth:(int)width
height:(int)height
fps:(int)fps {
self.capturer->AdaptOutputFormat(width, height, fps);
}
- (BOOL)canUseBackCamera {
return self.capturer->CanUseBackCamera();
}
- (BOOL)useBackCamera {
return self.capturer->GetUseBackCamera();
}
- (void)setUseBackCamera:(BOOL)useBackCamera {
self.capturer->SetUseBackCamera(useBackCamera);
}
- (AVCaptureSession *)captureSession {
return self.capturer->GetCaptureSession();
}
- (webrtc::AVFoundationVideoCapturer *)capturer {
return _capturer;
}
@end

sdk/objc/Framework/Classes/PeerConnection/RTCCameraVideoCapturer.m

@@ -379,7 +379,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 - (dispatch_queue_t)frameQueue {
   if (!_frameQueue) {
     _frameQueue =
-        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
+        dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL);
     dispatch_set_target_queue(_frameQueue,
                               dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
   }

sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnectionFactory.mm

@@ -13,7 +13,6 @@
 #import "RTCPeerConnectionFactoryOptions+Private.h"
 #import "NSString+StdString.h"
-#import "RTCAVFoundationVideoSource+Private.h"
 #import "RTCAudioSource+Private.h"
 #import "RTCAudioTrack+Private.h"
 #import "RTCMediaConstraints+Private.h"
@@ -232,15 +231,6 @@
                             trackId:trackId];
 }
 
-- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
-        (nullable RTCMediaConstraints *)constraints {
-#ifdef HAVE_NO_MEDIA
-  return nil;
-#else
-  return [[RTCAVFoundationVideoSource alloc] initWithFactory:self constraints:constraints];
-#endif
-}
-
 - (RTCVideoSource *)videoSource {
   rtc::scoped_refptr<webrtc::ObjcVideoTrackSource> objcVideoTrackSource(
       new rtc::RefCountedObject<webrtc::ObjcVideoTrackSource>());

sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.h (deleted)

@@ -1,49 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#ifdef __cplusplus
#include "avfoundationvideocapturer.h"
#endif
NS_ASSUME_NONNULL_BEGIN
// This class is an implementation detail of AVFoundationVideoCapturer and handles
// the ObjC integration with the AVFoundation APIs.
// It is meant to be owned by an instance of AVFoundationVideoCapturer.
// The reason for this is because other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal
: NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
@property(atomic, assign) BOOL isRunning; // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start.
// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;
- (nullable AVCaptureDevice *)frontCaptureDevice;
- (nullable AVCaptureDevice *)backCaptureDevice;
// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;
@end
NS_ASSUME_NONNULL_END

sdk/objc/Framework/Classes/Video/RTCAVFoundationVideoCapturerInternal.mm (deleted)

@@ -1,515 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAVFoundationVideoCapturerInternal.h"
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#import "WebRTC/UIDevice+RTCDevice.h"
#endif
#import "AVCaptureSession+DevicePosition.h"
#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#include "avfoundationformatmapper.h"
@implementation RTCAVFoundationVideoCapturerInternal {
// Keep pointers to inputs for convenience.
AVCaptureDeviceInput *_frontCameraInput;
AVCaptureDeviceInput *_backCameraInput;
AVCaptureVideoDataOutput *_videoDataOutput;
// The cricket::VideoCapturer that owns this class. Should never be NULL.
webrtc::AVFoundationVideoCapturer *_capturer;
BOOL _hasRetriedOnFatalError;
BOOL _isRunning;
BOOL _hasStarted;
rtc::CriticalSection _crit;
#if TARGET_OS_IPHONE
UIDeviceOrientation _orientation;
#endif
}
@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;
// This is called from the thread that creates the video source, which is likely
// the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
RTC_DCHECK(capturer);
if (self = [super init]) {
_capturer = capturer;
// Create the capture session and all relevant inputs and outputs. We need
// to do this in init because the application may want the capture session
// before we start the capturer for e.g. AVCapturePreviewLayer. All objects
// created here are retained until dealloc and never recreated.
if (![self setupCaptureSession]) {
return nil;
}
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
_orientation = UIDeviceOrientationPortrait;
[center addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruption:)
name:AVCaptureSessionWasInterruptedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruptionEnded:)
name:AVCaptureSessionInterruptionEndedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleApplicationDidBecomeActive:)
name:UIApplicationDidBecomeActiveNotification
object:[UIApplication sharedApplication]];
#endif
[center addObserver:self
selector:@selector(handleCaptureSessionRuntimeError:)
name:AVCaptureSessionRuntimeErrorNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStartRunning:)
name:AVCaptureSessionDidStartRunningNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStopRunning:)
name:AVCaptureSessionDidStopRunningNotification
object:_captureSession];
}
return self;
}
- (void)dealloc {
RTC_DCHECK(!self.hasStarted);
[[NSNotificationCenter defaultCenter] removeObserver:self];
_capturer = nullptr;
}
- (AVCaptureSession *)captureSession {
return _captureSession;
}
- (AVCaptureDevice *)getActiveCaptureDevice {
return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}
- (nullable AVCaptureDevice *)frontCaptureDevice {
return _frontCameraInput.device;
}
- (nullable AVCaptureDevice *)backCaptureDevice {
return _backCameraInput.device;
}
- (dispatch_queue_t)frameQueue {
if (!_frameQueue) {
_frameQueue =
dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_frameQueue,
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
}
return _frameQueue;
}
// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
return _backCameraInput != nil;
}
// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
@synchronized(self) {
return _useBackCamera;
}
}
// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
if (!self.canUseBackCamera) {
if (useBackCamera) {
RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
"not switching.");
}
return;
}
@synchronized(self) {
if (_useBackCamera == useBackCamera) {
return;
}
_useBackCamera = useBackCamera;
[self updateSessionInputForUseBackCamera:useBackCamera];
}
}
// Called from WebRTC thread.
- (void)start {
if (self.hasStarted) {
return;
}
self.hasStarted = YES;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
AVCaptureSession *captureSession = self.captureSession;
[captureSession startRunning];
}];
}
// Called from same thread as start.
- (void)stop {
if (!self.hasStarted) {
return;
}
self.hasStarted = NO;
// Due to this async block, it's possible that the ObjC object outlives the
// C++ one. In order to not invoke functions on the C++ object, we set
// hasStarted immediately instead of dispatching it async.
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
[_captureSession stopRunning];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
}];
}
#pragma mark iOS notifications
#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
}
#endif
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
NSParameterAssert(captureOutput == _videoDataOutput);
if (!self.hasStarted) {
return;
}
#if TARGET_OS_IPHONE
// Default to portrait orientation on iPhone.
webrtc::VideoRotation rotation = webrtc::kVideoRotation_90;
BOOL usingFrontCamera = NO;
// Check the image's EXIF for the camera the image came from as the image could have been
// delayed as we set alwaysDiscardsLateVideoFrames to NO.
AVCaptureDevicePosition cameraPosition =
[AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
if (cameraPosition != AVCaptureDevicePositionUnspecified) {
usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
} else {
AVCaptureDeviceInput *deviceInput =
(AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
}
switch (_orientation) {
case UIDeviceOrientationPortrait:
rotation = webrtc::kVideoRotation_90;
break;
case UIDeviceOrientationPortraitUpsideDown:
rotation = webrtc::kVideoRotation_270;
break;
case UIDeviceOrientationLandscapeLeft:
rotation = usingFrontCamera ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0;
break;
case UIDeviceOrientationLandscapeRight:
rotation = usingFrontCamera ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
case UIDeviceOrientationUnknown:
// Ignore.
break;
}
#else
// No rotation on Mac.
webrtc::VideoRotation rotation = webrtc::kVideoRotation_0;
#endif
_capturer->CaptureSampleBuffer(sampleBuffer, rotation);
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
RTCLogError(@"Dropped sample buffer.");
}
#pragma mark - AVCaptureSession notifications
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
NSString *reasonString = nil;
#if TARGET_OS_IPHONE
NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
if (reason) {
switch (reason.intValue) {
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
reasonString = @"VideoDeviceNotAvailableInBackground";
break;
case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
reasonString = @"AudioDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
reasonString = @"VideoDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
break;
}
}
#endif
RTCLog(@"Capture session interrupted: %@", reasonString);
// TODO(tkchin): Handle this case.
}
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
RTCLog(@"Capture session interruption ended.");
// TODO(tkchin): Handle this case.
}
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
RTCLogError(@"Capture session runtime error: %@", error);
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
#if TARGET_OS_IPHONE
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
#else
[self handleFatalError];
#endif
}];
}
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
RTCLog(@"Capture session started.");
self.isRunning = YES;
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown error,
// allow future retries on fatal errors.
_hasRetriedOnFatalError = NO;
}];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
RTCLog(@"Capture session stopped.");
self.isRunning = NO;
}
- (void)handleFatalError {
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (!_hasRetriedOnFatalError) {
RTCLogWarning(@"Attempting to recover from fatal capture error.");
[self handleNonFatalError];
_hasRetriedOnFatalError = YES;
} else {
RTCLogError(@"Previous fatal error recovery failed.");
}
}];
}
- (void)handleNonFatalError {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (self.hasStarted) {
RTCLog(@"Restarting capture session after error.");
[self.captureSession startRunning];
}
}];
}
#if TARGET_OS_IPHONE
#pragma mark - UIApplication notifications
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (self.hasStarted && !self.captureSession.isRunning) {
RTCLog(@"Restarting capture session on active.");
[self.captureSession startRunning];
}
}];
}
#endif // TARGET_OS_IPHONE
#pragma mark - Private
- (BOOL)setupCaptureSession {
AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
captureSession.usesApplicationAudioSession = NO;
#endif
// Add the output.
AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
if (![captureSession canAddOutput:videoDataOutput]) {
RTCLogError(@"Video data output unsupported.");
return NO;
}
[captureSession addOutput:videoDataOutput];
// Get the front and back cameras. If there isn't a front camera
// give up.
AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
if (!frontCameraInput) {
RTCLogError(@"No front camera for capture session.");
return NO;
}
// Add the inputs.
if (![captureSession canAddInput:frontCameraInput] ||
(backCameraInput && ![captureSession canAddInput:backCameraInput])) {
RTCLogError(@"Session does not support capture inputs.");
return NO;
}
AVCaptureDeviceInput *input = self.useBackCamera ? backCameraInput : frontCameraInput;
[captureSession addInput:input];
_captureSession = captureSession;
return YES;
}
- (AVCaptureVideoDataOutput *)videoDataOutput {
if (!_videoDataOutput) {
// Make the capturer output NV12. Ideally we want I420 but that's not
// currently supported on iPhone / iPad.
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = @{
(NSString *)
// TODO(denicija): Remove this color conversion and use the original capture format directly.
kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};
videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
[videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
_videoDataOutput = videoDataOutput;
}
return _videoDataOutput;
}
- (AVCaptureDevice *)videoCaptureDeviceForPosition:(AVCaptureDevicePosition)position {
for (AVCaptureDevice *captureDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if (captureDevice.position == position) {
return captureDevice;
}
}
return nil;
}
- (AVCaptureDeviceInput *)frontCameraInput {
if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
AVCaptureDevice *frontCameraDevice =
[self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
AVCaptureDevice *frontCameraDevice =
[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
if (!frontCameraDevice) {
RTCLogWarning(@"Failed to find front capture device.");
return nil;
}
NSError *error = nil;
AVCaptureDeviceInput *frontCameraInput =
[AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice error:&error];
if (!frontCameraInput) {
RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
return nil;
}
_frontCameraInput = frontCameraInput;
}
return _frontCameraInput;
}
- (AVCaptureDeviceInput *)backCameraInput {
if (!_backCameraInput) {
AVCaptureDevice *backCameraDevice =
[self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
if (!backCameraDevice) {
RTCLogWarning(@"Failed to find front capture device.");
return nil;
}
NSError *error = nil;
AVCaptureDeviceInput *backCameraInput =
[AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice error:&error];
if (!backCameraInput) {
RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
return nil;
}
_backCameraInput = backCameraInput;
}
return _backCameraInput;
}
// Called from capture session queue.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
_orientation = [UIDevice currentDevice].orientation;
#endif
}
// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[_captureSession beginConfiguration];
AVCaptureDeviceInput *oldInput = _backCameraInput;
AVCaptureDeviceInput *newInput = _frontCameraInput;
if (useBackCamera) {
oldInput = _frontCameraInput;
newInput = _backCameraInput;
}
if (oldInput) {
// Ok to remove this even if it's not attached. Will be no-op.
[_captureSession removeInput:oldInput];
}
if (newInput) {
[_captureSession addInput:newInput];
}
[self updateOrientation];
AVCaptureDevice *newDevice = newInput.device;
const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
webrtc::SetFormatForCaptureDevice(newDevice, _captureSession, *format);
[_captureSession commitConfiguration];
}];
}
@end

sdk/objc/Framework/Classes/Video/avfoundationformatmapper.h (deleted)

@@ -1,29 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <set>
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#include "media/base/videocapturer.h"
namespace webrtc {
// Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for given input
// device.
std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
AVCaptureDevice* device);
// Sets device format for the provided capture device. Returns YES/NO depending
// on success.
bool SetFormatForCaptureDevice(AVCaptureDevice* device,
AVCaptureSession* session,
const cricket::VideoFormat& format);
}

sdk/objc/Framework/Classes/Video/avfoundationformatmapper.mm (deleted)

@@ -1,135 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "avfoundationformatmapper.h"
#import "WebRTC/RTCLogging.h"
// TODO(denicija): add support for higher frame rates.
// See https://crbug.com/webrtc/6355 for more info.
static const int kFramesPerSecond = 30;
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}
static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange* range) {
return range.minFrameRate <= fps && range.maxFrameRate >= fps;
}
// Returns filtered array of device formats based on predefined constraints our
// stack imposes.
static NSArray<AVCaptureDeviceFormat*>* GetEligibleDeviceFormats(
const AVCaptureDevice* device,
int supportedFps) {
NSMutableArray<AVCaptureDeviceFormat*>* eligibleDeviceFormats =
[NSMutableArray array];
for (AVCaptureDeviceFormat* format in device.formats) {
// Filter out subTypes that we currently don't support in the stack
FourCharCode mediaSubType =
CMFormatDescriptionGetMediaSubType(format.formatDescription);
if (!IsMediaSubTypeSupported(mediaSubType)) {
continue;
}
// Filter out frame rate ranges that we currently don't support in the stack
for (AVFrameRateRange* frameRateRange in format.videoSupportedFrameRateRanges) {
if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
[eligibleDeviceFormats addObject:format];
break;
}
}
}
return [eligibleDeviceFormats copy];
}
// Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
static AVCaptureDeviceFormat* GetDeviceFormatForVideoFormat(
const AVCaptureDevice* device,
const cricket::VideoFormat& videoFormat) {
AVCaptureDeviceFormat* desiredDeviceFormat = nil;
NSArray<AVCaptureDeviceFormat*>* eligibleFormats =
GetEligibleDeviceFormats(device, videoFormat.framerate());
for (AVCaptureDeviceFormat* deviceFormat in eligibleFormats) {
CMVideoDimensions dimension =
CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
FourCharCode mediaSubType =
CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);
if (videoFormat.width == dimension.width &&
videoFormat.height == dimension.height) {
if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
// This is the preferred format so no need to wait for better option.
return deviceFormat;
} else {
// This is good candidate, but let's wait for something better.
desiredDeviceFormat = deviceFormat;
}
}
}
return desiredDeviceFormat;
}
namespace webrtc {
std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
AVCaptureDevice* device) {
std::set<cricket::VideoFormat> supportedFormats;
NSArray<AVCaptureDeviceFormat*>* eligibleFormats =
GetEligibleDeviceFormats(device, kFramesPerSecond);
for (AVCaptureDeviceFormat* deviceFormat in eligibleFormats) {
CMVideoDimensions dimension =
CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
cricket::VideoFormat format = cricket::VideoFormat(
dimension.width, dimension.height,
cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
cricket::FOURCC_NV12);
supportedFormats.insert(format);
}
return supportedFormats;
}
bool SetFormatForCaptureDevice(AVCaptureDevice* device,
AVCaptureSession* session,
const cricket::VideoFormat& format) {
AVCaptureDeviceFormat* deviceFormat =
GetDeviceFormatForVideoFormat(device, format);
const int fps = cricket::VideoFormat::IntervalToFps(format.interval);
NSError* error = nil;
bool success = true;
[session beginConfiguration];
if ([device lockForConfiguration:&error]) {
@try {
device.activeFormat = deviceFormat;
device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
} @catch (NSException* exception) {
RTCLogError(@"Failed to set active format!\n User info:%@",
exception.userInfo);
success = false;
}
[device unlockForConfiguration];
} else {
RTCLogError(@"Failed to lock device %@. Error: %@", device, error.userInfo);
success = false;
}
[session commitConfiguration];
return success;
}
} // namespace webrtc

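With the mapper gone, equivalent format selection goes through +[RTCCameraVideoCapturer supportedFormatsForDevice:]. A hedged sketch of the dimension-matching, prefer-full-range-NV12 policy that GetDeviceFormatForVideoFormat implemented above — the helper name and exact policy are illustrative; only the CoreMedia calls and the capturer class method are existing API:

#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCCameraVideoCapturer.h>

// Illustrative stand-in for the deleted GetDeviceFormatForVideoFormat():
// match the requested dimensions, preferring full-range NV12 as the old
// mapper did.
static AVCaptureDeviceFormat *FormatMatching(AVCaptureDevice *device,
                                             int width, int height) {
  AVCaptureDeviceFormat *candidate = nil;
  for (AVCaptureDeviceFormat *format in
       [RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
    CMVideoDimensions dim =
        CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    if (dim.width != width || dim.height != height) {
      continue;
    }
    FourCharCode subType =
        CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (subType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
      return format;  // Preferred format; no need to keep looking.
    }
    candidate = format;  // Good candidate, but wait for something better.
  }
  return candidate;
}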
sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.h (deleted)

@@ -1,72 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_AVFOUNDATIONVIDEOCAPTURER_H_
#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_AVFOUNDATIONVIDEOCAPTURER_H_
#import <AVFoundation/AVFoundation.h>
#include "api/video/video_frame.h"
#include "common_video/include/i420_buffer_pool.h"
#include "media/base/videocapturer.h"
@class RTCAVFoundationVideoCapturerInternal;
namespace rtc {
class Thread;
} // namespace rtc
namespace webrtc {
class AVFoundationVideoCapturer : public cricket::VideoCapturer {
public:
AVFoundationVideoCapturer();
~AVFoundationVideoCapturer();
cricket::CaptureState Start(const cricket::VideoFormat& format) override;
void Stop() override;
bool IsRunning() override;
bool IsScreencast() const override {
return false;
}
bool GetPreferredFourccs(std::vector<uint32_t> *fourccs) override {
fourccs->push_back(cricket::FOURCC_NV12);
return true;
}
// Returns the active capture session. Calls to the capture session should
// occur on the RTCDispatcherTypeCaptureSession queue in RTCDispatcher.
AVCaptureSession* GetCaptureSession();
// Returns whether the rear-facing camera can be used.
// e.g. It can't be used because it doesn't exist.
bool CanUseBackCamera() const;
// Switches the camera being used (either front or back).
void SetUseBackCamera(bool useBackCamera);
bool GetUseBackCamera() const;
// Converts the sample buffer into a cricket::CapturedFrame and signals the
// frame for capture.
void CaptureSampleBuffer(CMSampleBufferRef sample_buffer,
webrtc::VideoRotation rotation);
// Called to adjust the size of output frames to supplied |width| and
// |height|. Also drops frames to make the output match |fps|.
void AdaptOutputFormat(int width, int height, int fps);
private:
RTCAVFoundationVideoCapturerInternal *_capturer;
webrtc::I420BufferPool _buffer_pool;
}; // AVFoundationVideoCapturer
} // namespace webrtc
#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_AVFOUNDATIONVIDEOCAPTURER_H_

sdk/objc/Framework/Classes/Video/avfoundationvideocapturer.mm (deleted)

@@ -1,179 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "avfoundationvideocapturer.h"
#import <AVFoundation/AVFoundation.h>
#import "RTCAVFoundationVideoCapturerInternal.h"
#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "avfoundationformatmapper.h"
#include "api/video/video_rotation.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/thread.h"
#include "sdk/objc/Framework/Native/src/objc_frame_buffer.h"
namespace webrtc {
enum AVFoundationVideoCapturerMessageType : uint32_t {
kMessageTypeFrame,
};
AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
_capturer =
[[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
std::set<cricket::VideoFormat> front_camera_video_formats =
GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);
std::set<cricket::VideoFormat> back_camera_video_formats =
GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);
std::vector<cricket::VideoFormat> intersection_video_formats;
if (back_camera_video_formats.empty()) {
intersection_video_formats.assign(front_camera_video_formats.begin(),
front_camera_video_formats.end());
} else if (front_camera_video_formats.empty()) {
intersection_video_formats.assign(back_camera_video_formats.begin(),
back_camera_video_formats.end());
} else {
std::set_intersection(
front_camera_video_formats.begin(), front_camera_video_formats.end(),
back_camera_video_formats.begin(), back_camera_video_formats.end(),
std::back_inserter(intersection_video_formats));
}
SetSupportedFormats(intersection_video_formats);
}
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
_capturer = nil;
}
cricket::CaptureState AVFoundationVideoCapturer::Start(
const cricket::VideoFormat& format) {
if (!_capturer) {
RTC_LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
return cricket::CaptureState::CS_FAILED;
}
if (_capturer.isRunning) {
RTC_LOG(LS_ERROR) << "The capturer is already running.";
return cricket::CaptureState::CS_FAILED;
}
AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
AVCaptureSession* session = _capturer.captureSession;
if (!SetFormatForCaptureDevice(device, session, format)) {
return cricket::CaptureState::CS_FAILED;
}
SetCaptureFormat(&format);
// This isn't super accurate because it takes a while for the AVCaptureSession
// to spin up, and this call returns async.
// TODO(tkchin): make this better.
[_capturer start];
SetCaptureState(cricket::CaptureState::CS_RUNNING);
return cricket::CaptureState::CS_STARTING;
}
void AVFoundationVideoCapturer::Stop() {
[_capturer stop];
SetCaptureFormat(NULL);
}
bool AVFoundationVideoCapturer::IsRunning() {
return _capturer.isRunning;
}
AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
return _capturer.captureSession;
}
bool AVFoundationVideoCapturer::CanUseBackCamera() const {
return _capturer.canUseBackCamera;
}
void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
_capturer.useBackCamera = useBackCamera;
}
bool AVFoundationVideoCapturer::GetUseBackCamera() const {
return _capturer.useBackCamera;
}
void AVFoundationVideoCapturer::AdaptOutputFormat(int width, int height, int fps) {
cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
video_adapter()->OnOutputFormatRequest(format);
}
void AVFoundationVideoCapturer::CaptureSampleBuffer(
CMSampleBufferRef sample_buffer, VideoRotation rotation) {
if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
!CMSampleBufferIsValid(sample_buffer) ||
!CMSampleBufferDataIsReady(sample_buffer)) {
return;
}
CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
if (image_buffer == NULL) {
return;
}
int captured_width = CVPixelBufferGetWidth(image_buffer);
int captured_height = CVPixelBufferGetHeight(image_buffer);
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
int64_t translated_camera_time_us;
if (!AdaptFrame(captured_width, captured_height,
rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
rtc::TimeMicros(), &adapted_width, &adapted_height,
&crop_width, &crop_height, &crop_x, &crop_y,
&translated_camera_time_us)) {
return;
}
RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:image_buffer
adaptedWidth:adapted_width
adaptedHeight:adapted_height
cropWidth:crop_width
cropHeight:crop_height
cropX:crop_x
cropY:crop_y];
rtc::scoped_refptr<VideoFrameBuffer> buffer =
new rtc::RefCountedObject<ObjCFrameBuffer>(rtcPixelBuffer);
// Applying rotation is only supported for legacy reasons and performance is
// not critical here.
if (apply_rotation() && rotation != kVideoRotation_0) {
buffer = I420Buffer::Rotate(*buffer->ToI420(), rotation);
if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
std::swap(captured_width, captured_height);
}
rotation = kVideoRotation_0;
}
OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us),
captured_width, captured_height);
}
} // namespace webrtc

sdk/objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h (deleted)

@@ -1,55 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <WebRTC/RTCMacros.h>
#import <WebRTC/RTCVideoSource.h>
@class AVCaptureSession;
@class RTCMediaConstraints;
@class RTCPeerConnectionFactory;
NS_ASSUME_NONNULL_BEGIN
/**
* DEPRECATED Use RTCCameraVideoCapturer instead.
*
* RTCAVFoundationVideoSource is a video source that uses
* webrtc::AVFoundationVideoCapturer. We do not currently provide a wrapper for
* that capturer because cricket::VideoCapturer is not ref counted and we cannot
* guarantee its lifetime. Instead, we expose its properties through the ref
* counted video source interface.
*/
RTC_EXPORT
@interface RTCAVFoundationVideoSource : RTCVideoSource
- (instancetype)init NS_UNAVAILABLE;
/**
* Calling this function will cause frames to be scaled down to the
* requested resolution. Also, frames will be cropped to match the
* requested aspect ratio, and frames will be dropped to match the
* requested fps. The requested aspect ratio is orientation agnostic and
* will be adjusted to maintain the input orientation, so it doesn't
* matter if e.g. 1280x720 or 720x1280 is requested.
*/
- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps;
/** Returns whether rear-facing camera is available for use. */
@property(nonatomic, readonly) BOOL canUseBackCamera;
/** Switches the camera being used (either front or back). */
@property(nonatomic, assign) BOOL useBackCamera;
/** Returns the active capture session. */
@property(nonatomic, readonly) AVCaptureSession *captureSession;
@end
NS_ASSUME_NONNULL_END

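The adaptation behavior documented in the header above does not go away with the class: plain RTCVideoSource carries the same hook. A minimal sketch, assuming the adaptOutputFormatToWidth:height:fps: method declared on RTCVideoSource in this SDK generation:

#import <WebRTC/RTCPeerConnectionFactory.h>
#import <WebRTC/RTCVideoSource.h>

// Sketch: the same scale/crop/drop request, now on plain RTCVideoSource.
static RTCVideoSource *CreateAdaptedVideoSource(RTCPeerConnectionFactory *factory) {
  RTCVideoSource *source = [factory videoSource];
  // Ask for at most 640x480 at 15 fps; the requested aspect ratio is
  // orientation agnostic, exactly as the deleted header documents.
  [source adaptOutputFormatToWidth:640 height:480 fps:15];
  return source;
}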
sdk/objc/Framework/Headers/WebRTC/RTCCameraPreviewView.h

@@ -14,7 +14,6 @@
 #import <WebRTC/RTCMacros.h>
 
 @class AVCaptureSession;
-@class RTCAVFoundationVideoSource;
 
 /** RTCCameraPreviewView is a view that renders local video from an
  * AVCaptureSession.

sdk/objc/Framework/Headers/WebRTC/RTCPeerConnectionFactory.h

@@ -14,7 +14,6 @@
 NS_ASSUME_NONNULL_BEGIN
 
-@class RTCAVFoundationVideoSource;
 @class RTCAudioSource;
 @class RTCAudioTrack;
 @class RTCConfiguration;
@@ -50,10 +49,6 @@ RTC_EXPORT
 - (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source
                                 trackId:(NSString *)trackId;
 
-/** Initialize an RTCAVFoundationVideoSource with constraints. */
-- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
-        (nullable RTCMediaConstraints *)constraints;
-
 /** Initialize a generic RTCVideoSource. The RTCVideoSource should be passed to a RTCVideoCapturer
  * implementation, e.g. RTCCameraVideoCapturer, in order to produce frames.
  */

ViewController.m

@@ -13,7 +13,6 @@
 @interface ViewController ()
 
 @property (nonatomic, strong) RTCPeerConnectionFactory *factory;
-@property (nonatomic, strong) RTCAVFoundationVideoSource *videoSource;
 
 @end
 
 @implementation ViewController
@@ -21,7 +20,6 @@
 - (void)viewDidLoad {
   [super viewDidLoad];
   self.factory = [[RTCPeerConnectionFactory alloc] init];
-  self.videoSource = [self.factory avFoundationVideoSourceWithConstraints:nil];
 }
 
 @end