Explicitly retain self in objc blocks to avoid compiler warning.

Implicitly retaining the `self` pointer inside a block (even where that behavior is intended) triggers the `-Wimplicit-retain-self` compiler warning. We should retain it explicitly instead.
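
For illustration, this is the pattern the change applies; the class and selector names below (MYClient, -clientDidFinish:, -doSomethingWithCompletion:, -handleError:) are placeholders, not code from this CL:

// Implicit: a bare ivar reference is shorthand for self->_ivar, so the
// block retains self without saying so; -Wimplicit-retain-self flags it.
dispatch_async(dispatch_get_main_queue(), ^{
  [_delegate clientDidFinish:self];
});

// Explicit: the capture of self is visible at the call site.
dispatch_async(dispatch_get_main_queue(), ^{
  [self.delegate clientDidFinish:self];
});

// For completion handlers that may outlive the object, capture weakly and
// re-strongify inside the block, as the diffs below do for
// setLocalDescription: and answerForConstraints:.
__weak MYClient *weakSelf = self;
[self doSomethingWithCompletion:^(NSError *error) {
  MYClient *strongSelf = weakSelf;
  [strongSelf handleError:error];
}];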

Bug: webrtc:9971
Change-Id: If77a67168d8a65ced78d5119b9a7332391d20bc9
Reviewed-on: https://webrtc-review.googlesource.com/c/109641
Commit-Queue: Jiawei Ou <ouj@fb.com>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25609}
Author: Jiawei Ou <ouj@fb.com>, 2018-11-09 13:55:45 -08:00 (committed by Commit Bot)
parent 0c32e33b48
commit 4aeb35b6d0
10 changed files with 248 additions and 208 deletions

View File

@@ -400,7 +400,7 @@ static int const kKbpsMultiplier = 1000;
didChangeIceConnectionState:(RTCIceConnectionState)newState {
RTCLog(@"ICE state changed: %ld", (long)newState);
dispatch_async(dispatch_get_main_queue(), ^{
[_delegate appClient:self didChangeConnectionState:newState];
[self.delegate appClient:self didChangeConnectionState:newState];
});
}
@@ -450,16 +450,16 @@ static int const kKbpsMultiplier = 1000;
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorCreateSDP
userInfo:userInfo];
[_delegate appClient:self didError:sdpError];
[self.delegate appClient:self didError:sdpError];
return;
}
__weak ARDAppClient *weakSelf = self;
[_peerConnection setLocalDescription:sdp
completionHandler:^(NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didSetSessionDescriptionWithError:error];
}];
[self.peerConnection setLocalDescription:sdp
completionHandler:^(NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didSetSessionDescriptionWithError:error];
}];
ARDSessionDescriptionMessage *message =
[[ARDSessionDescriptionMessage alloc] initWithDescription:sdp];
[self sendSignalingMessage:message];
@@ -480,22 +480,21 @@ static int const kKbpsMultiplier = 1000;
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorSetSDP
userInfo:userInfo];
[_delegate appClient:self didError:sdpError];
[self.delegate appClient:self didError:sdpError];
return;
}
// If we're answering and we've just set the remote offer we need to create
// an answer and set the local description.
if (!_isInitiator && !_peerConnection.localDescription) {
if (!self.isInitiator && !self.peerConnection.localDescription) {
RTCMediaConstraints *constraints = [self defaultAnswerConstraints];
__weak ARDAppClient *weakSelf = self;
[_peerConnection answerForConstraints:constraints
completionHandler:^(RTCSessionDescription *sdp,
NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
[self.peerConnection answerForConstraints:constraints
completionHandler:^(RTCSessionDescription *sdp, NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
}
});
}

View File

@@ -167,23 +167,43 @@
return deltaFramesEncoded != 0 ? deltaQPSum / deltaFramesEncoded : 0;
}
- (void)updateBweStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googAvailableSendBandwidth"]) {
_availableSendBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googAvailableReceiveBandwidth"]) {
_availableRecvBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googActualEncBitrate"]) {
_actualEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googTargetEncBitrate"]) {
_targetEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
}
}
- (void)parseBweStatsReport:(RTCLegacyStatsReport *)statsReport {
[statsReport.values enumerateKeysAndObjectsUsingBlock:^(
NSString *key, NSString *value, BOOL *stop) {
if ([key isEqualToString:@"googAvailableSendBandwidth"]) {
_availableSendBw =
[ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googAvailableReceiveBandwidth"]) {
_availableRecvBw =
[ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googActualEncBitrate"]) {
_actualEncBitrate =
[ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googTargetEncBitrate"]) {
_targetEncBitrate =
[ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
}
}];
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateBweStatOfKey:key value:value];
}];
}
- (void)updateConnectionStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googRtt"]) {
_connRtt = value;
} else if ([key isEqualToString:@"googLocalCandidateType"]) {
_localCandType = value;
} else if ([key isEqualToString:@"googRemoteCandidateType"]) {
_remoteCandType = value;
} else if ([key isEqualToString:@"googTransportType"]) {
_transportType = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_connRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_connRecvBitrate = _connRecvBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_connSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_connSendBitrate = _connSendBitrateTracker.bitrateString;
}
}
- (void)parseConnectionStatsReport:(RTCLegacyStatsReport *)statsReport {
@@ -191,26 +211,10 @@
if (![activeConnection isEqualToString:@"true"]) {
return;
}
[statsReport.values enumerateKeysAndObjectsUsingBlock:^(
NSString *key, NSString *value, BOOL *stop) {
if ([key isEqualToString:@"googRtt"]) {
_connRtt = value;
} else if ([key isEqualToString:@"googLocalCandidateType"]) {
_localCandType = value;
} else if ([key isEqualToString:@"googRemoteCandidateType"]) {
_remoteCandType = value;
} else if ([key isEqualToString:@"googTransportType"]) {
_transportType = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_connRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_connRecvBitrate = _connRecvBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_connSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_connSendBitrate = _connSendBitrateTracker.bitrateString;
}
}];
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateConnectionStatOfKey:key value:value];
}];
}
- (void)parseSendSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
@@ -224,50 +228,58 @@
}
}
- (void)updateAudioSendStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googCodecName"]) {
_audioSendCodec = value;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_audioSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_audioSendBitrate = _audioSendBitrateTracker.bitrateString;
}
}
- (void)parseAudioSendStatsReport:(RTCLegacyStatsReport *)statsReport {
[statsReport.values enumerateKeysAndObjectsUsingBlock:^(
NSString *key, NSString *value, BOOL *stop) {
if ([key isEqualToString:@"googCodecName"]) {
_audioSendCodec = value;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_audioSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_audioSendBitrate = _audioSendBitrateTracker.bitrateString;
}
}];
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateAudioSendStatOfKey:key value:value];
}];
}
- (void)updateVideoSendStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googCodecName"]) {
_videoSendCodec = value;
} else if ([key isEqualToString:@"googFrameHeightInput"]) {
_videoInputHeight = value;
} else if ([key isEqualToString:@"googFrameWidthInput"]) {
_videoInputWidth = value;
} else if ([key isEqualToString:@"googFrameRateInput"]) {
_videoInputFps = value;
} else if ([key isEqualToString:@"googFrameHeightSent"]) {
_videoSendHeight = value;
} else if ([key isEqualToString:@"googFrameWidthSent"]) {
_videoSendWidth = value;
} else if ([key isEqualToString:@"googFrameRateSent"]) {
_videoSendFps = value;
} else if ([key isEqualToString:@"googAvgEncodeMs"]) {
_videoEncodeMs = value;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_videoSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_videoSendBitrate = _videoSendBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"qpSum"]) {
_oldVideoQPSum = _videoQPSum;
_videoQPSum = value.integerValue;
} else if ([key isEqualToString:@"framesEncoded"]) {
_oldFramesEncoded = _framesEncoded;
_framesEncoded = value.integerValue;
}
}
- (void)parseVideoSendStatsReport:(RTCLegacyStatsReport *)statsReport {
[statsReport.values enumerateKeysAndObjectsUsingBlock:^(
NSString *key, NSString *value, BOOL *stop) {
if ([key isEqualToString:@"googCodecName"]) {
_videoSendCodec = value;
} else if ([key isEqualToString:@"googFrameHeightInput"]) {
_videoInputHeight = value;
} else if ([key isEqualToString:@"googFrameWidthInput"]) {
_videoInputWidth = value;
} else if ([key isEqualToString:@"googFrameRateInput"]) {
_videoInputFps = value;
} else if ([key isEqualToString:@"googFrameHeightSent"]) {
_videoSendHeight = value;
} else if ([key isEqualToString:@"googFrameWidthSent"]) {
_videoSendWidth = value;
} else if ([key isEqualToString:@"googFrameRateSent"]) {
_videoSendFps = value;
} else if ([key isEqualToString:@"googAvgEncodeMs"]) {
_videoEncodeMs = value;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_videoSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_videoSendBitrate = _videoSendBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"qpSum"]) {
_oldVideoQPSum = _videoQPSum;
_videoQPSum = value.integerValue;
} else if ([key isEqualToString:@"framesEncoded"]) {
_oldFramesEncoded = _framesEncoded;
_framesEncoded = value.integerValue;
}
}];
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateVideoSendStatOfKey:key value:value];
}];
}
- (void)parseRecvSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
@@ -281,44 +293,52 @@
}
}
- (void)updateAudioRecvStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googCodecName"]) {
_audioRecvCodec = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_audioRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_audioRecvBitrate = _audioRecvBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"googSpeechExpandRate"]) {
_audioExpandRate = value;
} else if ([key isEqualToString:@"googCurrentDelayMs"]) {
_audioCurrentDelay = value;
}
}
- (void)parseAudioRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
[statsReport.values enumerateKeysAndObjectsUsingBlock:^(
NSString *key, NSString *value, BOOL *stop) {
if ([key isEqualToString:@"googCodecName"]) {
_audioRecvCodec = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_audioRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_audioRecvBitrate = _audioRecvBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"googSpeechExpandRate"]) {
_audioExpandRate = value;
} else if ([key isEqualToString:@"googCurrentDelayMs"]) {
_audioCurrentDelay = value;
}
}];
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateAudioRecvStatOfKey:key value:value];
}];
}
- (void)updateVideoRecvStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googFrameHeightReceived"]) {
_videoRecvHeight = value;
} else if ([key isEqualToString:@"googFrameWidthReceived"]) {
_videoRecvWidth = value;
} else if ([key isEqualToString:@"googFrameRateReceived"]) {
_videoRecvFps = value;
} else if ([key isEqualToString:@"googFrameRateDecoded"]) {
_videoDecodedFps = value;
} else if ([key isEqualToString:@"googFrameRateOutput"]) {
_videoOutputFps = value;
} else if ([key isEqualToString:@"googDecodeMs"]) {
_videoDecodeMs = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_videoRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_videoRecvBitrate = _videoRecvBitrateTracker.bitrateString;
}
}
- (void)parseVideoRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
[statsReport.values enumerateKeysAndObjectsUsingBlock:^(
NSString *key, NSString *value, BOOL *stop) {
if ([key isEqualToString:@"googFrameHeightReceived"]) {
_videoRecvHeight = value;
} else if ([key isEqualToString:@"googFrameWidthReceived"]) {
_videoRecvWidth = value;
} else if ([key isEqualToString:@"googFrameRateReceived"]) {
_videoRecvFps = value;
} else if ([key isEqualToString:@"googFrameRateDecoded"]) {
_videoDecodedFps = value;
} else if ([key isEqualToString:@"googFrameRateOutput"]) {
_videoOutputFps = value;
} else if ([key isEqualToString:@"googDecodeMs"]) {
_videoDecodeMs = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_videoRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_videoRecvBitrate = _videoRecvBitrateTracker.bitrateString;
}
}];
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateVideoRecvStatOfKey:key value:value];
}];
}
@end
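
Why the stats refactor above is needed: inside a block, a bare ivar reference such as _availableSendBw is really self->_availableSendBw, an implicit strong capture of self. Hoisting the key/value dispatch into an instance method leaves only an explicit [self ...] message in the block. A minimal sketch of the same shape, with a hypothetical ivar and selector:

// Before: warns under -Wimplicit-retain-self, because _count is self->_count.
- (void)parseValues:(NSDictionary<NSString *, NSString *> *)values {
  [values enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
    _count += value.integerValue;
  }];
}

// After: the block contains only an explicit message to self.
- (void)parseValues:(NSDictionary<NSString *, NSString *> *)values {
  [values enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
    [self addValue:value];
  }];
}

- (void)addValue:(NSString *)value {
  _count += value.integerValue;  // ivar access outside a block is fine
}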

View File

@@ -32,14 +32,17 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
ARDMainViewDelegate,
ARDVideoCallViewControllerDelegate,
RTCAudioSessionDelegate>
@property(nonatomic, strong) ARDMainView *mainView;
@property(nonatomic, strong) AVAudioPlayer *audioPlayer;
@end
@implementation ARDMainViewController {
ARDMainView *_mainView;
AVAudioPlayer *_audioPlayer;
BOOL _useManualAudio;
}
@synthesize mainView = _mainView;
@synthesize audioPlayer = _audioPlayer;
- (void)viewDidLoad {
[super viewDidLoad];
if ([[[NSProcessInfo processInfo] arguments] containsObject:loopbackLaunchProcessArgument]) {
@@ -165,13 +168,13 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
// Stop playback on main queue and then configure WebRTC.
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
if (_mainView.isAudioLoopPlaying) {
RTCLog(@"Stopping audio loop due to WebRTC start.");
[_audioPlayer stop];
}
RTCLog(@"Setting isAudioEnabled to YES.");
session.isAudioEnabled = YES;
}];
if (self.mainView.isAudioLoopPlaying) {
RTCLog(@"Stopping audio loop due to WebRTC start.");
[self.audioPlayer stop];
}
RTCLog(@"Setting isAudioEnabled to YES.");
session.isAudioEnabled = YES;
}];
}
- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {

View File

@@ -27,6 +27,7 @@
RTCAudioSessionDelegate>
@property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack;
@property(nonatomic, readonly) ARDVideoCallView *videoCallView;
@property(nonatomic, assign) AVAudioSessionPortOverride portOverride;
@end
@implementation ARDVideoCallViewController {
@@ -34,12 +35,12 @@
RTCVideoTrack *_remoteVideoTrack;
ARDCaptureController *_captureController;
ARDFileCaptureController *_fileCaptureController NS_AVAILABLE_IOS(10);
AVAudioSessionPortOverride _portOverride;
}
@synthesize videoCallView = _videoCallView;
@synthesize remoteVideoTrack = _remoteVideoTrack;
@synthesize delegate = _delegate;
@synthesize portOverride = _portOverride;
- (instancetype)initForRoom:(NSString *)room
isLoopback:(BOOL)isLoopback
@@ -168,7 +169,7 @@
[session lockForConfiguration];
NSError *error = nil;
if ([session overrideOutputAudioPort:override error:&error]) {
_portOverride = override;
self.portOverride = override;
} else {
RTCLogError(@"Error overriding output port: %@",
error.localizedDescription);

View File

@@ -39,6 +39,7 @@ static NSUInteger const kBottomViewHeight = 200;
@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* localVideoView;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* remoteVideoView;
@property(nonatomic, readonly) NSTextView* logView;
- (void)displayLogMessage:(NSString*)message;
@@ -52,7 +53,6 @@ static NSUInteger const kBottomViewHeight = 200;
NSButton* _connectButton;
NSButton* _loopbackButton;
NSTextField* _roomField;
NSTextView* _logView;
CGSize _localVideoSize;
CGSize _remoteVideoSize;
}
@@ -60,14 +60,13 @@ static NSUInteger const kBottomViewHeight = 200;
@synthesize delegate = _delegate;
@synthesize localVideoView = _localVideoView;
@synthesize remoteVideoView = _remoteVideoView;
@synthesize logView = _logView;
- (void)displayLogMessage:(NSString *)message {
dispatch_async(dispatch_get_main_queue(), ^{
_logView.string =
[NSString stringWithFormat:@"%@%@\n", _logView.string, message];
NSRange range = NSMakeRange(_logView.string.length, 0);
[_logView scrollRangeToVisible:range];
self.logView.string = [NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
NSRange range = NSMakeRange(self.logView.string.length, 0);
[self.logView scrollRangeToVisible:range];
});
}

View File

@@ -40,6 +40,7 @@ if (is_ios || is_mac) {
"objc/Framework/Headers", # TODO(bugs.webrtc.org/9627): Remove this.
]
cflags = [
"-Wimplicit-retain-self",
"-Wstrict-overflow",
"-Wmissing-field-initializers",
]
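
Aside: if an implicit self capture is ever genuinely intended, it can be silenced at the use site with clang diagnostic pragmas instead of dropping the flag; a sketch, not part of this CL:

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wimplicit-retain-self"
dispatch_async(dispatch_get_main_queue(), ^{
  [_delegate clientDidFinish:self];  // deliberate implicit capture
});
#pragma clang diagnostic pop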

View File

@@ -26,18 +26,18 @@ const int64_t kNanosecondsPerSecond = 1000000000;
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, strong) AVCaptureDevice *currentDevice;
@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
@property(nonatomic, assign) BOOL isRunning;
// Will the session be running once all asynchronous operations have been completed?
@property(nonatomic, assign) BOOL willBeRunning;
@end
@implementation RTCCameraVideoCapturer {
AVCaptureVideoDataOutput *_videoDataOutput;
AVCaptureSession *_captureSession;
AVCaptureDevice *_currentDevice;
FourCharCode _preferredOutputPixelFormat;
FourCharCode _outputPixelFormat;
BOOL _hasRetriedOnFatalError;
BOOL _isRunning;
// Will the session be running once all asynchronous operations have been completed?
BOOL _willBeRunning;
RTCVideoRotation _rotation;
#if TARGET_OS_IPHONE
UIDeviceOrientation _orientation;
@@ -46,6 +46,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;
@synthesize currentDevice = _currentDevice;
@synthesize hasRetriedOnFatalError = _hasRetriedOnFatalError;
@synthesize isRunning = _isRunning;
@synthesize willBeRunning = _willBeRunning;
- (instancetype)init {
return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
@@ -157,25 +161,26 @@ const int64_t kNanosecondsPerSecond = 1000000000;
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
_currentDevice = device;
self.currentDevice = device;
NSError *error = nil;
if (![_currentDevice lockForConfiguration:&error]) {
RTCLogError(
@"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
if (![self.currentDevice lockForConfiguration:&error]) {
RTCLogError(@"Failed to lock device %@. Error: %@",
self.currentDevice,
error.userInfo);
if (completionHandler) {
completionHandler(error);
}
_willBeRunning = NO;
self.willBeRunning = NO;
return;
}
[self reconfigureCaptureSessionInput];
[self updateOrientation];
[self updateDeviceCaptureFormat:format fps:fps];
[self updateVideoDataOutputPixelFormat:format];
[_captureSession startRunning];
[_currentDevice unlockForConfiguration];
_isRunning = YES;
[self.captureSession startRunning];
[self.currentDevice unlockForConfiguration];
self.isRunning = YES;
if (completionHandler) {
completionHandler(nil);
}
@@ -188,16 +193,16 @@ const int64_t kNanosecondsPerSecond = 1000000000;
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("Stop");
_currentDevice = nil;
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
[_captureSession removeInput:oldInput];
self.currentDevice = nil;
for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
[self.captureSession removeInput:oldInput];
}
[_captureSession stopRunning];
[self.captureSession stopRunning];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
_isRunning = NO;
self.isRunning = NO;
if (completionHandler) {
completionHandler();
}
@@ -340,7 +345,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
block:^{
// If we successfully restarted after an unknown error,
// allow future retries on fatal errors.
_hasRetriedOnFatalError = NO;
self.hasRetriedOnFatalError = NO;
}];
}
@@ -352,10 +357,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (!_hasRetriedOnFatalError) {
if (!self.hasRetriedOnFatalError) {
RTCLogWarning(@"Attempting to recover from fatal capture error.");
[self handleNonFatalError];
_hasRetriedOnFatalError = YES;
self.hasRetriedOnFatalError = YES;
} else {
RTCLogError(@"Previous fatal error recovery failed.");
}
@@ -366,8 +371,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (_isRunning) {
[_captureSession startRunning];
if (self.isRunning) {
[self.captureSession startRunning];
}
}];
}
@@ -379,9 +384,9 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (_isRunning && !_captureSession.isRunning) {
if (self.isRunning && !self.captureSession.isRunning) {
RTCLog(@"Restarting capture session on active.");
[_captureSession startRunning];
[self.captureSession startRunning];
}
}];
}

View File

@@ -27,15 +27,21 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
RTCFileVideoCapturerStatusStopped
};
@interface RTCFileVideoCapturer ()
@property(nonatomic, assign) CMTime lastPresentationTime;
@property(nonatomic, strong) NSURL *fileURL;
@end
@implementation RTCFileVideoCapturer {
AVAssetReader *_reader;
AVAssetReaderTrackOutput *_outTrack;
RTCFileVideoCapturerStatus _status;
CMTime _lastPresentationTime;
dispatch_queue_t _frameQueue;
NSURL *_fileURL;
}
@synthesize lastPresentationTime = _lastPresentationTime;
@synthesize fileURL = _fileURL;
- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
if (_status == RTCFileVideoCapturerStatusStarted) {
@@ -62,9 +68,9 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
return;
}
_lastPresentationTime = CMTimeMake(0, 0);
self.lastPresentationTime = CMTimeMake(0, 0);
_fileURL = [NSURL fileURLWithPath:pathForFile];
self.fileURL = [NSURL fileURLWithPath:pathForFile];
[self setupReaderOnError:errorBlock];
});
}

View File

@@ -47,19 +47,22 @@
if (_captureSession == captureSession) {
return;
}
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
_captureSession = captureSession;
AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
previewLayer.session = captureSession;
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
[self setCorrectVideoOrientation];
}];
}];
}];
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
self.captureSession = captureSession;
AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
previewLayer.session = captureSession;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
[self setCorrectVideoOrientation];
}];
}];
}];
}
- (void)layoutSubviews {

View File

@@ -128,17 +128,20 @@ static const NSUInteger kFullDuplexTimeInSec = 10;
static const NSUInteger kNumIgnoreFirstCallbacks = 50;
@interface RTCAudioDeviceModuleTests : XCTestCase {
rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule;
webrtc::AudioParameters playoutParameters;
webrtc::AudioParameters recordParameters;
MockAudioTransport mock;
}
@property(nonatomic, assign) webrtc::AudioParameters playoutParameters;
@property(nonatomic, assign) webrtc::AudioParameters recordParameters;
@end
@implementation RTCAudioDeviceModuleTests
@synthesize playoutParameters;
@synthesize recordParameters;
- (void)setUp {
[super setUp];
audioDeviceModule = webrtc::CreateAudioDeviceModule();
@@ -254,10 +257,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms) {
nSamplesOut = nSamples;
XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, playoutParameters.channels());
XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
return 0;
@@ -291,10 +294,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms) {
nSamplesOut = nSamples;
XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, playoutParameters.channels());
XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
if (++num_callbacks == kNumCallbacks) {
[playoutExpectation fulfill];
@@ -330,10 +333,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms) {
nSamplesOut = nSamples;
XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, playoutParameters.channels());
XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
if (++num_callbacks == kNumCallbacks) {
[playoutExpectation fulfill];
@@ -366,10 +369,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
const bool keyPressed,
uint32_t& newMicLevel) {
XCTAssertNotEqual((void*)NULL, audioSamples);
XCTAssertEqual(nSamples, recordParameters.frames_per_10ms_buffer());
XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, recordParameters.channels());
XCTAssertEqual((int) samplesPerSec, recordParameters.sample_rate());
XCTAssertEqual(nChannels, self.recordParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
XCTAssertEqual(0, clockDrift);
XCTAssertEqual(0u, currentMicLevel);
XCTAssertFalse(keyPressed);
@@ -405,10 +408,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms) {
nSamplesOut = nSamples;
XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, playoutParameters.channels());
XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
if (callbackCount++ >= kNumCallbacks) {
[playoutExpectation fulfill];
@@ -428,10 +431,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
const bool keyPressed,
uint32_t& newMicLevel) {
XCTAssertNotEqual((void*)NULL, audioSamples);
XCTAssertEqual(nSamples, recordParameters.frames_per_10ms_buffer());
XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, recordParameters.channels());
XCTAssertEqual((int) samplesPerSec, recordParameters.sample_rate());
XCTAssertEqual(nChannels, self.recordParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
XCTAssertEqual(0, clockDrift);
XCTAssertEqual(0u, currentMicLevel);
XCTAssertFalse(keyPressed);