Format all .m objc files

Formatting done via:
git ls-files | grep -E '.*\.m$' | xargs clang-format -i

After applying the command, I manually excluded the Matlab .m files, which I will handle separately.
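
For future runs, the manual step could be avoided by filtering on file content rather than extension alone. A minimal sketch (the Objective-C detection heuristic below is an illustrative assumption, not what was actually run):

git ls-files '*.m' | while read -r f; do
  # Hypothetical heuristic: format only files that contain Objective-C
  # markers, leaving Matlab scripts (which share the .m extension) alone.
  if grep -q -E '^#import|^@(interface|implementation)' "$f"; then
    clang-format -i "$f"
  fi
done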

No-Iwyu: Includes didn't change and it isn't related to formatting
Bug: webrtc:42225392
Change-Id: I40d11fd6b650ee0d90d92cbd6fc6aa6c78e1fea3
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/373887
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43706}
Author: Boris Tsirkin, 2025-01-09 02:36:06 -08:00 (committed by WebRTC LUCI CQ)
Parent: 2c96934699
Commit: fc7e874677
49 changed files with 1433 additions and 1027 deletions
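
The hunks below are formatting-only: pointer declarators rebind to the name (NSString * const becomes NSString *const), long message sends wrap at the 80-column limit, and dictionary literals get a space before the colon. One way to verify a tree is clean under the checked-in style is a dry run; this sketch assumes clang-format 10 or newer, which added --dry-run:

git ls-files '*.m' | xargs clang-format --style=file --dry-run --Werror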


@@ -41,25 +41,25 @@
#import "RTCIceCandidate+JSON.h"
#import "RTCSessionDescription+JSON.h"
static NSString * const kARDIceServerRequestUrl = @"https://appr.tc/params";
static NSString *const kARDIceServerRequestUrl = @"https://appr.tc/params";
static NSString * const kARDAppClientErrorDomain = @"ARDAppClient";
static NSString *const kARDAppClientErrorDomain = @"ARDAppClient";
static NSInteger const kARDAppClientErrorUnknown = -1;
static NSInteger const kARDAppClientErrorRoomFull = -2;
static NSInteger const kARDAppClientErrorCreateSDP = -3;
static NSInteger const kARDAppClientErrorSetSDP = -4;
static NSInteger const kARDAppClientErrorInvalidClient = -5;
static NSInteger const kARDAppClientErrorInvalidRoom = -6;
static NSString * const kARDMediaStreamId = @"ARDAMS";
static NSString * const kARDAudioTrackId = @"ARDAMSa0";
static NSString * const kARDVideoTrackId = @"ARDAMSv0";
static NSString * const kARDVideoTrackKind = @"video";
static NSString *const kARDMediaStreamId = @"ARDAMS";
static NSString *const kARDAudioTrackId = @"ARDAMSa0";
static NSString *const kARDVideoTrackId = @"ARDAMSv0";
static NSString *const kARDVideoTrackKind = @"video";
// TODO(tkchin): Add these as UI options.
#if defined(WEBRTC_IOS)
static BOOL const kARDAppClientEnableTracing = NO;
static BOOL const kARDAppClientEnableRtcEventLog = YES;
static int64_t const kARDAppClientAecDumpMaxSizeInBytes = 5e6; // 5 MB.
static int64_t const kARDAppClientAecDumpMaxSizeInBytes = 5e6; // 5 MB.
static int64_t const kARDAppClientRtcEventLogMaxSizeInBytes = 5e6; // 5 MB.
#endif
static int const kKbpsMultiplier = 1000;
@@ -124,7 +124,7 @@ static int const kKbpsMultiplier = 1000;
@synthesize factory = _factory;
@synthesize messageQueue = _messageQueue;
@synthesize isTurnComplete = _isTurnComplete;
@synthesize hasReceivedSdp = _hasReceivedSdp;
@synthesize hasReceivedSdp = _hasReceivedSdp;
@synthesize roomId = _roomId;
@synthesize clientId = _clientId;
@synthesize isInitiator = _isInitiator;
@@ -191,18 +191,21 @@ static int const kKbpsMultiplier = 1000;
}
if (shouldGetStats) {
__weak ARDAppClient *weakSelf = self;
_statsTimer = [[ARDTimerProxy alloc] initWithInterval:1
repeats:YES
timerHandler:^{
ARDAppClient *strongSelf = weakSelf;
[strongSelf.peerConnection statisticsWithCompletionHandler:^(
RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
dispatch_async(dispatch_get_main_queue(), ^{
ARDAppClient *strongSelf = weakSelf;
[strongSelf.delegate appClient:strongSelf didGetStats:stats];
});
}];
}];
_statsTimer = [[ARDTimerProxy alloc]
initWithInterval:1
repeats:YES
timerHandler:^{
ARDAppClient *strongSelf = weakSelf;
[strongSelf.peerConnection
statisticsWithCompletionHandler:^(
RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
dispatch_async(dispatch_get_main_queue(), ^{
ARDAppClient *strongSelf = weakSelf;
[strongSelf.delegate appClient:strongSelf
didGetStats:stats];
});
}];
}];
} else {
[_statsTimer invalidate];
_statsTimer = nil;
@@ -232,13 +235,14 @@ static int const kKbpsMultiplier = 1000;
RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
[[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
_factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory
decoderFactory:decoderFactory];
_factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
initWithEncoderFactory:encoderFactory
decoderFactory:decoderFactory];
#if defined(WEBRTC_IOS)
if (kARDAppClientEnableTracing) {
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
NSString *filePath =
[self documentsFilePathForFileName:@"webrtc-trace.txt"];
RTCStartInternalCapture(filePath);
}
#endif
@@ -248,7 +252,8 @@ static int const kKbpsMultiplier = 1000;
[_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
NSError *error) {
if (error) {
RTCLogError(@"Error retrieving TURN servers: %@", error.localizedDescription);
RTCLogError(@"Error retrieving TURN servers: %@",
error.localizedDescription);
}
ARDAppClient *strongSelf = weakSelf;
[strongSelf.iceServers addObjectsFromArray:turnServers];
@@ -257,40 +262,41 @@ static int const kKbpsMultiplier = 1000;
}];
// Join room on room server.
[_roomServerClient joinRoomWithRoomId:roomId
isLoopback:isLoopback
completionHandler:^(ARDJoinResponse *response, NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
return;
}
NSError *joinError =
[[strongSelf class] errorForJoinResultType:response.result];
if (joinError) {
RTCLogError(@"Failed to join room:%@ on room server.", roomId);
[strongSelf disconnect];
[strongSelf.delegate appClient:strongSelf didError:joinError];
return;
}
RTCLog(@"Joined room:%@ on room server.", roomId);
strongSelf.roomId = response.roomId;
strongSelf.clientId = response.clientId;
strongSelf.isInitiator = response.isInitiator;
for (ARDSignalingMessage *message in response.messages) {
if (message.type == kARDSignalingMessageTypeOffer ||
message.type == kARDSignalingMessageTypeAnswer) {
strongSelf.hasReceivedSdp = YES;
[strongSelf.messageQueue insertObject:message atIndex:0];
} else {
[strongSelf.messageQueue addObject:message];
}
}
strongSelf.webSocketURL = response.webSocketURL;
strongSelf.webSocketRestURL = response.webSocketRestURL;
[strongSelf registerWithColliderIfReady];
[strongSelf startSignalingIfReady];
}];
[_roomServerClient
joinRoomWithRoomId:roomId
isLoopback:isLoopback
completionHandler:^(ARDJoinResponse *response, NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
return;
}
NSError *joinError =
[[strongSelf class] errorForJoinResultType:response.result];
if (joinError) {
RTCLogError(@"Failed to join room:%@ on room server.", roomId);
[strongSelf disconnect];
[strongSelf.delegate appClient:strongSelf didError:joinError];
return;
}
RTCLog(@"Joined room:%@ on room server.", roomId);
strongSelf.roomId = response.roomId;
strongSelf.clientId = response.clientId;
strongSelf.isInitiator = response.isInitiator;
for (ARDSignalingMessage *message in response.messages) {
if (message.type == kARDSignalingMessageTypeOffer ||
message.type == kARDSignalingMessageTypeAnswer) {
strongSelf.hasReceivedSdp = YES;
[strongSelf.messageQueue insertObject:message atIndex:0];
} else {
[strongSelf.messageQueue addObject:message];
}
}
strongSelf.webSocketURL = response.webSocketURL;
strongSelf.webSocketRestURL = response.webSocketRestURL;
[strongSelf registerWithColliderIfReady];
[strongSelf startSignalingIfReady];
}];
}
- (void)disconnect {
@@ -388,7 +394,8 @@ static int const kKbpsMultiplier = 1000;
}
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
didStartReceivingOnTransceiver:
(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track;
RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
}
@@ -398,7 +405,8 @@ static int const kKbpsMultiplier = 1000;
RTCLog(@"Stream was removed.");
}
- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
- (void)peerConnectionShouldNegotiate:
(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
}
@@ -430,8 +438,10 @@ static int const kKbpsMultiplier = 1000;
}
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event {
RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, errorCode: %d, "
didFailToGatherIceCandidate:
(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event {
RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, "
@"errorCode: %d, "
@"errorText: %@",
event.address,
event.port,
@@ -441,7 +451,8 @@ static int const kKbpsMultiplier = 1000;
}
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
didRemoveIceCandidates:
(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
dispatch_async(dispatch_get_main_queue(), ^{
ARDICECandidateRemovalMessage *message =
[[ARDICECandidateRemovalMessage alloc]
@@ -474,7 +485,7 @@ static int const kKbpsMultiplier = 1000;
RTCLogError(@"Failed to create session description. Error: %@", error);
[self disconnect];
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey: @"Failed to create session description.",
NSLocalizedDescriptionKey : @"Failed to create session description.",
};
NSError *sdpError =
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
@@ -484,12 +495,13 @@ static int const kKbpsMultiplier = 1000;
return;
}
__weak ARDAppClient *weakSelf = self;
[self.peerConnection setLocalDescription:sdp
completionHandler:^(NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didSetSessionDescriptionWithError:error];
}];
[self.peerConnection
setLocalDescription:sdp
completionHandler:^(NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didSetSessionDescriptionWithError:error];
}];
ARDSessionDescriptionMessage *message =
[[ARDSessionDescriptionMessage alloc] initWithDescription:sdp];
[self sendSignalingMessage:message];
@@ -504,7 +516,7 @@ static int const kKbpsMultiplier = 1000;
RTCLogError(@"Failed to set session description. Error: %@", error);
[self disconnect];
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey: @"Failed to set session description.",
NSLocalizedDescriptionKey : @"Failed to set session description.",
};
NSError *sdpError =
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
@@ -516,11 +528,13 @@ static int const kKbpsMultiplier = 1000;
// If we're answering and we've just set the remote offer we need to create
// an answer and set the local description.
if (!self.isInitiator && !self.peerConnection.localDescription) {
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[self defaultAnswerConstraints];
__weak ARDAppClient *weakSelf = self;
[self.peerConnection
answerForConstraints:constraints
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp,
NSError * error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
@@ -562,10 +576,15 @@ static int const kKbpsMultiplier = 1000;
self.state = kARDAppClientStateConnected;
// Create peer connection.
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate)
generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[self defaultPeerConnectionConstraints];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCCertificate) *pcert =
[RTC_OBJC_TYPE(RTCCertificate) generateCertificateWithParams:@{
@"expires" : @100000,
@"name" : @"RSASSA-PKCS1-v1_5"
}];
config.iceServers = _iceServers;
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
config.certificate = pcert;
@@ -580,7 +599,8 @@ static int const kKbpsMultiplier = 1000;
__weak ARDAppClient *weakSelf = self;
[_peerConnection
offerForConstraints:[self defaultOfferConstraints]
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp,
NSError * error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
@@ -593,18 +613,23 @@ static int const kKbpsMultiplier = 1000;
#if defined(WEBRTC_IOS)
// Start event log.
if (kARDAppClientEnableRtcEventLog) {
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-rtceventlog"];
if (![_peerConnection startRtcEventLogWithFilePath:filePath
maxSizeInBytes:kARDAppClientRtcEventLogMaxSizeInBytes]) {
NSString *filePath =
[self documentsFilePathForFileName:@"webrtc-rtceventlog"];
if (![_peerConnection
startRtcEventLogWithFilePath:filePath
maxSizeInBytes:
kARDAppClientRtcEventLogMaxSizeInBytes]) {
RTCLogError(@"Failed to start event logging.");
}
}
// Start aecdump diagnostic recording.
if ([_settings currentCreateAecDumpSettingFromStore]) {
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-audio.aecdump"];
if (![_factory startAecDumpWithFilePath:filePath
maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) {
NSString *filePath =
[self documentsFilePathForFileName:@"webrtc-audio.aecdump"];
if (![_factory
startAecDumpWithFilePath:filePath
maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) {
RTCLogError(@"Failed to start aec dump.");
}
}
@@ -629,13 +654,14 @@ static int const kKbpsMultiplier = 1000;
// Processes the given signaling message based on its type.
- (void)processSignalingMessage:(ARDSignalingMessage *)message {
NSParameterAssert(_peerConnection ||
message.type == kARDSignalingMessageTypeBye);
message.type == kARDSignalingMessageTypeBye);
switch (message.type) {
case kARDSignalingMessageTypeOffer:
case kARDSignalingMessageTypeAnswer: {
ARDSessionDescriptionMessage *sdpMessage =
(ARDSessionDescriptionMessage *)message;
RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
RTC_OBJC_TYPE(RTCSessionDescription) *description =
sdpMessage.sessionDescription;
__weak ARDAppClient *weakSelf = self;
[_peerConnection setRemoteDescription:description
completionHandler:^(NSError *error) {
@@ -653,7 +679,8 @@ static int const kKbpsMultiplier = 1000;
completionHandler:^(NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
[strongSelf.delegate appClient:strongSelf
didError:error];
}
}];
break;
@@ -679,23 +706,23 @@ static int const kKbpsMultiplier = 1000;
- (void)sendSignalingMessage:(ARDSignalingMessage *)message {
if (_isInitiator) {
__weak ARDAppClient *weakSelf = self;
[_roomServerClient sendMessage:message
forRoomId:_roomId
clientId:_clientId
completionHandler:^(ARDMessageResponse *response,
NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
return;
}
NSError *messageError =
[[strongSelf class] errorForMessageResultType:response.result];
if (messageError) {
[strongSelf.delegate appClient:strongSelf didError:messageError];
return;
}
}];
[_roomServerClient
sendMessage:message
forRoomId:_roomId
clientId:_clientId
completionHandler:^(ARDMessageResponse *response, NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
return;
}
NSError *messageError =
[[strongSelf class] errorForMessageResultType:response.result];
if (messageError) {
[strongSelf.delegate appClient:strongSelf didError:messageError];
return;
}
}];
} else {
[_channel sendMessage:message];
}
@@ -705,26 +732,30 @@ static int const kKbpsMultiplier = 1000;
for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) {
if (sender.track != nil) {
if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) {
[self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender];
[self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore]
forVideoSender:sender];
}
}
}
}
- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
- (void)setMaxBitrate:(NSNumber *)maxBitrate
forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
if (maxBitrate.intValue <= 0) {
return;
}
RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters;
for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) {
for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) *
encoding in parametersToModify.encodings) {
encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
}
[sender setParameters:parametersToModify];
}
- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver {
for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) {
for (RTC_OBJC_TYPE(RTCRtpTransceiver) *
transceiver in _peerConnection.transceivers) {
if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
return transceiver;
}
@@ -733,20 +764,24 @@ static int const kKbpsMultiplier = 1000;
}
- (void)createMediaSenders {
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints];
RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints];
RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source
trackId:kARDAudioTrackId];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[self defaultMediaAudioConstraints];
RTC_OBJC_TYPE(RTCAudioSource) *source =
[_factory audioSourceWithConstraints:constraints];
RTC_OBJC_TYPE(RTCAudioTrack) *track =
[_factory audioTrackWithSource:source trackId:kARDAudioTrackId];
[_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]];
_localVideoTrack = [self createLocalVideoTrack];
if (_localVideoTrack) {
[_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]];
[_peerConnection addTrack:_localVideoTrack
streamIds:@[ kARDMediaStreamId ]];
[_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
// We can set up rendering for the remote track right away since the transceiver already has an
// RTC_OBJC_TYPE(RTCRtpReceiver) with a track. The track will automatically get unmuted and
// produce frames once RTP is received.
RTC_OBJC_TYPE(RTCVideoTrack) *track =
(RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track);
// We can set up rendering for the remote track right away since the
// transceiver already has an RTC_OBJC_TYPE(RTCRtpReceiver) with a track.
// The track will automatically get unmuted and produce frames once RTP is
// received.
RTC_OBJC_TYPE(RTCVideoTrack) *track = (RTC_OBJC_TYPE(RTCVideoTrack) *)(
[self videoTransceiver].receiver.track);
[_delegate appClient:self didReceiveRemoteVideoTrack:track];
}
}
@@ -789,10 +824,9 @@ static int const kKbpsMultiplier = 1000;
}
// Open WebSocket connection.
if (!_channel) {
_channel =
[[ARDWebSocketChannel alloc] initWithURL:_websocketURL
restURL:_websocketRestURL
delegate:self];
_channel = [[ARDWebSocketChannel alloc] initWithURL:_websocketURL
restURL:_websocketRestURL
delegate:self];
if (_isLoopback) {
_loopbackChannel =
[[ARDLoopbackWebSocketChannel alloc] initWithURL:_websocketURL
@@ -810,8 +844,9 @@ static int const kKbpsMultiplier = 1000;
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints {
NSDictionary *mandatoryConstraints = @{};
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
return constraints;
}
@@ -820,13 +855,12 @@ static int const kKbpsMultiplier = 1000;
}
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints {
NSDictionary *mandatoryConstraints = @{
@"OfferToReceiveAudio" : @"true",
@"OfferToReceiveVideo" : @"true"
};
NSDictionary *mandatoryConstraints =
@{@"OfferToReceiveAudio" : @"true", @"OfferToReceiveVideo" : @"true"};
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
return constraints;
}
@@ -835,10 +869,11 @@ static int const kKbpsMultiplier = 1000;
return _defaultPeerConnectionConstraints;
}
NSString *value = _isLoopback ? @"false" : @"true";
NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value };
NSDictionary *optionalConstraints = @{@"DtlsSrtpKeyAgreement" : value};
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:optionalConstraints];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:nil
optionalConstraints:optionalConstraints];
return constraints;
}
@@ -850,19 +885,21 @@ static int const kKbpsMultiplier = 1000;
case kARDJoinResultTypeSuccess:
break;
case kARDJoinResultTypeUnknown: {
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorUnknown
userInfo:@{
NSLocalizedDescriptionKey: @"Unknown error.",
}];
error = [[NSError alloc]
initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorUnknown
userInfo:@{
NSLocalizedDescriptionKey : @"Unknown error.",
}];
break;
}
case kARDJoinResultTypeFull: {
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorRoomFull
userInfo:@{
NSLocalizedDescriptionKey: @"Room is full.",
}];
error =
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorRoomFull
userInfo:@{
NSLocalizedDescriptionKey : @"Room is full.",
}];
break;
}
}
@@ -875,25 +912,28 @@ static int const kKbpsMultiplier = 1000;
case kARDMessageResultTypeSuccess:
break;
case kARDMessageResultTypeUnknown:
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorUnknown
userInfo:@{
NSLocalizedDescriptionKey: @"Unknown error.",
}];
error = [[NSError alloc]
initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorUnknown
userInfo:@{
NSLocalizedDescriptionKey : @"Unknown error.",
}];
break;
case kARDMessageResultTypeInvalidClient:
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorInvalidClient
userInfo:@{
NSLocalizedDescriptionKey: @"Invalid client.",
}];
error = [[NSError alloc]
initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorInvalidClient
userInfo:@{
NSLocalizedDescriptionKey : @"Invalid client.",
}];
break;
case kARDMessageResultTypeInvalidRoom:
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorInvalidRoom
userInfo:@{
NSLocalizedDescriptionKey: @"Invalid room.",
}];
error =
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorInvalidRoom
userInfo:@{
NSLocalizedDescriptionKey : @"Invalid room.",
}];
break;
}
return error;


@@ -18,18 +18,16 @@
#import "ARDUtilities.h"
// TODO(tkchin): move these to a configuration object.
static NSString * const kARDRoomServerHostUrl =
@"https://appr.tc";
static NSString * const kARDRoomServerJoinFormat =
@"https://appr.tc/join/%@";
static NSString * const kARDRoomServerJoinFormatLoopback =
static NSString *const kARDRoomServerHostUrl = @"https://appr.tc";
static NSString *const kARDRoomServerJoinFormat = @"https://appr.tc/join/%@";
static NSString *const kARDRoomServerJoinFormatLoopback =
@"https://appr.tc/join/%@?debug=loopback";
static NSString * const kARDRoomServerMessageFormat =
static NSString *const kARDRoomServerMessageFormat =
@"https://appr.tc/message/%@/%@";
static NSString * const kARDRoomServerLeaveFormat =
static NSString *const kARDRoomServerLeaveFormat =
@"https://appr.tc/leave/%@/%@";
static NSString * const kARDAppEngineClientErrorDomain = @"ARDAppEngineClient";
static NSString *const kARDAppEngineClientErrorDomain = @"ARDAppEngineClient";
static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
@implementation ARDAppEngineClient
@@ -47,8 +45,7 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
urlString =
[NSString stringWithFormat:kARDRoomServerJoinFormatLoopback, roomId];
} else {
urlString =
[NSString stringWithFormat:kARDRoomServerJoinFormat, roomId];
urlString = [NSString stringWithFormat:kARDRoomServerJoinFormat, roomId];
}
NSURL *roomURL = [NSURL URLWithString:urlString];
@@ -57,14 +54,16 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
request.HTTPMethod = @"POST";
[NSURLConnection
sendAsyncRequest:request
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
completionHandler:^(
NSURLResponse *response __unused, NSData *data, NSError *error) {
if (error) {
if (completionHandler) {
completionHandler(nil, error);
}
return;
}
ARDJoinResponse *joinResponse = [ARDJoinResponse responseFromJSONData:data];
ARDJoinResponse *joinResponse =
[ARDJoinResponse responseFromJSONData:data];
if (!joinResponse) {
if (completionHandler) {
NSError *error = [[self class] badResponseError];
@@ -89,8 +88,7 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
NSData *data = [message JSONData];
NSString *urlString =
[NSString stringWithFormat:
kARDRoomServerMessageFormat, roomId, clientId];
[NSString stringWithFormat:kARDRoomServerMessageFormat, roomId, clientId];
NSURL *url = [NSURL URLWithString:urlString];
RTCLog(@"C->RS POST: %@", message);
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
@@ -98,14 +96,16 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
request.HTTPBody = data;
[NSURLConnection
sendAsyncRequest:request
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
completionHandler:^(
NSURLResponse *response __unused, NSData *data, NSError *error) {
if (error) {
if (completionHandler) {
completionHandler(nil, error);
}
return;
}
ARDMessageResponse *messageResponse = [ARDMessageResponse responseFromJSONData:data];
ARDMessageResponse *messageResponse =
[ARDMessageResponse responseFromJSONData:data];
if (!messageResponse) {
if (completionHandler) {
NSError *error = [[self class] badResponseError];
@@ -139,7 +139,8 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
dispatch_semaphore_t sem = dispatch_semaphore_create(0);
[NSURLConnection
sendAsyncRequest:request
completionHandler:^(NSURLResponse *response __unused, NSData *data __unused, NSError *e) {
completionHandler:^(
NSURLResponse *response __unused, NSData *data __unused, NSError *e) {
if (e) {
error = e;
}
@@ -148,7 +149,9 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER);
if (error) {
RTCLogError(@"Error leaving room %@ on room server: %@", roomId, error.localizedDescription);
RTCLogError(@"Error leaving room %@ on room server: %@",
roomId,
error.localizedDescription);
if (completionHandler) {
completionHandler(error);
}
@@ -163,12 +166,12 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
#pragma mark - Private
+ (NSError *)badResponseError {
NSError *error =
[[NSError alloc] initWithDomain:kARDAppEngineClientErrorDomain
code:kARDAppEngineClientErrorBadResponse
userInfo:@{
NSLocalizedDescriptionKey: @"Error parsing response.",
}];
NSError *error = [[NSError alloc]
initWithDomain:kARDAppEngineClientErrorDomain
code:kARDAppEngineClientErrorBadResponse
userInfo:@{
NSLocalizedDescriptionKey : @"Error parsing response.",
}];
return error;
}


@@ -22,7 +22,8 @@ const Float64 kFramerateLimit = 30.0;
BOOL _usingFrontCamera;
}
- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
- (instancetype)initWithCapturer:
(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
settings:(ARDSettingsModel *)settings {
self = [super init];
if (self) {
@@ -38,8 +39,9 @@ const Float64 kFramerateLimit = 30.0;
}
- (void)startCapture:(void (^)(NSError *))completion {
AVCaptureDevicePosition position =
_usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
AVCaptureDevicePosition position = _usingFrontCamera ?
AVCaptureDevicePositionFront :
AVCaptureDevicePositionBack;
AVCaptureDevice *device = [self findDeviceForPosition:position];
AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
@@ -52,7 +54,10 @@ const Float64 kFramerateLimit = 30.0;
NSInteger fps = [self selectFpsForFormat:format];
[_capturer startCaptureWithDevice:device format:format fps:fps completionHandler:completion];
[_capturer startCaptureWithDevice:device
format:format
fps:fps
completionHandler:completion];
}
- (void)stopCapture {
@@ -91,13 +96,17 @@ const Float64 kFramerateLimit = 30.0;
int currentDiff = INT_MAX;
for (AVCaptureDeviceFormat *format in formats) {
CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
CMVideoDimensions dimension =
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
FourCharCode pixelFormat =
CMFormatDescriptionGetMediaSubType(format.formatDescription);
int diff = abs(targetWidth - dimension.width) +
abs(targetHeight - dimension.height);
if (diff < currentDiff) {
selectedFormat = format;
currentDiff = diff;
} else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
} else if (diff == currentDiff &&
pixelFormat == [_capturer preferredOutputPixelFormat]) {
selectedFormat = format;
}
}


@@ -21,14 +21,16 @@
@implementation ARDExternalSampleCapturer
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
- (instancetype)initWithDelegate:
(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
return [super initWithDelegate:delegate];
}
#pragma mark - ARDExternalSampleDelegate
- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer {
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
!CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
@@ -41,7 +43,8 @@
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs =
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
NSEC_PER_SEC;
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
rotation:RTCVideoRotation_0


@@ -27,19 +27,21 @@ NS_ASSUME_NONNULL_BEGIN
- (NSArray<NSString *> *)availableVideoResolutions {
NSMutableSet<NSArray<NSNumber *> *> *resolutions =
[[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
for (AVCaptureDeviceFormat *format in
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) {
for (AVCaptureDevice *device in
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
for (AVCaptureDeviceFormat *format in [RTC_OBJC_TYPE(RTCCameraVideoCapturer)
supportedFormatsForDevice:device]) {
CMVideoDimensions resolution =
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
NSArray<NSNumber *> *resolutionObject = @[ @(resolution.width), @(resolution.height) ];
NSArray<NSNumber *> *resolutionObject =
@[ @(resolution.width), @(resolution.height) ];
[resolutions addObject:resolutionObject];
}
}
NSArray<NSArray<NSNumber *> *> *sortedResolutions =
[[resolutions allObjects] sortedArrayUsingComparator:^NSComparisonResult(
NSArray<NSNumber *> *obj1, NSArray<NSNumber *> *obj2) {
NSArray<NSArray<NSNumber *> *> *sortedResolutions = [[resolutions allObjects]
sortedArrayUsingComparator:^NSComparisonResult(
NSArray<NSNumber *> *obj1, NSArray<NSNumber *> *obj2) {
NSComparisonResult cmp = [obj1.firstObject compare:obj2.firstObject];
if (cmp != NSOrderedSame) {
return cmp;
@@ -47,10 +49,13 @@ NS_ASSUME_NONNULL_BEGIN
return [obj1.lastObject compare:obj2.lastObject];
}];
NSMutableArray<NSString *> *resolutionStrings = [[NSMutableArray<NSString *> alloc] init];
NSMutableArray<NSString *> *resolutionStrings =
[[NSMutableArray<NSString *> alloc] init];
for (NSArray<NSNumber *> *resolution in sortedResolutions) {
NSString *resolutionString =
[NSString stringWithFormat:@"%@x%@", resolution.firstObject, resolution.lastObject];
[NSString stringWithFormat:@"%@x%@",
resolution.firstObject,
resolution.lastObject];
[resolutionStrings addObject:resolutionString];
}
@@ -81,7 +86,9 @@ NS_ASSUME_NONNULL_BEGIN
Class expectedClass = [RTC_OBJC_TYPE(RTCVideoCodecInfo) class];
NSError *error;
RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodecSetting =
[NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass fromData:codecData error:&error];
[NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass
fromData:codecData
error:&error];
if (!error) {
return videoCodecSetting;
}
@@ -176,11 +183,13 @@ NS_ASSUME_NONNULL_BEGIN
return [self availableVideoCodecs].firstObject;
}
- (int)videoResolutionComponentAtIndex:(int)index inString:(NSString *)resolution {
- (int)videoResolutionComponentAtIndex:(int)index
inString:(NSString *)resolution {
if (index != 0 && index != 1) {
return 0;
}
NSArray<NSString *> *components = [resolution componentsSeparatedByString:@"x"];
NSArray<NSString *> *components =
[resolution componentsSeparatedByString:@"x"];
if (components.count != 2) {
return 0;
}
@@ -190,22 +199,25 @@ NS_ASSUME_NONNULL_BEGIN
- (void)registerStoreDefaults {
#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
NSError *error;
NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]
requiringSecureCoding:NO
error:&error];
NSData *codecData = [NSKeyedArchiver
archivedDataWithRootObject:[self defaultVideoCodecSetting]
requiringSecureCoding:NO
error:&error];
if (error) {
return;
}
#else
NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]];
NSData *codecData = [NSKeyedArchiver
archivedDataWithRootObject:[self defaultVideoCodecSetting]];
#endif
[ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
videoCodec:codecData
bitrate:nil
audioOnly:NO
createAecDump:NO
useManualAudioConfig:YES];
[ARDSettingsStore
setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
videoCodec:codecData
bitrate:nil
audioOnly:NO
createAecDump:NO
useManualAudioConfig:YES];
}
@end
NS_ASSUME_NONNULL_END


@@ -15,7 +15,8 @@ static NSString *const kVideoCodecKey = @"rtc_video_codec_info_key";
static NSString *const kBitrateKey = @"rtc_max_bitrate_key";
static NSString *const kAudioOnlyKey = @"rtc_audio_only_key";
static NSString *const kCreateAecDumpKey = @"rtc_create_aec_dump_key";
static NSString *const kUseManualAudioConfigKey = @"rtc_use_manual_audio_config_key";
static NSString *const kUseManualAudioConfigKey =
@"rtc_use_manual_audio_config_key";
NS_ASSUME_NONNULL_BEGIN
@interface ARDSettingsStore () {


@@ -16,8 +16,8 @@
#import "RTCIceCandidate+JSON.h"
#import "RTCSessionDescription+JSON.h"
static NSString * const kARDSignalingMessageTypeKey = @"type";
static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
static NSString *const kARDSignalingMessageTypeKey = @"type";
static NSString *const kARDTypeValueRemoveCandidates = @"remove-candidates";
@implementation ARDSignalingMessage
@@ -54,11 +54,12 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
[RTC_OBJC_TYPE(RTCIceCandidate) candidatesFromJSONDictionary:values];
message = [[ARDICECandidateRemovalMessage alloc]
initWithRemovedCandidates:candidates];
initWithRemovedCandidates:candidates];
} else if ([typeString isEqualToString:@"offer"] ||
[typeString isEqualToString:@"answer"]) {
RTC_OBJC_TYPE(RTCSessionDescription) *description =
[RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values];
[RTC_OBJC_TYPE(RTCSessionDescription)
descriptionFromJSONDictionary:values];
message =
[[ARDSessionDescriptionMessage alloc] initWithDescription:description];
} else if ([typeString isEqualToString:@"bye"]) {
@@ -97,7 +98,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
@synthesize candidates = _candidates;
- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
- (instancetype)initWithRemovedCandidates:
(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
NSParameterAssert(candidates.count);
self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval];
if (self) {
@@ -107,8 +109,9 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
}
- (NSData *)JSONData {
return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates
withType:kARDTypeValueRemoveCandidates];
return [RTC_OBJC_TYPE(RTCIceCandidate)
JSONDataForIceCandidates:_candidates
withType:kARDTypeValueRemoveCandidates];
}
@end
@@ -117,7 +120,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
@synthesize sessionDescription = _sessionDescription;
- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
- (instancetype)initWithDescription:
(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
RTCSdpType sdpType = description.type;
switch (sdpType) {
@@ -129,8 +133,9 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
break;
case RTCSdpTypePrAnswer:
case RTCSdpTypeRollback:
NSAssert(
NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
NSAssert(NO,
@"Unexpected type: %@",
[RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
break;
}
self = [super initWithType:messageType];
@@ -153,9 +158,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
}
- (NSData *)JSONData {
NSDictionary *message = @{
@"type": @"bye"
};
NSDictionary *message = @{@"type" : @"bye"};
return [NSJSONSerialization dataWithJSONObject:message
options:NSJSONWritingPrettyPrinted
error:NULL];


@@ -33,4 +33,3 @@
}
@end


@@ -33,11 +33,11 @@ static NSInteger kARDTURNClientErrorBadResponse = -1;
- (void)requestServersWithCompletionHandler:
(void (^)(NSArray *turnServers, NSError *error))completionHandler {
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url];
[NSURLConnection
sendAsyncRequest:request
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
completionHandler:^(
NSURLResponse *response __unused, NSData *data, NSError *error) {
if (error) {
completionHandler(nil, error);
return;
@@ -54,30 +54,36 @@ static NSInteger kARDTURNClientErrorBadResponse = -1;
- (void)makeTurnServerRequestToURL:(NSURL *)url
WithCompletionHandler:(void (^)(NSArray *turnServers,
NSError *error))completionHandler {
NSMutableURLRequest *iceServerRequest = [NSMutableURLRequest requestWithURL:url];
NSMutableURLRequest *iceServerRequest =
[NSMutableURLRequest requestWithURL:url];
iceServerRequest.HTTPMethod = @"POST";
[iceServerRequest addValue:kTURNRefererURLString forHTTPHeaderField:@"referer"];
[iceServerRequest addValue:kTURNRefererURLString
forHTTPHeaderField:@"referer"];
[NSURLConnection
sendAsyncRequest:iceServerRequest
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
completionHandler:^(
NSURLResponse *response __unused, NSData *data, NSError *error) {
if (error) {
completionHandler(nil, error);
return;
}
NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data];
NSDictionary *turnResponseDict =
[NSDictionary dictionaryWithJSONData:data];
NSMutableArray *turnServers = [NSMutableArray array];
[turnResponseDict[@"iceServers"]
enumerateObjectsUsingBlock:^(
NSDictionary *obj, NSUInteger idx __unused, BOOL *stop __unused) {
[turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) serverFromJSONDictionary:obj]];
enumerateObjectsUsingBlock:^(NSDictionary *obj,
NSUInteger idx __unused,
BOOL *stop __unused) {
[turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer)
serverFromJSONDictionary:obj]];
}];
if (!turnServers) {
NSError *responseError =
[[NSError alloc] initWithDomain:kARDTURNClientErrorDomain
code:kARDTURNClientErrorBadResponse
userInfo:@{
NSLocalizedDescriptionKey : @"Bad TURN response.",
}];
NSError *responseError = [[NSError alloc]
initWithDomain:kARDTURNClientErrorDomain
code:kARDTURNClientErrorBadResponse
userInfo:@{
NSLocalizedDescriptionKey : @"Bad TURN response.",
}];
completionHandler(nil, responseError);
return;
}


@@ -63,8 +63,7 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
[_delegate channel:self didChangeState:_state];
}
- (void)registerForRoomId:(NSString *)roomId
clientId:(NSString *)clientId {
- (void)registerForRoomId:(NSString *)roomId clientId:(NSString *)clientId {
NSParameterAssert(roomId.length);
NSParameterAssert(clientId.length);
_roomId = roomId;
@@ -79,11 +78,11 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
NSParameterAssert(_roomId.length);
NSData *data = [message JSONData];
if (_state == kARDSignalingChannelStateRegistered) {
NSString *payload =
[[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
NSString *payload = [[NSString alloc] initWithData:data
encoding:NSUTF8StringEncoding];
NSDictionary *message = @{
@"cmd": @"send",
@"msg": payload,
@"cmd" : @"send",
@"msg" : payload,
};
NSData *messageJSONObject =
[NSJSONSerialization dataWithJSONObject:message
@@ -95,12 +94,13 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
RTCLog(@"C->WSS: %@", messageString);
[_socket send:messageString];
} else {
NSString *dataString =
[[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
NSString *dataString = [[NSString alloc] initWithData:data
encoding:NSUTF8StringEncoding];
RTCLog(@"C->WSS POST: %@", dataString);
NSString *urlString =
[NSString stringWithFormat:@"%@/%@/%@",
[_restURL absoluteString], _roomId, _clientId];
NSString *urlString = [NSString stringWithFormat:@"%@/%@/%@",
[_restURL absoluteString],
_roomId,
_clientId];
NSURL *url = [NSURL URLWithString:urlString];
[NSURLConnection sendAsyncPostToURL:url
withData:data
@@ -115,9 +115,10 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
}
[_socket close];
RTCLog(@"C->WSS DELETE rid:%@ cid:%@", _roomId, _clientId);
NSString *urlString =
[NSString stringWithFormat:@"%@/%@/%@",
[_restURL absoluteString], _roomId, _clientId];
NSString *urlString = [NSString stringWithFormat:@"%@/%@/%@",
[_restURL absoluteString],
_roomId,
_clientId];
NSURL *url = [NSURL URLWithString:urlString];
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
request.HTTPMethod = @"DELETE";
@@ -168,7 +169,9 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
reason:(NSString *)reason
wasClean:(BOOL)wasClean {
RTCLog(@"WebSocket closed with code: %ld reason:%@ wasClean:%d",
(long)code, reason, wasClean);
(long)code,
reason,
wasClean);
NSParameterAssert(_state != kARDSignalingChannelStateError);
self.state = kARDSignalingChannelStateClosed;
}
@@ -182,7 +185,7 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
NSParameterAssert(_roomId.length);
NSParameterAssert(_clientId.length);
NSDictionary *registerMessage = @{
@"cmd": @"register",
@"cmd" : @"register",
@"roomid" : _roomId,
@"clientid" : _clientId,
};
@@ -219,15 +222,17 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
// Change message to answer, send back to server.
ARDSessionDescriptionMessage *sdpMessage =
(ARDSessionDescriptionMessage *)message;
RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
RTC_OBJC_TYPE(RTCSessionDescription) *description =
sdpMessage.sessionDescription;
NSString *dsc = description.sdp;
dsc = [dsc stringByReplacingOccurrencesOfString:@"offer"
withString:@"answer"];
RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription =
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
ARDSignalingMessage *answer =
[[ARDSessionDescriptionMessage alloc]
initWithDescription:answerDescription];
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
initWithType:RTCSdpTypeAnswer
sdp:dsc];
ARDSignalingMessage *answer = [[ARDSessionDescriptionMessage alloc]
initWithDescription:answerDescription];
[self sendMessage:answer];
break;
}
@@ -250,4 +255,3 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
}
@end


@@ -22,7 +22,8 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
@implementation RTC_OBJC_TYPE (RTCIceCandidate)
(JSON)
+ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary {
+ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary
: (NSDictionary *)dictionary {
NSString *mid = dictionary[kRTCICECandidateMidKey];
NSString *sdp = dictionary[kRTCICECandidateSdpKey];
NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey];
@@ -32,7 +33,8 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
sdpMid:mid];
}
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
+ (NSData *)JSONDataForIceCandidates:
(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
withType:(NSString *)typeValue {
NSMutableArray *jsonCandidates =
[NSMutableArray arrayWithCapacity:candidates.count];
@@ -62,8 +64,8 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
NSMutableArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
[NSMutableArray arrayWithCapacity:jsonCandidates.count];
for (NSDictionary *jsonCandidate in jsonCandidates) {
RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
[RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate];
RTC_OBJC_TYPE(RTCIceCandidate) *candidate = [RTC_OBJC_TYPE(RTCIceCandidate)
candidateFromJSONDictionary:jsonCandidate];
[candidates addObject:candidate];
}
return candidates;
@@ -88,7 +90,7 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
return data;
}
- (NSDictionary *)JSONDictionary{
- (NSDictionary *)JSONDictionary {
NSDictionary *json = @{
kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex),
kRTCICECandidateMidKey : self.sdpMid,


@@ -13,7 +13,8 @@
@implementation RTC_OBJC_TYPE (RTCIceServer)
(JSON)
+ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary {
+ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary
: (NSDictionary *)dictionary {
NSArray *turnUrls = dictionary[@"urls"];
NSString *username = dictionary[@"username"] ?: @"";
NSString *credential = dictionary[@"credential"] ?: @"";


@@ -21,7 +21,8 @@ static NSString const *kRTCSessionDescriptionSdpKey = @"sdp";
NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey];
RTCSdpType type = [[self class] typeForString:typeString];
NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey];
return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp];
return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type
sdp:sdp];
}
- (NSData *)JSONData {


@@ -20,8 +20,9 @@
NSParameterAssert(jsonString.length > 0);
NSData *data = [jsonString dataUsingEncoding:NSUTF8StringEncoding];
NSError *error = nil;
NSDictionary *dict =
[NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:data
options:0
error:&error];
if (error) {
RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
}
@@ -30,8 +31,9 @@
+ (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData {
NSError *error = nil;
NSDictionary *dict =
[NSJSONSerialization JSONObjectWithData:jsonData options:0 error:&error];
NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:jsonData
options:0
error:&error];
if (error) {
RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
}
@@ -49,7 +51,8 @@
// Kick off an async request which will call back on main thread.
NSURLSession *session = [NSURLSession sharedSession];
[[session dataTaskWithRequest:request
completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
completionHandler:^(
NSData *data, NSURLResponse *response, NSError *error) {
if (completionHandler) {
completionHandler(response, data, error);
}
@@ -59,37 +62,38 @@
// Posts data to the specified URL.
+ (void)sendAsyncPostToURL:(NSURL *)url
withData:(NSData *)data
completionHandler:(void (^)(BOOL succeeded,
NSData *data))completionHandler {
completionHandler:
(void (^)(BOOL succeeded, NSData *data))completionHandler {
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
request.HTTPMethod = @"POST";
request.HTTPBody = data;
[[self class] sendAsyncRequest:request
completionHandler:^(NSURLResponse *response,
NSData *data,
NSError *error) {
if (error) {
RTCLogError(@"Error posting data: %@", error.localizedDescription);
if (completionHandler) {
completionHandler(NO, data);
}
return;
}
NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
if (httpResponse.statusCode != 200) {
NSString *serverResponse = data.length > 0 ?
[[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding] :
nil;
RTCLogError(@"Received bad response: %@", serverResponse);
if (completionHandler) {
completionHandler(NO, data);
}
return;
}
if (completionHandler) {
completionHandler(YES, data);
}
}];
[[self class]
sendAsyncRequest:request
completionHandler:^(
NSURLResponse *response, NSData *data, NSError *error) {
if (error) {
RTCLogError(@"Error posting data: %@", error.localizedDescription);
if (completionHandler) {
completionHandler(NO, data);
}
return;
}
NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
if (httpResponse.statusCode != 200) {
NSString *serverResponse = data.length > 0 ?
[[NSString alloc] initWithData:data
encoding:NSUTF8StringEncoding] :
nil;
RTCLogError(@"Received bad response: %@", serverResponse);
if (completionHandler) {
completionHandler(NO, data);
}
return;
}
if (completionHandler) {
completionHandler(YES, data);
}
}];
}
@end
@@ -120,7 +124,7 @@ NSInteger ARDGetCpuUsagePercentage(void) {
}
// Dealloc the created array.
vm_deallocate(task, (vm_address_t)thread_array,
sizeof(thread_act_t) * thread_count);
vm_deallocate(
task, (vm_address_t)thread_array, sizeof(thread_act_t) * thread_count);
return lroundf(cpu_usage_percentage);
}


@@ -29,12 +29,12 @@
RTCInitFieldTrialDictionary(fieldTrials);
RTCInitializeSSL();
RTCSetupInternalTracer();
_window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
_window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
[_window makeKeyAndVisible];
ARDMainViewController *viewController = [[ARDMainViewController alloc] init];
UINavigationController *root =
[[UINavigationController alloc] initWithRootViewController:viewController];
UINavigationController *root = [[UINavigationController alloc]
initWithRootViewController:viewController];
root.navigationBar.translucent = NO;
_window.rootViewController = root;


@@ -21,7 +21,8 @@
@implementation ARDFileCaptureController
@synthesize fileCapturer = _fileCapturer;
- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
- (instancetype)initWithCapturer:
(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
self = [super init];
if (self) {
_fileCapturer = capturer;


@@ -48,7 +48,9 @@ static CGFloat const kCallControlMargin = 8;
- (void)layoutSubviews {
_roomText.frame =
CGRectMake(kRoomTextFieldMargin, 0, CGRectGetWidth(self.bounds) - kRoomTextFieldMargin,
CGRectMake(kRoomTextFieldMargin,
0,
CGRectGetWidth(self.bounds) - kRoomTextFieldMargin,
kRoomTextFieldHeight);
}
@@ -93,10 +95,15 @@ static CGFloat const kCallControlMargin = 8;
_startRegularCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
_startRegularCallButton.titleLabel.font = controlFont;
[_startRegularCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
_startRegularCallButton.backgroundColor
= [UIColor colorWithRed:66.0/255.0 green:200.0/255.0 blue:90.0/255.0 alpha:1.0];
[_startRegularCallButton setTitle:@"Call room" forState:UIControlStateNormal];
[_startRegularCallButton setTitleColor:controlFontColor
forState:UIControlStateNormal];
_startRegularCallButton.backgroundColor =
[UIColor colorWithRed:66.0 / 255.0
green:200.0 / 255.0
blue:90.0 / 255.0
alpha:1.0];
[_startRegularCallButton setTitle:@"Call room"
forState:UIControlStateNormal];
[_startRegularCallButton addTarget:self
action:@selector(onStartRegularCall:)
forControlEvents:UIControlEventTouchUpInside];
@@ -104,22 +111,26 @@ static CGFloat const kCallControlMargin = 8;
_startLoopbackCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
_startLoopbackCallButton.titleLabel.font = controlFont;
[_startLoopbackCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
[_startLoopbackCallButton setTitleColor:controlFontColor
forState:UIControlStateNormal];
_startLoopbackCallButton.backgroundColor =
[UIColor colorWithRed:0.0 green:122.0/255.0 blue:1.0 alpha:1.0];
[_startLoopbackCallButton setTitle:@"Loopback call" forState:UIControlStateNormal];
[UIColor colorWithRed:0.0 green:122.0 / 255.0 blue:1.0 alpha:1.0];
[_startLoopbackCallButton setTitle:@"Loopback call"
forState:UIControlStateNormal];
[_startLoopbackCallButton addTarget:self
action:@selector(onStartLoopbackCall:)
forControlEvents:UIControlEventTouchUpInside];
[self addSubview:_startLoopbackCallButton];
// Used to test what happens to sounds when calls are in progress.
_audioLoopButton = [UIButton buttonWithType:UIButtonTypeSystem];
_audioLoopButton.titleLabel.font = controlFont;
[_audioLoopButton setTitleColor:controlFontColor forState:UIControlStateNormal];
_audioLoopButton.backgroundColor =
[UIColor colorWithRed:1.0 green:149.0/255.0 blue:0.0 alpha:1.0];
[_audioLoopButton setTitleColor:controlFontColor
forState:UIControlStateNormal];
_audioLoopButton.backgroundColor = [UIColor colorWithRed:1.0
green:149.0 / 255.0
blue:0.0
alpha:1.0];
[self updateAudioLoopButton];
[_audioLoopButton addTarget:self
action:@selector(onToggleAudioLoop:)
@@ -143,29 +154,36 @@ static CGFloat const kCallControlMargin = 8;
CGRect bounds = self.bounds;
CGFloat roomTextWidth = bounds.size.width - 2 * kRoomTextFieldMargin;
CGFloat roomTextHeight = [_roomText sizeThatFits:bounds.size].height;
_roomText.frame =
CGRectMake(kRoomTextFieldMargin, kRoomTextFieldMargin, roomTextWidth,
roomTextHeight);
_roomText.frame = CGRectMake(kRoomTextFieldMargin,
kRoomTextFieldMargin,
roomTextWidth,
roomTextHeight);
CGFloat buttonHeight =
(CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) - kCallControlMargin * 4) / 3;
(CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) -
kCallControlMargin * 4) /
3;
CGFloat regularCallFrameTop = CGRectGetMaxY(_roomText.frame) + kCallControlMargin;
CGRect regularCallFrame = CGRectMake(kCallControlMargin,
regularCallFrameTop,
bounds.size.width - 2*kCallControlMargin,
buttonHeight);
CGFloat regularCallFrameTop =
CGRectGetMaxY(_roomText.frame) + kCallControlMargin;
CGRect regularCallFrame =
CGRectMake(kCallControlMargin,
regularCallFrameTop,
bounds.size.width - 2 * kCallControlMargin,
buttonHeight);
CGFloat loopbackCallFrameTop = CGRectGetMaxY(regularCallFrame) + kCallControlMargin;
CGRect loopbackCallFrame = CGRectMake(kCallControlMargin,
loopbackCallFrameTop,
bounds.size.width - 2*kCallControlMargin,
buttonHeight);
CGFloat loopbackCallFrameTop =
CGRectGetMaxY(regularCallFrame) + kCallControlMargin;
CGRect loopbackCallFrame =
CGRectMake(kCallControlMargin,
loopbackCallFrameTop,
bounds.size.width - 2 * kCallControlMargin,
buttonHeight);
CGFloat audioLoopTop = CGRectGetMaxY(loopbackCallFrame) + kCallControlMargin;
CGRect audioLoopFrame = CGRectMake(kCallControlMargin,
audioLoopTop,
bounds.size.width - 2*kCallControlMargin,
bounds.size.width - 2 * kCallControlMargin,
buttonHeight);
_startRegularCallButton.frame = regularCallFrame;


@@ -25,7 +25,8 @@
static NSString *const barButtonImageString = @"ic_settings_black_24dp.png";
// Launch argument to be passed to indicate that the app should start loopback immediately
// Launch argument to be passed to indicate that the app should start loopback
// immediately
static NSString *const loopbackLaunchProcessArgument = @"loopback";
@interface ARDMainViewController () <ARDMainViewDelegate,
@ -44,7 +45,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
- (void)viewDidLoad {
[super viewDidLoad];
if ([[[NSProcessInfo processInfo] arguments] containsObject:loopbackLaunchProcessArgument]) {
if ([[[NSProcessInfo processInfo] arguments]
containsObject:loopbackLaunchProcessArgument]) {
[self mainView:nil didInputRoom:@"" isLoopback:YES];
}
}
@ -60,9 +62,11 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
AVAudioSessionCategoryOptionDefaultToSpeaker;
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:webRTCConfig];
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration)
setWebRTCConfiguration:webRTCConfig];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session addDelegate:self];
[self configureAudioSession];
@ -70,23 +74,26 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
}
- (void)addSettingsBarButton {
UIBarButtonItem *settingsButton =
[[UIBarButtonItem alloc] initWithImage:[UIImage imageNamed:barButtonImageString]
style:UIBarButtonItemStylePlain
target:self
action:@selector(showSettings:)];
UIBarButtonItem *settingsButton = [[UIBarButtonItem alloc]
initWithImage:[UIImage imageNamed:barButtonImageString]
style:UIBarButtonItemStylePlain
target:self
action:@selector(showSettings:)];
self.navigationItem.rightBarButtonItem = settingsButton;
}
+ (NSString *)loopbackRoomString {
NSString *loopbackRoomString =
[[NSUUID UUID].UUIDString stringByReplacingOccurrencesOfString:@"-" withString:@""];
[[NSUUID UUID].UUIDString stringByReplacingOccurrencesOfString:@"-"
withString:@""];
return loopbackRoomString;
}
#pragma mark - ARDMainViewDelegate
- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback {
- (void)mainView:(ARDMainView *)mainView
didInputRoom:(NSString *)room
isLoopback:(BOOL)isLoopback {
if (!room.length) {
if (isLoopback) {
// If this is a loopback call, allow a generated room name.
@ -123,8 +130,10 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
session.useManualAudio =
[settingsModel currentUseManualAudioConfigSettingFromStore];
session.isAudioEnabled = NO;
// Kick off the video call.
@ -134,7 +143,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
delegate:self];
videoCallViewController.modalTransitionStyle =
UIModalTransitionStyleCrossDissolve;
videoCallViewController.modalPresentationStyle = UIModalPresentationFullScreen;
videoCallViewController.modalPresentationStyle =
UIModalPresentationFullScreen;
[self presentViewController:videoCallViewController
animated:YES
completion:nil];
@ -154,17 +164,20 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController {
if (![viewController isBeingDismissed]) {
RTCLog(@"Dismissing VC");
[self dismissViewControllerAnimated:YES completion:^{
[self restartAudioPlayerIfNeeded];
}];
[self dismissViewControllerAnimated:YES
completion:^{
[self restartAudioPlayerIfNeeded];
}];
}
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
session.isAudioEnabled = NO;
}
#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionDidStartPlayOrRecord:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
// Stop playback on main queue and then configure WebRTC.
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeMain
@ -178,23 +191,26 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
}];
}
- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionDidStopPlayOrRecord:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
// WebRTC is done with the audio session. Restart playback.
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
RTCLog(@"audioSessionDidStopPlayOrRecord");
[self restartAudioPlayerIfNeeded];
}];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
RTCLog(@"audioSessionDidStopPlayOrRecord");
[self restartAudioPlayerIfNeeded];
}];
}
#pragma mark - Private
- (void)showSettings:(id)sender {
ARDSettingsViewController *settingsController =
[[ARDSettingsViewController alloc] initWithStyle:UITableViewStyleGrouped
settingsModel:[[ARDSettingsModel alloc] init]];
[[ARDSettingsViewController alloc]
initWithStyle:UITableViewStyleGrouped
settingsModel:[[ARDSettingsModel alloc] init]];
UINavigationController *navigationController =
[[UINavigationController alloc] initWithRootViewController:settingsController];
UINavigationController *navigationController = [[UINavigationController alloc]
initWithRootViewController:settingsController];
[self presentViewControllerAsModal:navigationController];
}
@ -209,7 +225,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers;
configuration.mode = AVAudioSessionModeDefault;
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session lockForConfiguration];
BOOL hasSucceeded = NO;
NSError *error = nil;
@ -227,8 +244,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
}
- (void)setupAudioPlayer {
NSString *audioFilePath =
[[NSBundle mainBundle] pathForResource:@"mozart" ofType:@"mp3"];
NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"mozart"
ofType:@"mp3"];
NSURL *audioFileURL = [NSURL URLWithString:audioFilePath];
_audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileURL
error:nil];
@ -245,16 +262,17 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
}
}
- (void)showAlertWithMessage:(NSString*)message {
- (void)showAlertWithMessage:(NSString *)message {
UIAlertController *alert =
[UIAlertController alertControllerWithTitle:nil
message:message
preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action){
}];
UIAlertAction *defaultAction =
[UIAlertAction actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action){
}];
[alert addAction:defaultAction];
[self presentViewController:alert animated:YES completion:nil];


@ -69,10 +69,10 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
#pragma mark -
- (void)addDoneBarButton {
UIBarButtonItem *barItem =
[[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone
target:self
action:@selector(dismissModally:)];
UIBarButtonItem *barItem = [[UIBarButtonItem alloc]
initWithBarButtonSystemItem:UIBarButtonSystemItemDone
target:self
action:@selector(dismissModally:)];
self.navigationItem.leftBarButtonItem = barItem;
}
@ -88,7 +88,8 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
return 4;
}
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
- (NSInteger)tableView:(UITableView *)tableView
numberOfRowsInSection:(NSInteger)section {
switch (section) {
case ARDSettingsSectionAudioSettings:
return 3;
@ -103,9 +104,7 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
#pragma mark - Table view delegate helpers
- (void)removeAllAccessories:(UITableView *)tableView
inSection:(int)section
{
- (void)removeAllAccessories:(UITableView *)tableView inSection:(int)section {
for (int i = 0; i < [tableView numberOfRowsInSection:section]; i++) {
NSIndexPath *rowPath = [NSIndexPath indexPathForRow:i inSection:section];
UITableViewCell *cell = [tableView cellForRowAtIndexPath:rowPath];
@ -114,8 +113,8 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
}
- (void)tableView:(UITableView *)tableView
updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
inSection:(int)section {
updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
inSection:(int)section {
[self removeAllAccessories:tableView inSection:section];
UITableViewCell *cell = [tableView cellForRowAtIndexPath:indexPath];
cell.accessoryType = UITableViewCellAccessoryCheckmark;
@ -144,16 +143,20 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
cellForRowAtIndexPath:(NSIndexPath *)indexPath {
switch (indexPath.section) {
case ARDSettingsSectionAudioSettings:
return [self audioSettingsTableViewCellForTableView:tableView atIndexPath:indexPath];
return [self audioSettingsTableViewCellForTableView:tableView
atIndexPath:indexPath];
case ARDSettingsSectionVideoResolution:
return [self videoResolutionTableViewCellForTableView:tableView atIndexPath:indexPath];
return [self videoResolutionTableViewCellForTableView:tableView
atIndexPath:indexPath];
case ARDSettingsSectionVideoCodec:
return [self videoCodecTableViewCellForTableView:tableView atIndexPath:indexPath];
return [self videoCodecTableViewCellForTableView:tableView
atIndexPath:indexPath];
case ARDSettingsSectionBitRate:
return [self bitrateTableViewCellForTableView:tableView atIndexPath:indexPath];
return [self bitrateTableViewCellForTableView:tableView
atIndexPath:indexPath];
default:
return [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
@ -161,7 +164,8 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
}
}
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
- (void)tableView:(UITableView *)tableView
didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
switch (indexPath.section) {
case ARDSettingsSectionVideoResolution:
[self tableView:tableView disSelectVideoResolutionAtIndex:indexPath];
@ -175,17 +179,21 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
#pragma mark - Table view delegate(Video Resolution)
- (UITableViewCell *)videoResolutionTableViewCellForTableView:(UITableView *)tableView
atIndexPath:(NSIndexPath *)indexPath {
- (UITableViewCell *)
videoResolutionTableViewCellForTableView:(UITableView *)tableView
atIndexPath:(NSIndexPath *)indexPath {
NSString *dequeueIdentifier = @"ARDSettingsVideoResolutionViewCellIdentifier";
UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
UITableViewCell *cell =
[tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
if (!cell) {
cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
reuseIdentifier:dequeueIdentifier];
}
NSString *resolution = self.videoResolutionArray[indexPath.row];
cell.textLabel.text = resolution;
if ([resolution isEqualToString:[_settingsModel currentVideoResolutionSettingFromStore]]) {
if ([resolution
isEqualToString:[_settingsModel
currentVideoResolutionSettingFromStore]]) {
cell.accessoryType = UITableViewCellAccessoryCheckmark;
} else {
cell.accessoryType = UITableViewCellAccessoryNone;
@ -206,17 +214,20 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
#pragma mark - Table view delegate(Video Codec)
- (UITableViewCell *)videoCodecTableViewCellForTableView:(UITableView *)tableView
atIndexPath:(NSIndexPath *)indexPath {
- (UITableViewCell *)
videoCodecTableViewCellForTableView:(UITableView *)tableView
atIndexPath:(NSIndexPath *)indexPath {
NSString *dequeueIdentifier = @"ARDSettingsVideoCodecCellIdentifier";
UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
UITableViewCell *cell =
[tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
if (!cell) {
cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
reuseIdentifier:dequeueIdentifier];
}
RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = self.videoCodecArray[indexPath.row];
cell.textLabel.text = [codec humanReadableDescription];
if ([codec isEqualToCodecInfo:[_settingsModel currentVideoCodecSettingFromStore]]) {
if ([codec isEqualToCodecInfo:[_settingsModel
currentVideoCodecSettingFromStore]]) {
cell.accessoryType = UITableViewCellAccessoryCheckmark;
} else {
cell.accessoryType = UITableViewCellAccessoryNone;
@ -228,10 +239,11 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
- (void)tableView:(UITableView *)tableView
didSelectVideoCodecCellAtIndexPath:(NSIndexPath *)indexPath {
[self tableView:tableView
updateListSelectionAtIndexPath:indexPath
inSection:ARDSettingsSectionVideoCodec];
updateListSelectionAtIndexPath:indexPath
inSection:ARDSettingsSectionVideoCodec];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec = self.videoCodecArray[indexPath.row];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec =
self.videoCodecArray[indexPath.row];
[_settingsModel storeVideoCodecSetting:videoCodec];
}
@ -240,30 +252,37 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
- (UITableViewCell *)bitrateTableViewCellForTableView:(UITableView *)tableView
atIndexPath:(NSIndexPath *)indexPath {
NSString *dequeueIdentifier = @"ARDSettingsBitrateCellIdentifier";
UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
UITableViewCell *cell =
[tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
if (!cell) {
cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
reuseIdentifier:dequeueIdentifier];
UITextField *textField = [[UITextField alloc]
initWithFrame:CGRectMake(10, 0, cell.bounds.size.width - 20, cell.bounds.size.height)];
NSString *currentMaxBitrate = [_settingsModel currentMaxBitrateSettingFromStore].stringValue;
initWithFrame:CGRectMake(10,
0,
cell.bounds.size.width - 20,
cell.bounds.size.height)];
NSString *currentMaxBitrate =
[_settingsModel currentMaxBitrateSettingFromStore].stringValue;
textField.text = currentMaxBitrate;
textField.placeholder = @"Enter max bit rate (kbps)";
textField.keyboardType = UIKeyboardTypeNumberPad;
textField.delegate = self;
// Numerical keyboards have no return button, so we need to add one manually.
UIToolbar *numberToolbar =
[[UIToolbar alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)];
UIToolbar *numberToolbar = [[UIToolbar alloc]
initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)];
numberToolbar.items = @[
[[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
target:nil
action:nil],
[[UIBarButtonItem alloc] initWithTitle:@"Apply"
style:UIBarButtonItemStyleDone
target:self
action:@selector(numberTextFieldDidEndEditing:)]
[[UIBarButtonItem alloc]
initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
target:nil
action:nil],
[[UIBarButtonItem alloc]
initWithTitle:@"Apply"
style:UIBarButtonItemStyleDone
target:self
action:@selector(numberTextFieldDidEndEditing:)]
];
[numberToolbar sizeToFit];
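The hunk ends before the toolbar is attached to the field; a minimal sketch of the usual continuation, with the wiring assumed rather than taken from this diff:

// Assumed continuation (not shown in this hunk): install the toolbar
// above the number pad via the field's input accessory view.
textField.inputAccessoryView = numberToolbar;

// Hypothetical handler matching the "Apply" item's selector above:
- (void)numberTextFieldDidEndEditing:(id)sender {
  // End editing so the number pad is dismissed and delegate callbacks fire.
  [self.view endEditing:YES];
}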
@ -289,10 +308,12 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
#pragma mark - Table view delegate(Audio settings)
- (UITableViewCell *)audioSettingsTableViewCellForTableView:(UITableView *)tableView
atIndexPath:(NSIndexPath *)indexPath {
- (UITableViewCell *)
audioSettingsTableViewCellForTableView:(UITableView *)tableView
atIndexPath:(NSIndexPath *)indexPath {
NSString *dequeueIdentifier = @"ARDSettingsAudioSettingsCellIdentifier";
UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
UITableViewCell *cell =
[tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
if (!cell) {
cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
reuseIdentifier:dequeueIdentifier];


@ -41,11 +41,13 @@ static CGFloat const kStatusBarHeight = 20;
- (instancetype)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
_remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
_remoteVideoView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
[self addSubview:_remoteVideoView];
_localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
_localVideoView =
[[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
[self addSubview:_localVideoView];
_statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@ -69,11 +71,12 @@ static CGFloat const kStatusBarHeight = 20;
_cameraSwitchButton.backgroundColor = [UIColor grayColor];
_cameraSwitchButton.layer.cornerRadius = kButtonSize / 2;
_cameraSwitchButton.layer.masksToBounds = YES;
image = [UIImage imageForName:@"ic_switch_video_black_24dp.png" color:[UIColor whiteColor]];
image = [UIImage imageForName:@"ic_switch_video_black_24dp.png"
color:[UIColor whiteColor]];
[_cameraSwitchButton setImage:image forState:UIControlStateNormal];
[_cameraSwitchButton addTarget:self
action:@selector(onCameraSwitch:)
forControlEvents:UIControlEventTouchUpInside];
action:@selector(onCameraSwitch:)
forControlEvents:UIControlEventTouchUpInside];
[self addSubview:_cameraSwitchButton];
_hangupButton = [UIButton buttonWithType:UIButtonTypeCustom];
@ -93,10 +96,9 @@ static CGFloat const kStatusBarHeight = 20;
_statusLabel.textColor = [UIColor whiteColor];
[self addSubview:_statusLabel];
UITapGestureRecognizer *tapRecognizer =
[[UITapGestureRecognizer alloc]
initWithTarget:self
action:@selector(didTripleTap:)];
UITapGestureRecognizer *tapRecognizer = [[UITapGestureRecognizer alloc]
initWithTarget:self
action:@selector(didTripleTap:)];
tapRecognizer.numberOfTapsRequired = 3;
[self addGestureRecognizer:tapRecognizer];
}
@ -130,23 +132,23 @@ static CGFloat const kStatusBarHeight = 20;
CGRect localVideoFrame =
CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
// Place the view in the bottom right.
localVideoFrame.origin.x = CGRectGetMaxX(bounds)
- localVideoFrame.size.width - kLocalVideoViewPadding;
localVideoFrame.origin.y = CGRectGetMaxY(bounds)
- localVideoFrame.size.height - kLocalVideoViewPadding;
localVideoFrame.origin.x = CGRectGetMaxX(bounds) -
localVideoFrame.size.width - kLocalVideoViewPadding;
localVideoFrame.origin.y = CGRectGetMaxY(bounds) -
localVideoFrame.size.height - kLocalVideoViewPadding;
_localVideoView.frame = localVideoFrame;
// Place stats at the top.
CGSize statsSize = [_statsView sizeThatFits:bounds.size];
_statsView.frame = CGRectMake(CGRectGetMinX(bounds),
CGRectGetMinY(bounds) + kStatusBarHeight,
statsSize.width, statsSize.height);
statsSize.width,
statsSize.height);
// Place hangup button in the bottom left.
_hangupButton.frame =
CGRectMake(CGRectGetMinX(bounds) + kButtonPadding,
CGRectGetMaxY(bounds) - kButtonPadding -
kButtonSize,
CGRectGetMaxY(bounds) - kButtonPadding - kButtonSize,
kButtonSize,
kButtonSize);
@ -158,8 +160,7 @@ static CGFloat const kStatusBarHeight = 20;
// Place route button to the right of camera button.
CGRect routeChangeFrame = _cameraSwitchButton.frame;
routeChangeFrame.origin.x =
CGRectGetMaxX(routeChangeFrame) + kButtonPadding;
routeChangeFrame.origin.x = CGRectGetMaxX(routeChangeFrame) + kButtonPadding;
_routeChangeButton.frame = routeChangeFrame;
[_statusLabel sizeToFit];
@ -169,7 +170,8 @@ static CGFloat const kStatusBarHeight = 20;
#pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate)
- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView
didChangeVideoSize:(CGSize)size {
if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
}


@ -22,9 +22,10 @@
#import "ARDSettingsModel.h"
#import "ARDVideoCallView.h"
@interface ARDVideoCallViewController () <ARDAppClientDelegate,
ARDVideoCallViewDelegate,
RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@interface ARDVideoCallViewController () <
ARDAppClientDelegate,
ARDVideoCallViewDelegate,
RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack;
@property(nonatomic, readonly) ARDVideoCallView *videoCallView;
@property(nonatomic, assign) AVAudioSessionPortOverride portOverride;
@ -51,7 +52,9 @@
_delegate = delegate;
_client = [[ARDAppClient alloc] initWithDelegate:self];
[_client connectToRoomWithId:room settings:settingsModel isLoopback:isLoopback];
[_client connectToRoomWithId:room
settings:settingsModel
isLoopback:isLoopback];
}
return self;
}
@ -63,7 +66,8 @@
[self statusTextForState:RTCIceConnectionStateNew];
self.view = _videoCallView;
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session addDelegate:self];
}
@ -101,19 +105,23 @@
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
didCreateLocalCapturer:
(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
_videoCallView.localVideoView.captureSession = localCapturer.captureSession;
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
_captureController =
[[ARDCaptureController alloc] initWithCapturer:localCapturer settings:settingsModel];
[[ARDCaptureController alloc] initWithCapturer:localCapturer
settings:settingsModel];
[_captureController startCapture];
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
didCreateLocalFileCapturer:
(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
if (@available(iOS 10, *)) {
_fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
_fileCaptureController =
[[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
[_fileCaptureController startCapture];
}
#endif
@ -124,7 +132,8 @@
}
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
didReceiveRemoteVideoTrack:
(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
self.remoteVideoTrack = remoteVideoTrack;
__weak ARDVideoCallViewController *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
@ -133,13 +142,13 @@
});
}
- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
- (void)appClient:(ARDAppClient *)client
didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
_videoCallView.statsView.stats = stats;
[_videoCallView setNeedsLayout];
}
- (void)appClient:(ARDAppClient *)client
didError:(NSError *)error {
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error {
NSString *message =
[NSString stringWithFormat:@"%@", error.localizedDescription];
[self hangup];
@ -164,22 +173,23 @@
if (_portOverride == AVAudioSessionPortOverrideNone) {
override = AVAudioSessionPortOverrideSpeaker;
}
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession
block:^{
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session lockForConfiguration];
NSError *error = nil;
if ([session overrideOutputAudioPort:override
error:&error]) {
self.portOverride = override;
} else {
RTCLogError(@"Error overriding output port: %@",
error.localizedDescription);
}
[session unlockForConfiguration];
completion();
}];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeAudioSession
block:^{
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session lockForConfiguration];
NSError *error = nil;
if ([session overrideOutputAudioPort:override
error:&error]) {
self.portOverride = override;
} else {
RTCLogError(@"Error overriding output port: %@",
error.localizedDescription);
}
[session unlockForConfiguration];
completion();
}];
}
- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view {
@ -233,16 +243,17 @@
}
}
- (void)showAlertWithMessage:(NSString*)message {
- (void)showAlertWithMessage:(NSString *)message {
UIAlertController *alert =
[UIAlertController alertControllerWithTitle:nil
message:message
preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action){
}];
UIAlertAction *defaultAction =
[UIAlertAction actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action){
}];
[alert addAction:defaultAction];
[self presentViewController:alert animated:YES completion:nil];


@ -19,7 +19,8 @@
if ([self.name isEqualToString:@"H264"]) {
NSString *profileId = self.parameters[@"profile-level-id"];
RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId];
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
initWithHexString:profileId];
if (profileLevelId.profile == RTCH264ProfileConstrainedHigh ||
profileLevelId.profile == RTCH264ProfileHigh) {
return @"H264 (High)";


@ -31,15 +31,18 @@
_callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
[_callbackLogger start:^(NSString *logMessage) {
os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
os_log(rtc_os_log,
"%{public}s",
[logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
}];
}
return self;
}
- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
// User has requested to start the broadcast. Setup info from the UI extension can be supplied but
// is optional.
- (void)broadcastStartedWithSetupInfo:
(NSDictionary<NSString *, NSObject *> *)setupInfo {
// User has requested to start the broadcast. Setup info from the UI extension
// can be supplied but is optional.
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
_client = [[ARDAppClient alloc] initWithDelegate:self];
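The setup UI later in this diff sends @{@"roomName" : _roomNameField.text} as the setup info; a hedged sketch of consuming that optional key, with the fallback assumed (it mirrors the UUID room generation used elsewhere in this change):

// Hypothetical sketch (not part of this diff): read the optional "roomName"
// supplied by the setup UI extension, generating a room id when none was
// provided.
NSString *roomName = (NSString *)setupInfo[@"roomName"];
if (roomName.length == 0) {
  roomName = [[NSUUID UUID].UUIDString
      stringByReplacingOccurrencesOfString:@"-"
                                withString:@""];
}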
@ -57,7 +60,8 @@
}
- (void)broadcastPaused {
// User has requested to pause the broadcast. Samples will stop being delivered.
// User has requested to pause the broadcast. Samples will stop being
// delivered.
}
- (void)broadcastResumed {
@ -86,7 +90,8 @@
#pragma mark - ARDAppClientDelegate
- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state {
- (void)appClient:(ARDAppClient *)client
didChangeState:(ARDAppClientState)state {
switch (state) {
case kARDAppClientStateConnected:
RTCLog(@"Client connected.");
@ -100,16 +105,19 @@
}
}
- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state {
- (void)appClient:(ARDAppClient *)client
didChangeConnectionState:(RTCIceConnectionState)state {
RTCLog(@"ICE state changed: %ld", (long)state);
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
didCreateLocalCapturer:
(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer {
didCreateLocalExternalSampleCapturer:
(ARDExternalSampleCapturer *)externalSampleCapturer {
self.capturer = externalSampleCapturer;
}
@ -118,10 +126,12 @@
}
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
didReceiveRemoteVideoTrack:
(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
}
- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
- (void)appClient:(ARDAppClient *)client
didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
}
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error {


@ -18,7 +18,8 @@
UIView *view = [[UIView alloc] initWithFrame:CGRectZero];
view.backgroundColor = [UIColor colorWithWhite:1.0 alpha:0.7];
UIImageView *imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]];
UIImageView *imageView =
[[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]];
imageView.translatesAutoresizingMaskIntoConstraints = NO;
[view addSubview:imageView];
@ -52,22 +53,34 @@
UILayoutGuide *margin = view.layoutMarginsGuide;
[imageView.widthAnchor constraintEqualToConstant:60.0].active = YES;
[imageView.heightAnchor constraintEqualToConstant:60.0].active = YES;
[imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20].active = YES;
[imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active = YES;
[_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
[_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor constant:20].active =
[imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20]
.active = YES;
[imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active =
YES;
[_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
[doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
[doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
[_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor]
.active = YES;
[_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor
constant:20]
.active = YES;
[_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor]
.active = YES;
[cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
[cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
[doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor]
.active = YES;
[doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor
constant:-20]
.active = YES;
[cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor]
.active = YES;
[cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor
constant:-20]
.active = YES;
UITapGestureRecognizer *tgr =
[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(didTap:)];
[[UITapGestureRecognizer alloc] initWithTarget:self
action:@selector(didTap:)];
[view addGestureRecognizer:tgr];
self.view = view;
@ -78,23 +91,28 @@
}
- (void)userDidFinishSetup {
// URL of the resource where the broadcast can be viewed, which will be returned to the application
NSURL *broadcastURL = [NSURL
URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@", _roomNameField.text]];
// URL of the resource where the broadcast can be viewed, which will be
// returned to the application
NSURL *broadcastURL =
[NSURL URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@",
_roomNameField.text]];
// Dictionary with setup information that will be provided to the broadcast extension when the
// broadcast is started
// Dictionary with setup information that will be provided to the broadcast
// extension when the broadcast is started
NSDictionary *setupInfo = @{@"roomName" : _roomNameField.text};
// Tell ReplayKit that the extension is finished setting up and can begin broadcasting
[self.extensionContext completeRequestWithBroadcastURL:broadcastURL setupInfo:setupInfo];
// Tell ReplayKit that the extension is finished setting up and can begin
// broadcasting
[self.extensionContext completeRequestWithBroadcastURL:broadcastURL
setupInfo:setupInfo];
}
- (void)userDidCancelSetup {
// Tell ReplayKit that the extension was cancelled by the user
[self.extensionContext cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile"
code:-1
userInfo:nil]];
[self.extensionContext
cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile"
code:-1
userInfo:nil]];
}
#pragma mark - UITextFieldDelegate


@ -26,10 +26,8 @@
RTCInitializeSSL();
NSScreen* screen = [NSScreen mainScreen];
NSRect visibleRect = [screen visibleFrame];
NSRect windowRect = NSMakeRect(NSMidX(visibleRect),
NSMidY(visibleRect),
1320,
1140);
NSRect windowRect =
NSMakeRect(NSMidX(visibleRect), NSMidY(visibleRect), 1320, 1140);
NSUInteger styleMask = NSWindowStyleMaskTitled | NSWindowStyleMaskClosable;
_window = [[NSWindow alloc] initWithContentRect:windowRect
styleMask:styleMask
@ -52,4 +50,3 @@
}
@end


@ -36,17 +36,20 @@ static NSUInteger const kBottomViewHeight = 200;
@interface APPRTCMainView : NSView
@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
@property(nonatomic, readonly)
NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
@property(nonatomic, readonly)
NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
@property(nonatomic, readonly) NSTextView* logView;
- (void)displayLogMessage:(NSString*)message;
@end
@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCVideoViewDelegate)>
@interface APPRTCMainView () <NSTextFieldDelegate,
RTC_OBJC_TYPE (RTCVideoViewDelegate)>
@end
@implementation APPRTCMainView {
@implementation APPRTCMainView {
NSScrollView* _scrollView;
NSView* _actionItemsView;
NSButton* _connectButton;
@ -61,9 +64,10 @@ static NSUInteger const kBottomViewHeight = 200;
@synthesize remoteVideoView = _remoteVideoView;
@synthesize logView = _logView;
- (void)displayLogMessage:(NSString *)message {
- (void)displayLogMessage:(NSString*)message {
dispatch_async(dispatch_get_main_queue(), ^{
self.logView.string = [NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
self.logView.string =
[NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
NSRange range = NSMakeRange(self.logView.string.length, 0);
[self.logView scrollRangeToVisible:range];
});
@ -84,14 +88,10 @@ static NSUInteger const kBottomViewHeight = 200;
}
- (void)updateConstraints {
NSParameterAssert(
_roomField != nil &&
_scrollView != nil &&
_remoteVideoView != nil &&
_localVideoView != nil &&
_actionItemsView!= nil &&
_connectButton != nil &&
_loopbackButton != nil);
NSParameterAssert(_roomField != nil && _scrollView != nil &&
_remoteVideoView != nil && _localVideoView != nil &&
_actionItemsView != nil && _connectButton != nil &&
_loopbackButton != nil);
[self removeConstraints:[self constraints]];
NSDictionary* viewsDictionary =
@ -115,25 +115,26 @@ static NSUInteger const kBottomViewHeight = 200;
};
// Declare this separately to avoid a compiler warning about splitting the
// string within an NSArray expression.
NSString* verticalConstraintLeft =
@"V:|-[_remoteVideoView(remoteViewHeight)]-[_scrollView(kBottomViewHeight)]-|";
NSString* verticalConstraintLeft = @"V:|-[_remoteVideoView(remoteViewHeight)]"
@"-[_scrollView(kBottomViewHeight)]-|";
NSString* verticalConstraintRight =
@"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView(kBottomViewHeight)]-|";
@"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView("
@"kBottomViewHeight)]-|";
NSArray* constraintFormats = @[
verticalConstraintLeft,
verticalConstraintRight,
@"H:|-[_remoteVideoView(remoteViewWidth)]-|",
@"V:|-[_localVideoView(localViewHeight)]",
@"H:|-[_localVideoView(localViewWidth)]",
@"H:|-[_scrollView(==_actionItemsView)]-[_actionItemsView]-|"
verticalConstraintLeft,
verticalConstraintRight,
@"H:|-[_remoteVideoView(remoteViewWidth)]-|",
@"V:|-[_localVideoView(localViewHeight)]",
@"H:|-[_localVideoView(localViewWidth)]",
@"H:|-[_scrollView(==_actionItemsView)]-[_actionItemsView]-|"
];
NSArray* actionItemsConstraints = @[
@"H:|-[_roomField(kRoomFieldWidth)]-[_loopbackButton(kRoomFieldWidth)]",
@"H:|-[_connectButton(kRoomFieldWidth)]",
@"V:|-[_roomField(kActionItemHeight)]-[_connectButton(kActionItemHeight)]",
@"V:|-[_loopbackButton(kActionItemHeight)]",
];
@"H:|-[_roomField(kRoomFieldWidth)]-[_loopbackButton(kRoomFieldWidth)]",
@"H:|-[_connectButton(kRoomFieldWidth)]",
@"V:|-[_roomField(kActionItemHeight)]-[_connectButton(kActionItemHeight)]",
@"V:|-[_loopbackButton(kActionItemHeight)]",
];
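The comment above about splitting the string refers to clang's missing-comma check: two adjacent @"..." literals inside an @[ ] expression concatenate into a single element, which the compiler flags. A minimal illustration with hypothetical contents:

// Hypothetical repro of the warning the named variables above avoid:
NSArray* formats = @[
  @"V:|-[_remoteVideoView]"  // no comma: this literal and the next one
  @"-[_scrollView]-|",       // silently join into ONE array element
  @"H:|-[_remoteVideoView]-|"
];
// Binding the long format string to a named NSString first keeps the
// deliberate line split without tripping the check.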
[APPRTCMainView addConstraints:constraintFormats
toView:self
@ -148,15 +149,16 @@ static NSUInteger const kBottomViewHeight = 200;
#pragma mark - Constraints helper
+ (void)addConstraints:(NSArray*)constraints toView:(NSView*)view
+ (void)addConstraints:(NSArray*)constraints
toView:(NSView*)view
viewsDictionary:(NSDictionary*)viewsDictionary
metrics:(NSDictionary*)metrics {
for (NSString* constraintFormat in constraints) {
NSArray* constraints =
[NSLayoutConstraint constraintsWithVisualFormat:constraintFormat
options:0
metrics:metrics
views:viewsDictionary];
[NSLayoutConstraint constraintsWithVisualFormat:constraintFormat
options:0
metrics:metrics
views:viewsDictionary];
for (NSLayoutConstraint* constraint in constraints) {
[view addConstraint:constraint];
}
@ -170,7 +172,8 @@ static NSUInteger const kBottomViewHeight = 200;
// Generate room id for loopback options.
if (_loopbackButton.intValue && [roomString isEqualToString:@""]) {
roomString = [NSUUID UUID].UUIDString;
roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""];
roomString = [roomString stringByReplacingOccurrencesOfString:@"-"
withString:@""];
}
[self.delegate appRTCMainView:self
didEnterRoomId:roomString
@ -180,7 +183,8 @@ static NSUInteger const kBottomViewHeight = 200;
#pragma mark - RTCVideoViewDelegate
- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView
didChangeVideoSize:(CGSize)size {
if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
} else if (videoView == _localVideoView) {
@ -216,8 +220,10 @@ static NSUInteger const kBottomViewHeight = 200;
[_scrollView setDocumentView:_logView];
[self addSubview:_scrollView];
_remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
_localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
_remoteVideoView =
[[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
_localVideoView =
[[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
[_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
[self addSubview:_remoteVideoView];
@ -232,7 +238,7 @@ static NSUInteger const kBottomViewHeight = 200;
_roomField = [[NSTextField alloc] initWithFrame:NSZeroRect];
[_roomField setTranslatesAutoresizingMaskIntoConstraints:NO];
[[_roomField cell] setPlaceholderString: @"Enter AppRTC room id"];
[[_roomField cell] setPlaceholderString:@"Enter AppRTC room id"];
[_actionItemsView addSubview:_roomField];
[_roomField setEditable:YES];
@ -256,14 +262,14 @@ static NSUInteger const kBottomViewHeight = 200;
return NSMakeSize(kContentWidth, 0);
}
NSInteger width = MAX(_remoteVideoView.bounds.size.width, kContentWidth);
NSInteger height = (width/16) * 9;
NSInteger height = (width / 16) * 9;
return NSMakeSize(width, height);
}
@end
@interface APPRTCViewController ()
<ARDAppClientDelegate, APPRTCMainViewDelegate>
@interface APPRTCViewController () <ARDAppClientDelegate,
APPRTCMainViewDelegate>
@property(nonatomic, readonly) APPRTCMainView* mainView;
@end
@ -298,14 +304,14 @@ static NSUInteger const kBottomViewHeight = 200;
- (void)displayUsageInstructions {
[self.mainView displayLogMessage:
@"To start call:\n"
@"• Enter AppRTC room id (not neccessary for loopback)\n"
@"• Start call"];
@"To start call:\n"
@"• Enter AppRTC room id (not neccessary for loopback)\n"
@"• Start call"];
}
#pragma mark - ARDAppClientDelegate
- (void)appClient:(ARDAppClient *)client
- (void)appClient:(ARDAppClient*)client
didChangeState:(ARDAppClientState)state {
switch (state) {
case kARDAppClientStateConnected:
@ -322,15 +328,16 @@ static NSUInteger const kBottomViewHeight = 200;
}
}
- (void)appClient:(ARDAppClient *)client
- (void)appClient:(ARDAppClient*)client
didChangeConnectionState:(RTCIceConnectionState)state {
}
- (void)appClient:(ARDAppClient*)client
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
_captureController =
[[ARDCaptureController alloc] initWithCapturer:localCapturer
settings:[[ARDSettingsModel alloc] init]];
didCreateLocalCapturer:
(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
_captureController = [[ARDCaptureController alloc]
initWithCapturer:localCapturer
settings:[[ARDSettingsModel alloc] init]];
[_captureController startCapture];
}
@ -341,19 +348,18 @@ static NSUInteger const kBottomViewHeight = 200;
}
- (void)appClient:(ARDAppClient*)client
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
didReceiveRemoteVideoTrack:
(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
_remoteVideoTrack = remoteVideoTrack;
[_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
}
- (void)appClient:(ARDAppClient *)client
didError:(NSError *)error {
- (void)appClient:(ARDAppClient*)client didError:(NSError*)error {
[self showAlertWithMessage:[NSString stringWithFormat:@"%@", error]];
[self disconnect];
}
- (void)appClient:(ARDAppClient *)client
didGetStats:(NSArray *)stats {
- (void)appClient:(ARDAppClient*)client didGetStats:(NSArray*)stats {
}
#pragma mark - APPRTCMainViewDelegate
@ -361,7 +367,6 @@ static NSUInteger const kBottomViewHeight = 200;
- (void)appRTCMainView:(APPRTCMainView*)mainView
didEnterRoomId:(NSString*)roomId
loopback:(BOOL)isLoopback {
if ([roomId isEqualToString:@""]) {
[self.mainView displayLogMessage:@"Missing room id"];
return;
@ -370,7 +375,8 @@ static NSUInteger const kBottomViewHeight = 200;
[self disconnect];
ARDAppClient* client = [[ARDAppClient alloc] initWithDelegate:self];
[client connectToRoomWithId:roomId
settings:[[ARDSettingsModel alloc] init] // Use default settings.
settings:[[ARDSettingsModel alloc]
init] // Use default settings.
isLoopback:isLoopback];
_client = client;
}


@ -31,33 +31,36 @@
}
- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for
// certain types of temporary interruptions (such as an incoming phone call or SMS message) or
// when the user quits the application and it begins the transition to the background state. Use
// this method to pause ongoing tasks, disable timers, and invalidate graphics rendering
// Sent when the application is about to move from active to inactive state.
// This can occur for certain types of temporary interruptions (such as an
// incoming phone call or SMS message) or when the user quits the application
// and it begins the transition to the background state. Use this method to
// pause ongoing tasks, disable timers, and invalidate graphics rendering
// callbacks. Games should use this method to pause the game.
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store
// enough application state information to restore your application to its current state in case
// it is terminated later. If your application supports background execution, this method is
// called instead of applicationWillTerminate: when the user quits.
// Use this method to release shared resources, save user data, invalidate
// timers, and store enough application state information to restore your
// application to its current state in case it is terminated later. If your
// application supports background execution, this method is called instead of
// applicationWillTerminate: when the user quits.
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the active state; here you can undo
// many of the changes made on entering the background.
// Called as part of the transition from the background to the active state;
// here you can undo many of the changes made on entering the background.
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If
// the application was previously in the background, optionally refresh the user interface.
// Restart any tasks that were paused (or not yet started) while the
// application was inactive. If the application was previously in the
// background, optionally refresh the user interface.
}
- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also
// applicationDidEnterBackground:.
// Called when the application is about to terminate. Save data if
// appropriate. See also applicationDidEnterBackground:.
}
@end


@ -13,6 +13,7 @@
int main(int argc, char* argv[]) {
@autoreleasepool {
return UIApplicationMain(argc, argv, nil, NSStringFromClass([NADAppDelegate class]));
return UIApplicationMain(
argc, argv, nil, NSStringFromClass([NADAppDelegate class]));
}
}


@ -14,7 +14,8 @@
@synthesize delegate = _delegate;
- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
- (instancetype)initWithDelegate:
(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
self = [super init];
if (self) {
_delegate = delegate;


@ -21,7 +21,8 @@
}
- (instancetype)initWithName:(NSString *)name
parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters {
parameters:(nullable NSDictionary<NSString *, NSString *> *)
parameters {
NSDictionary<NSString *, NSString *> *params = parameters ? parameters : @{};
return [self initWithName:name parameters:params scalabilityModes:@[]];
}
@ -49,10 +50,8 @@
}
- (BOOL)isEqual:(id)object {
if (self == object)
return YES;
if (![object isKindOfClass:[self class]])
return NO;
if (self == object) return YES;
if (![object isKindOfClass:[self class]]) return NO;
return [self isEqualToCodecInfo:object];
}


@ -85,7 +85,8 @@ static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
}
+ (instancetype)currentConfiguration {
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
[[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
config.category = session.category;
@ -104,7 +105,8 @@ static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
}
}
+ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
+ (void)setWebRTCConfiguration:
(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
@synchronized(self) {
gWebRTCConfiguration = configuration;
}


@ -26,12 +26,13 @@
const int64_t kNanosecondsPerSecond = 1000000000;
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
()<AVCaptureVideoDataOutputSampleBufferDelegate> @property(nonatomic,
readonly) dispatch_queue_t frameQueue;
()<AVCaptureVideoDataOutputSampleBufferDelegate> @property(nonatomic, readonly)
dispatch_queue_t frameQueue;
@property(nonatomic, strong) AVCaptureDevice *currentDevice;
@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
@property(nonatomic, assign) BOOL isRunning;
// Will the session be running once all asynchronous operations have been completed?
// Will the session be running once all asynchronous operations have been
// completed?
@property(nonatomic, assign) BOOL willBeRunning;
@end
@ -55,15 +56,19 @@ const int64_t kNanosecondsPerSecond = 1000000000;
@synthesize willBeRunning = _willBeRunning;
- (instancetype)init {
return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
return [self initWithDelegate:nil
captureSession:[[AVCaptureSession alloc] init]];
}
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
- (instancetype)initWithDelegate:
(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
return [self initWithDelegate:delegate
captureSession:[[AVCaptureSession alloc] init]];
}
// This initializer is used for testing.
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
- (instancetype)initWithDelegate:
(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
captureSession:(AVCaptureSession *)captureSession {
self = [super initWithDelegate:delegate];
if (self) {
@ -113,22 +118,26 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)dealloc {
NSAssert(!_willBeRunning,
@"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
@"Session was still running in "
@"RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
@"call stopCapture?");
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
+ (NSArray<AVCaptureDevice *> *)captureDevices {
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
discoverySessionWithDeviceTypes:@[
AVCaptureDeviceTypeBuiltInWideAngleCamera
]
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
return session.devices;
}
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
// Support opening the device in any format. We make sure it's converted to a format we
// can handle, if needed, in the method `-setupVideoDataOutput`.
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:
(AVCaptureDevice *)device {
// Support opening the device in any format. We make sure it's converted to a
// format we can handle, if needed, in the method `-setupVideoDataOutput`.
return device.formats;
}
@ -139,7 +148,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps {
[self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
[self startCaptureWithDevice:device
format:format
fps:fps
completionHandler:nil];
}
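For orientation, a hedged sketch of a call site combining the capturer class methods shown in this file; `capturer` and the fps value are assumptions, not taken from the diff:

// Hypothetical call site: use the first reported device and format, then
// start capturing at 30 fps and log any failure.
AVCaptureDevice *device =
    [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices].firstObject;
AVCaptureDeviceFormat *format =
    [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]
        .firstObject;
[capturer startCaptureWithDevice:device
                          format:format
                             fps:30
               completionHandler:^(NSError *_Nullable error) {
                 if (error) {
                   RTCLogError(@"startCapture failed: %@", error);
                 }
               }];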
- (void)stopCapture {
@ -149,17 +161,21 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps
completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler {
completionHandler:(nullable void (^)(NSError *_Nullable error))
completionHandler {
_willBeRunning = YES;
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
RTCLogInfo("startCaptureWithDevice %@ @ %ld fps",
format,
(long)fps);
#if TARGET_OS_IPHONE
dispatch_async(dispatch_get_main_queue(), ^{
if (!self->_generatingOrientationNotifications) {
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
[[UIDevice currentDevice]
beginGeneratingDeviceOrientationNotifications];
self->_generatingOrientationNotifications = YES;
}
});
@ -191,14 +207,16 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}];
}
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
- (void)stopCaptureWithCompletionHandler:
(nullable void (^)(void))completionHandler {
_willBeRunning = NO;
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("Stop");
self.currentDevice = nil;
for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
for (AVCaptureDeviceInput *oldInput in
[self.captureSession.inputs copy]) {
[self.captureSession removeInput:oldInput];
}
[self.captureSession stopRunning];
@ -206,7 +224,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
#if TARGET_OS_IPHONE
dispatch_async(dispatch_get_main_queue(), ^{
if (self->_generatingOrientationNotifications) {
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
[[UIDevice currentDevice]
endGeneratingDeviceOrientationNotifications];
self->_generatingOrientationNotifications = NO;
}
});
@ -222,10 +241,11 @@ const int64_t kNanosecondsPerSecond = 1000000000;
#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
}
#endif
@ -236,7 +256,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
fromConnection:(AVCaptureConnection *)connection {
NSParameterAssert(captureOutput == _videoDataOutput);
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
!CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
@ -249,16 +270,19 @@ const int64_t kNanosecondsPerSecond = 1000000000;
#if TARGET_OS_IPHONE
// Default to portrait orientation on iPhone.
BOOL usingFrontCamera = NO;
// Check the image's EXIF for the camera the image came from, since the image could have been
// delayed because we set alwaysDiscardsLateVideoFrames to NO.
// Check the image's EXIF for the camera the image came from, since the image
// could have been delayed because we set alwaysDiscardsLateVideoFrames to NO.
AVCaptureDevicePosition cameraPosition =
[AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
if (cameraPosition != AVCaptureDevicePositionUnspecified) {
usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
} else {
AVCaptureDeviceInput *deviceInput =
(AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
(AVCaptureDeviceInput *)((AVCaptureInputPort *)
connection.inputPorts.firstObject)
.input;
usingFrontCamera =
AVCaptureDevicePositionFront == deviceInput.device.position;
}
switch (_orientation) {
case UIDeviceOrientationPortrait:
@ -286,7 +310,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
int64_t timeStampNs =
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
kNanosecondsPerSecond;
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
@ -299,13 +324,14 @@ const int64_t kNanosecondsPerSecond = 1000000000;
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
#if TARGET_OS_IPHONE
CFStringRef droppedReason =
CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
CFStringRef droppedReason = CMGetAttachment(
sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
#else
// DroppedFrameReason unavailable on macOS.
CFStringRef droppedReason = nil;
#endif
RTCLogError(@"Dropped sample buffer. Reason: %@", (__bridge NSString *)droppedReason);
RTCLogError(@"Dropped sample buffer. Reason: %@",
(__bridge NSString *)droppedReason);
}
#pragma mark - AVCaptureSession notifications
@ -313,7 +339,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
NSString *reasonString = nil;
#if TARGET_OS_IPHONE
NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
NSNumber *reason =
notification.userInfo[AVCaptureSessionInterruptionReasonKey];
if (reason) {
switch (reason.intValue) {
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
@ -339,32 +366,35 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
NSError *error =
[notification.userInfo objectForKey:AVCaptureSessionErrorKey];
RTCLogError(@"Capture session runtime error: %@", error);
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
#if TARGET_OS_IPHONE
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
#else
[self handleFatalError];
#endif
}];
}];
}
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
RTCLog(@"Capture session started.");
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown
// error, allow future retries on fatal errors.
self.hasRetriedOnFatalError = NO;
}];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown
// error, allow future retries on fatal errors.
self.hasRetriedOnFatalError = NO;
}];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
@ -376,7 +406,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (!self.hasRetriedOnFatalError) {
RTCLogWarning(@"Attempting to recover from fatal capture error.");
RTCLogWarning(
@"Attempting to recover from fatal capture error.");
[self handleNonFatalError];
self.hasRetriedOnFatalError = YES;
} else {
@ -386,13 +417,14 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}
- (void)handleNonFatalError {
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (self.isRunning) {
[self.captureSession startRunning];
}
}];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (self.isRunning) {
[self.captureSession startRunning];
}
}];
}
#if TARGET_OS_IPHONE
@ -444,29 +476,34 @@ const int64_t kNanosecondsPerSecond = 1000000000;
- (void)setupVideoDataOutput {
NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
AVCaptureVideoDataOutput *videoDataOutput =
[[AVCaptureVideoDataOutput alloc] init];
// `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
// device with the most efficient output format first. Find the first format that we support.
// `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel
// formats supported by the device with the most efficient output format
// first. Find the first format that we support.
NSSet<NSNumber *> *supportedPixelFormats =
[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
NSMutableOrderedSet *availablePixelFormats =
[NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
NSMutableOrderedSet *availablePixelFormats = [NSMutableOrderedSet
orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
[availablePixelFormats intersectSet:supportedPixelFormats];
NSNumber *pixelFormat = availablePixelFormats.firstObject;
NSAssert(pixelFormat, @"Output device has no supported formats.");
_preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
_outputPixelFormat = _preferredOutputPixelFormat;
videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
videoDataOutput.videoSettings =
@{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
[videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
_videoDataOutput = videoDataOutput;
}
- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) {
FourCharCode mediaSubType =
CMFormatDescriptionGetMediaSubType(format.formatDescription);
if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats]
containsObject:@(mediaSubType)]) {
mediaSubType = _preferredOutputPixelFormat;
}
@ -474,9 +511,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
_outputPixelFormat = mediaSubType;
}
// Update videoSettings with dimensions, as some virtual cameras, e.g. Snap Camera, may not work
// otherwise.
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
// Update videoSettings with dimensions, as some virtual cameras, e.g. Snap
// Camera, may not work otherwise.
CMVideoDimensions dimensions =
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
_videoDataOutput.videoSettings = @{
(id)kCVPixelBufferWidthKey : @(dimensions.width),
(id)kCVPixelBufferHeightKey : @(dimensions.height),
@ -486,26 +524,32 @@ const int64_t kNanosecondsPerSecond = 1000000000;
#pragma mark - Private, called inside capture queue
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps {
NSAssert([RTC_OBJC_TYPE(RTCDispatcher)
isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateDeviceCaptureFormat must be called on the capture queue.");
@try {
_currentDevice.activeFormat = format;
_currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
} @catch (NSException *exception) {
RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
RTCLogError(@"Failed to set active format!\n User info:%@",
exception.userInfo);
return;
}
}
- (void)reconfigureCaptureSessionInput {
NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"reconfigureCaptureSessionInput must be called on the capture queue.");
NSAssert(
[RTC_OBJC_TYPE(RTCDispatcher)
isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"reconfigureCaptureSessionInput must be called on the capture queue.");
NSError *error = nil;
AVCaptureDeviceInput *input =
[AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
if (!input) {
RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
RTCLogError(@"Failed to create front camera input: %@",
error.localizedDescription);
return;
}
[_captureSession beginConfiguration];
@ -521,7 +565,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
}
- (void)updateOrientation {
NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
NSAssert([RTC_OBJC_TYPE(RTCDispatcher)
isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
_orientation = [UIDevice currentDevice].orientation;
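
Aside: a minimal sketch of the fallback used above when a frame carries no camera-position EXIF — walking from the AVCaptureConnection back to its device input. `connection` is assumed to come from the capture output callback; this is an illustration, not the capturer's exact code.

#import <AVFoundation/AVFoundation.h>

// Sketch: infer which camera produced a frame when the EXIF-based
// device-position check is inconclusive. Walks from the connection's
// first input port back to the AVCaptureDeviceInput that owns it.
static BOOL IsFrontCameraFrame(AVCaptureConnection *connection) {
  AVCaptureInputPort *port = connection.inputPorts.firstObject;
  AVCaptureDeviceInput *deviceInput = (AVCaptureDeviceInput *)port.input;
  return deviceInput.device.position == AVCaptureDevicePositionFront;
}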


@ -50,7 +50,9 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
NSError *error =
[NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
code:RTCFileVideoCapturerErrorCode_CapturerRunning
userInfo:@{NSUnderlyingErrorKey : @"Capturer has been started."}];
userInfo:@{
NSUnderlyingErrorKey : @"Capturer has been started."
}];
errorBlock(error);
return;
@ -58,23 +60,25 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
_status = RTCFileVideoCapturerStatusStarted;
}
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSString *pathForFile = [self pathForFileName:nameOfFile];
if (!pathForFile) {
NSString *errorString =
[NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
code:RTCFileVideoCapturerErrorCode_FileNotFound
userInfo:@{NSUnderlyingErrorKey : errorString}];
errorBlock(error);
return;
}
dispatch_async(
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSString *pathForFile = [self pathForFileName:nameOfFile];
if (!pathForFile) {
NSString *errorString = [NSString
stringWithFormat:@"File %@ not found in bundle", nameOfFile];
NSError *error = [NSError
errorWithDomain:kRTCFileVideoCapturerErrorDomain
code:RTCFileVideoCapturerErrorCode_FileNotFound
userInfo:@{NSUnderlyingErrorKey : errorString}];
errorBlock(error);
return;
}
self.lastPresentationTime = CMTimeMake(0, 0);
self.lastPresentationTime = CMTimeMake(0, 0);
self.fileURL = [NSURL fileURLWithPath:pathForFile];
[self setupReaderOnError:errorBlock];
});
self.fileURL = [NSURL fileURLWithPath:pathForFile];
[self setupReaderOnError:errorBlock];
});
}
- (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock {
@ -90,10 +94,12 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
}
NSDictionary *options = @{
(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
(NSString *)kCVPixelBufferPixelFormatTypeKey :
@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};
_outTrack =
[[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject outputSettings:options];
[[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
outputSettings:options];
[_reader addOutput:_outTrack];
[_reader startReading];
@ -113,8 +119,8 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
return nil;
}
NSString *path =
[[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
NSString *path = [[NSBundle mainBundle] pathForResource:nameComponents[0]
ofType:nameComponents[1]];
return path;
}
@ -147,7 +153,8 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
[self readNextBuffer];
return;
}
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
!CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
CFRelease(sampleBuffer);
[self readNextBuffer];
@ -158,18 +165,22 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
}
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime presentationTime =
CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
Float64 presentationDifference =
CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
_lastPresentationTime = presentationTime;
int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC);
int64_t presentationDifferenceRound =
lroundf(presentationDifference * NSEC_PER_SEC);
__block dispatch_source_t timer = [self createStrictTimer];
// Strict timer that will fire `presentationDifferenceRound` ns from now and never again.
dispatch_source_set_timer(timer,
dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
DISPATCH_TIME_FOREVER,
0);
// Strict timer that will fire `presentationDifferenceRound` ns from now and
// never again.
dispatch_source_set_timer(
timer,
dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
DISPATCH_TIME_FOREVER,
0);
dispatch_source_set_event_handler(timer, ^{
dispatch_source_cancel(timer);
timer = nil;
@ -177,14 +188,16 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (!pixelBuffer) {
CFRelease(sampleBuffer);
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self readNextBuffer];
});
dispatch_async(
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self readNextBuffer];
});
return;
}
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:pixelBuffer];
NSTimeInterval timeStampSeconds = CACurrentMediaTime();
int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
@ -193,9 +206,10 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
timeStampNs:timeStampNs];
CFRelease(sampleBuffer);
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self readNextBuffer];
});
dispatch_async(
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self readNextBuffer];
});
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
});
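
Aside: the frame pacing above reduces to a one-shot strict timer; a minimal sketch, assuming `intervalNs` is the nanosecond gap between consecutive presentation timestamps.

#import <dispatch/dispatch.h>

// Sketch: fire `block` once, `intervalNs` nanoseconds from now, then
// cancel. DISPATCH_TIMER_STRICT asks the kernel not to coalesce or
// defer the deadline, which matters when pacing frames from a file.
static void FireOnceAfter(int64_t intervalNs, dispatch_block_t block) {
  __block dispatch_source_t timer = dispatch_source_create(
      DISPATCH_SOURCE_TYPE_TIMER,
      0,
      DISPATCH_TIMER_STRICT,
      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0));
  dispatch_source_set_timer(timer,
                            dispatch_time(DISPATCH_TIME_NOW, intervalNs),
                            DISPATCH_TIME_FOREVER,  // One-shot: never repeat.
                            0);                     // Zero leeway.
  dispatch_source_set_event_handler(timer, ^{
    dispatch_source_cancel(timer);
    timer = nil;  // Break the retain cycle once fired.
    block();
  });
  dispatch_resume(timer);
}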


@ -58,7 +58,8 @@
if ([[self class] isMetalAvailable]) {
_metalView = [[MTKView alloc] initWithFrame:self.bounds];
[self addSubview:_metalView];
_metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
_metalView.layerContentsPlacement =
NSViewLayerContentsPlacementScaleProportionallyToFit;
_metalView.translatesAutoresizingMaskIntoConstraints = NO;
_metalView.framebufferOnly = YES;
_metalView.delegate = self;


@ -22,8 +22,9 @@
#import "RTCMTLNV12Renderer.h"
#import "RTCMTLRGBRenderer.h"
// To avoid unrecognized symbol linker errors, we're taking advantage of the objc runtime.
// Linking errors occur when compiling for architectures that don't support Metal.
// To avoid unrecognized symbol linker errors, we're taking advantage of the objc
// runtime. Linking errors occur when compiling for architectures that don't
// support Metal.
#define MTKViewClass NSClassFromString(@"MTKView")
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
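
Aside: a hedged sketch of the runtime-lookup pattern these macros rely on; `NSClassFromString` returns nil instead of failing to link when the class is absent for the target architecture.

#import <UIKit/UIKit.h>

// Sketch: resolve MTKView at runtime so binaries built for
// architectures without Metal still link; callers fall back to the
// OpenGL view when the class is missing.
static UIView *MetalViewIfAvailable(CGRect frame) {
  Class viewClass = NSClassFromString(@"MTKView");
  return viewClass ? [[viewClass alloc] initWithFrame:frame] : nil;
}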
@ -117,8 +118,8 @@
}
- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
[super setMultipleTouchEnabled:multipleTouchEnabled];
self.metalView.multipleTouchEnabled = multipleTouchEnabled;
[super setMultipleTouchEnabled:multipleTouchEnabled];
self.metalView.multipleTouchEnabled = multipleTouchEnabled;
}
- (void)layoutSubviews {
@ -136,7 +137,8 @@
#pragma mark - MTKViewDelegate methods
- (void)drawInMTKView:(nonnull MTKView *)view {
NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
NSAssert(view == self.metalView,
@"Receiving draw callbacks from foreign instance.");
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame;
// Skip rendering if we've already rendered this frame.
if (!videoFrame || videoFrame.width <= 0 || videoFrame.height <= 0 ||
@ -149,10 +151,14 @@
}
RTCMTLRenderer *renderer;
if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
if ([videoFrame.buffer
isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
const OSType pixelFormat =
CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA ||
pixelFormat == kCVPixelFormatType_32ARGB) {
if (!self.rendererRGB) {
self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer];
if (![self.rendererRGB addRenderingDestination:self.metalView]) {
@ -222,8 +228,8 @@
CGSize videoFrameSize = self.videoFrameSize;
RTCVideoRotation frameRotation = [self frameRotation];
BOOL useLandscape =
(frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
BOOL useLandscape = (frameRotation == RTCVideoRotation_0) ||
(frameRotation == RTCVideoRotation_180);
BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
(self.videoFrame.rotation == RTCVideoRotation_180);


@ -30,13 +30,13 @@
// error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
// the method that will trigger the binding of the render
// buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains
// its own `isDirty` flag.
// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView)
// maintains its own `isDirty` flag.
@interface RTC_OBJC_TYPE (RTCEAGLVideoView)
()<GLKViewDelegate>
// `videoFrame` is set when we receive a frame from a worker thread and is read
// from the display link callback so atomicity is required.
// `videoFrame` is set when we receive a frame from a worker thread and is
// read from the display link callback so atomicity is required.
@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
@property(nonatomic, readonly) GLKView *glkView;
@end
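
Aside: a small sketch of the handoff the comment above describes — worker threads write the newest frame, the display-link callback reads it, and the atomic property is the only synchronization. The mailbox type and the timestamp check are illustrative stand-ins for the view's own `isDirty` bookkeeping.

// Sketch: single-slot mailbox between capture threads and the render
// loop. `atomic` makes each pointer read/write indivisible.
@interface FrameMailbox : NSObject
@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame;
@end

@implementation FrameMailbox
@end

// Display-link side: skip frames that were already drawn.
//   RTC_OBJC_TYPE(RTCVideoFrame) *frame = mailbox.videoFrame;
//   if (frame && frame.timeStampNs != lastDrawnTimeStampNs) {
//     lastDrawnTimeStampNs = frame.timeStampNs;
//     // ... render `frame` ...
//   }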
@ -68,7 +68,8 @@
return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
}
- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
- (instancetype)initWithFrame:(CGRect)frame
shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
self = [super initWithFrame:frame];
if (self) {
_shader = shader;
@ -93,7 +94,7 @@
- (BOOL)configure {
EAGLContext *glContext =
[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
[[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
if (!glContext) {
glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
}
@ -104,8 +105,7 @@
_glContext = glContext;
// GLKView manages a framebuffer for us.
_glkView = [[GLKView alloc] initWithFrame:CGRectZero
context:_glContext];
_glkView = [[GLKView alloc] initWithFrame:CGRectZero context:_glContext];
_glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
_glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
_glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;
@ -118,7 +118,7 @@
// Listen to application state in order to clean up OpenGL before app goes
// away.
NSNotificationCenter *notificationCenter =
[NSNotificationCenter defaultCenter];
[NSNotificationCenter defaultCenter];
[notificationCenter addObserver:self
selector:@selector(willResignActive)
name:UIApplicationWillResignActiveNotification
@ -136,15 +136,16 @@
RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
[strongSelf displayLinkTimerDidFire];
}];
if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
if ([[UIApplication sharedApplication] applicationState] ==
UIApplicationStateActive) {
[self setupGL];
}
return YES;
}
- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
[super setMultipleTouchEnabled:multipleTouchEnabled];
_glkView.multipleTouchEnabled = multipleTouchEnabled;
[super setMultipleTouchEnabled:multipleTouchEnabled];
_glkView.multipleTouchEnabled = multipleTouchEnabled;
}
- (void)dealloc {
@ -191,14 +192,15 @@
return;
}
RTCVideoRotation rotation = frame.rotation;
if(_rotationOverride != nil) {
[_rotationOverride getValue: &rotation];
if (_rotationOverride != nil) {
[_rotationOverride getValue:&rotation];
}
[self ensureGLContext];
glClear(GL_COLOR_BUFFER_BIT);
if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
if (!_nv12TextureCache) {
_nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
_nv12TextureCache =
[[RTCNV12TextureCache alloc] initWithContext:_glContext];
}
if (_nv12TextureCache) {
[_nv12TextureCache uploadFrameToTextures:frame];
@ -213,7 +215,8 @@
}
} else {
if (!_i420TextureCache) {
_i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext];
_i420TextureCache =
[[RTCI420TextureCache alloc] initWithContext:_glContext];
}
[_i420TextureCache uploadFrameToTextures:frame];
[_shader applyShadingForFrameWithWidth:frame.width


@ -31,14 +31,15 @@
- (instancetype)initWithContext:(EAGLContext *)context {
self = [super init];
if (self) {
CVReturn ret = CVOpenGLESTextureCacheCreate(
kCFAllocatorDefault, NULL,
CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault,
NULL,
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
context,
context,
#else
(__bridge void *)context,
(__bridge void *)context,
#endif
NULL, &_textureCache);
NULL,
&_textureCache);
if (ret != kCVReturnSuccess) {
self = nil;
}
@ -57,9 +58,19 @@
CFRelease(*textureOut);
*textureOut = nil;
}
CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width,
height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut);
CVReturn ret =
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_textureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
pixelFormat,
width,
height,
pixelFormat,
GL_UNSIGNED_BYTE,
planeIndex,
textureOut);
if (ret != kCVReturnSuccess) {
if (*textureOut) {
CFRelease(*textureOut);
@ -80,16 +91,17 @@
- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]],
@"frame must be CVPixelBuffer backed");
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
return [self loadTexture:&_yTextureRef
pixelBuffer:pixelBuffer
planeIndex:0
pixelFormat:GL_LUMINANCE] &&
[self loadTexture:&_uvTextureRef
pixelBuffer:pixelBuffer
planeIndex:1
pixelFormat:GL_LUMINANCE_ALPHA];
[self loadTexture:&_uvTextureRef
pixelBuffer:pixelBuffer
planeIndex:1
pixelFormat:GL_LUMINANCE_ALPHA];
}
- (void)releaseTextures {


@ -30,8 +30,9 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
@ -39,11 +40,12 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo)
alloc] initWithName:kRTCVideoCodecVp8Name];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
constrainedHighInfo,
@ -52,18 +54,20 @@
] mutableCopy];
if ([RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) {
[result
addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecVp9Name]];
}
#if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecAv1Name]];
#endif
return result;
}
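
Aside: the factories above all reduce to assembling RTCVideoCodecInfo entries; a minimal sketch for constrained-baseline H264 (the `42e01f` profile-level-id seen in the tests further down). The fmtp keys are assumed from the surrounding diff.

// Sketch: describe constrained-baseline H264 via SDP fmtp parameters.
NSDictionary<NSString *, NSString *> *params = @{
  @"profile-level-id" : @"42e01f",  // Constrained baseline, level 3.1.
  @"level-asymmetry-allowed" : @"1",
  @"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *h264Info =
    [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
        initWithName:kRTCVideoCodecH264Name
          parameters:params];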
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:
(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {


@ -32,8 +32,9 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
@ -41,11 +42,12 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo)
alloc] initWithName:kRTCVideoCodecVp8Name];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
constrainedHighInfo,
@ -54,18 +56,20 @@
] mutableCopy];
if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) {
[result
addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecVp9Name]];
}
#if defined(RTC_USE_LIBAOM_AV1_ENCODER)
[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:kRTCVideoCodecAv1Name]];
#endif
return result;
}
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:
(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
@ -88,7 +92,8 @@
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
[[[self class] supportedCodecs] mutableCopy];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs = [NSMutableArray array];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs =
[NSMutableArray array];
NSUInteger index = [codecs indexOfObject:self.preferredCodec];
if (index != NSNotFound) {
[orderedCodecs addObject:[codecs objectAtIndex:index]];


@ -16,7 +16,8 @@
@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264)
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
[NSMutableArray array];
NSString *codecName = kRTCVideoCodecH264Name;
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@ -25,8 +26,9 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedHighParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:codecName
parameters:constrainedHighParams];
[codecs addObject:constrainedHighInfo];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@ -35,14 +37,16 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedBaselineParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:codecName
parameters:constrainedBaselineParams];
[codecs addObject:constrainedBaselineInfo];
return [codecs copy];
}
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:
(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
}


@ -16,7 +16,8 @@
@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264)
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
[NSMutableArray array];
NSString *codecName = kRTCVideoCodecH264Name;
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@ -25,8 +26,9 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedHighParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:codecName
parameters:constrainedHighParams];
[codecs addObject:constrainedHighInfo];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@ -35,14 +37,16 @@
@"packetization-mode" : @"1",
};
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
parameters:constrainedBaselineParams];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
initWithName:codecName
parameters:constrainedBaselineParams];
[codecs addObject:constrainedBaselineInfo];
return [codecs copy];
}
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:
(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
}


@ -31,7 +31,7 @@
return self;
}
- (instancetype)initWithCoder:(NSCoder*)aDecoder {
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self addOrientationObserver];
@ -51,15 +51,18 @@
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
AVCaptureVideoPreviewLayer *previewLayer =
[self previewLayer];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
previewLayer.session = captureSession;
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeMain
dispatchAsyncOnType:
RTCDispatcherTypeMain
block:^{
[self setCorrectVideoOrientation];
[self
setCorrectVideoOrientation];
}];
}];
}];
@ -72,7 +75,7 @@
[self setCorrectVideoOrientation];
}
-(void)orientationChanged:(NSNotification *)notification {
- (void)orientationChanged:(NSNotification *)notification {
[self setCorrectVideoOrientation];
}
@ -97,23 +100,26 @@
previewLayer.connection.videoOrientation =
AVCaptureVideoOrientationPortrait;
}
// If device orientation switches to FaceUp or FaceDown, don't change video orientation.
// If device orientation switches to FaceUp or FaceDown, don't change video
// orientation.
}
}
#pragma mark - Private
- (void)addOrientationObserver {
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(orientationChanged:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(orientationChanged:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
}
- (void)removeOrientationObserver {
[[NSNotificationCenter defaultCenter] removeObserver:self
name:UIDeviceOrientationDidChangeNotification
object:nil];
[[NSNotificationCenter defaultCenter]
removeObserver:self
name:UIDeviceOrientationDidChangeNotification
object:nil];
}
- (AVCaptureVideoPreviewLayer *)previewLayer {


@ -20,13 +20,11 @@ static dispatch_queue_t kNetworkMonitorQueue = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
kAudioSessionQueue = dispatch_queue_create(
"org.webrtc.RTCDispatcherAudioSession",
DISPATCH_QUEUE_SERIAL);
"org.webrtc.RTCDispatcherAudioSession", DISPATCH_QUEUE_SERIAL);
kCaptureSessionQueue = dispatch_queue_create(
"org.webrtc.RTCDispatcherCaptureSession",
DISPATCH_QUEUE_SERIAL);
kNetworkMonitorQueue =
dispatch_queue_create("org.webrtc.RTCDispatcherNetworkMonitor", DISPATCH_QUEUE_SERIAL);
"org.webrtc.RTCDispatcherCaptureSession", DISPATCH_QUEUE_SERIAL);
kNetworkMonitorQueue = dispatch_queue_create(
"org.webrtc.RTCDispatcherNetworkMonitor", DISPATCH_QUEUE_SERIAL);
});
}
@ -39,10 +37,12 @@ static dispatch_queue_t kNetworkMonitorQueue = nil;
+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType {
dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType];
const char* targetLabel = dispatch_queue_get_label(targetQueue);
const char* currentLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
const char* currentLabel =
dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
NSAssert(strlen(targetLabel) > 0, @"Label is required for the target queue.");
NSAssert(strlen(currentLabel) > 0, @"Label is required for the current queue.");
NSAssert(strlen(currentLabel) > 0,
@"Label is required for the current queue.");
return strcmp(targetLabel, currentLabel) == 0;
}
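
Aside: a compact sketch of the label comparison above; it assumes every dispatcher queue carries a unique, non-empty label, which is exactly what the two NSAsserts enforce.

#include <string.h>

// Sketch: detect whether the caller is already on `targetQueue` by
// comparing labels; dispatch_get_current_queue() is deprecated, so
// DISPATCH_CURRENT_QUEUE_LABEL is the supported alternative.
static BOOL IsOnQueue(dispatch_queue_t targetQueue) {
  const char *target = dispatch_queue_get_label(targetQueue);
  const char *current = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
  return strcmp(target, current) == 0;
}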


@ -37,7 +37,8 @@
- (void)testCallbackGetsCalledForAppropriateLevel {
self.logger.severity = RTCLoggingSeverityWarning;
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"callbackWarning"];
[self.logger start:^(NSString *message) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
@ -52,14 +53,15 @@
- (void)testCallbackWithSeverityGetsCalledForAppropriateLevel {
self.logger.severity = RTCLoggingSeverityWarning;
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"callbackWarning"];
[self.logger
startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
XCTAssertEqual(severity, RTCLoggingSeverityError);
[callbackExpectation fulfill];
}];
[self.logger startWithMessageAndSeverityHandler:^(
NSString *message, RTCLoggingSeverity severity) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
XCTAssertEqual(severity, RTCLoggingSeverityError);
[callbackExpectation fulfill];
}];
RTCLogError("Horrible error");
@ -69,7 +71,8 @@
- (void)testCallbackDoesNotGetCalledForOtherLevels {
self.logger.severity = RTCLoggingSeverityError;
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"callbackError"];
[self.logger start:^(NSString *message) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
@ -86,14 +89,15 @@
- (void)testCallbackWithSeverityDoesNotGetCalledForOtherLevels {
self.logger.severity = RTCLoggingSeverityError;
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"callbackError"];
[self.logger
startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
XCTAssertEqual(severity, RTCLoggingSeverityError);
[callbackExpectation fulfill];
}];
[self.logger startWithMessageAndSeverityHandler:^(
NSString *message, RTCLoggingSeverity severity) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
XCTAssertEqual(severity, RTCLoggingSeverityError);
[callbackExpectation fulfill];
}];
RTCLogInfo("Just some info");
RTCLogWarning("Warning warning");
@ -105,7 +109,8 @@
- (void)testCallbackDoesNotgetCalledForSeverityNone {
self.logger.severity = RTCLoggingSeverityNone;
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"unexpectedCallback"];
[self.logger start:^(NSString *message) {
[callbackExpectation fulfill];
@ -117,27 +122,30 @@
RTCLogError("Horrible error");
XCTWaiter *waiter = [[XCTWaiter alloc] init];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
timeout:1.0];
XCTAssertEqual(result, XCTWaiterResultTimedOut);
}
- (void)testCallbackWithSeverityDoesNotgetCalledForSeverityNone {
self.logger.severity = RTCLoggingSeverityNone;
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"unexpectedCallback"];
[self.logger
startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
[callbackExpectation fulfill];
XCTAssertTrue(false);
}];
[self.logger startWithMessageAndSeverityHandler:^(
NSString *message, RTCLoggingSeverity severity) {
[callbackExpectation fulfill];
XCTAssertTrue(false);
}];
RTCLogInfo("Just some info");
RTCLogWarning("Warning warning");
RTCLogError("Horrible error");
XCTWaiter *waiter = [[XCTWaiter alloc] init];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
timeout:1.0];
XCTAssertEqual(result, XCTWaiterResultTimedOut);
}
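
Aside: the negative tests above all share one idiom — waiting on an expectation that must never be fulfilled and asserting that the waiter times out. A minimal sketch:

// Sketch: assert that a callback does NOT fire. Requiring the waiter
// to time out inverts the usual XCTest expectation flow.
XCTestExpectation *unexpected =
    [self expectationWithDescription:@"unexpectedCallback"];
// ... exercise the code path that must stay silent ...
XCTWaiter *waiter = [[XCTWaiter alloc] init];
XCTWaiterResult result = [waiter waitForExpectations:@[ unexpected ]
                                             timeout:1.0];
XCTAssertEqual(result, XCTWaiterResultTimedOut);  // Callback never fired.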
@ -154,7 +162,8 @@
}
- (void)testStopCallbackLogger {
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"stopped"];
[self.logger start:^(NSString *message) {
[callbackExpectation fulfill];
@ -165,29 +174,33 @@
RTCLogInfo("Just some info");
XCTWaiter *waiter = [[XCTWaiter alloc] init];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
timeout:1.0];
XCTAssertEqual(result, XCTWaiterResultTimedOut);
}
- (void)testStopCallbackWithSeverityLogger {
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"stopped"];
[self.logger
startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
[callbackExpectation fulfill];
}];
[self.logger startWithMessageAndSeverityHandler:^(
NSString *message, RTCLoggingSeverity loggingServerity) {
[callbackExpectation fulfill];
}];
[self.logger stop];
RTCLogInfo("Just some info");
XCTWaiter *waiter = [[XCTWaiter alloc] init];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
timeout:1.0];
XCTAssertEqual(result, XCTWaiterResultTimedOut);
}
- (void)testDestroyingCallbackLogger {
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"destroyed"];
[self.logger start:^(NSString *message) {
[callbackExpectation fulfill];
@ -198,38 +211,42 @@
RTCLogInfo("Just some info");
XCTWaiter *waiter = [[XCTWaiter alloc] init];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
timeout:1.0];
XCTAssertEqual(result, XCTWaiterResultTimedOut);
}
- (void)testDestroyingCallbackWithSeverityLogger {
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"destroyed"];
[self.logger
startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
[callbackExpectation fulfill];
}];
[self.logger startWithMessageAndSeverityHandler:^(
NSString *message, RTCLoggingSeverity loggingServerity) {
[callbackExpectation fulfill];
}];
self.logger = nil;
RTCLogInfo("Just some info");
XCTWaiter *waiter = [[XCTWaiter alloc] init];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
timeout:1.0];
XCTAssertEqual(result, XCTWaiterResultTimedOut);
}
- (void)testCallbackWithSeverityLoggerCannotStartTwice {
self.logger.severity = RTCLoggingSeverityWarning;
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"callbackWarning"];
[self.logger
startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
XCTAssertEqual(loggingServerity, RTCLoggingSeverityError);
[callbackExpectation fulfill];
}];
[self.logger startWithMessageAndSeverityHandler:^(
NSString *message, RTCLoggingSeverity loggingServerity) {
XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
XCTAssertEqual(loggingServerity, RTCLoggingSeverityError);
[callbackExpectation fulfill];
}];
[self.logger start:^(NSString *message) {
[callbackExpectation fulfill];


@ -23,8 +23,10 @@
NSString *fullPath = [NSString stringWithFormat:@"%s", __FILE__];
NSString *extension = fullPath.pathExtension;
XCTAssertEqualObjects(
@"m", extension, @"Do not rename %@. It should end with .m.", fullPath.lastPathComponent);
XCTAssertEqualObjects(@"m",
extension,
@"Do not rename %@. It should end with .m.",
fullPath.lastPathComponent);
}
@end


@ -23,20 +23,22 @@ static NSString *level31ConstrainedBaseline = @"42e01f";
- (void)testInitWithString {
RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh];
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
initWithHexString:level31ConstrainedHigh];
XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh);
XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline];
profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
initWithHexString:level31ConstrainedBaseline];
XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline);
XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
}
- (void)testInitWithProfileAndLevel {
RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh
level:RTCH264Level3_1];
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
initWithProfile:RTCH264ProfileConstrainedHigh
level:RTCH264Level3_1];
XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh);
profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]


@ -78,21 +78,26 @@ static size_t kBufferHeight = 200;
nil,
&pixelBufferRef);
OCMStub([frameMock buffer])
.andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]);
.andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:pixelBufferRef]);
} else {
OCMStub([frameMock buffer])
.andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:kBufferWidth
height:kBufferHeight]);
.andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc]
initWithWidth:kBufferWidth
height:kBufferHeight]);
}
OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) width]).andReturn(kBufferWidth);
OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) height]).andReturn(kBufferHeight);
OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) width])
.andReturn(kBufferWidth);
OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) height])
.andReturn(kBufferHeight);
OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX));
return frameMock;
}
- (id)rendererMockWithSuccessfulSetup:(BOOL)success {
id rendererMock = OCMClassMock([RTCMTLRenderer class]);
OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(success);
OCMStub([rendererMock addRenderingDestination:[OCMArg any]])
.andReturn(success);
return rendererMock;
}
@ -124,8 +129,8 @@ static size_t kBufferHeight = 200;
// given
OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);
[[self.frameMock reject] buffer];
@ -148,11 +153,12 @@ static size_t kBufferHeight = 200;
self.frameMock = [self frameMockWithCVPixelBuffer:NO];
OCMExpect([self.rendererI420Mock drawFrame:self.frameMock]);
OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
OCMExpect([self.classMock createI420Renderer])
.andReturn(self.rendererI420Mock);
[[self.classMock reject] createNV12Renderer];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
// when
[realView renderFrame:self.frameMock];
@ -170,11 +176,12 @@ static size_t kBufferHeight = 200;
self.frameMock = [self frameMockWithCVPixelBuffer:YES];
OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
OCMExpect([self.classMock createNV12Renderer])
.andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
// when
[realView renderFrame:self.frameMock];
@ -191,11 +198,12 @@ static size_t kBufferHeight = 200;
self.frameMock = [self frameMockWithCVPixelBuffer:YES];
OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
OCMExpect([self.classMock createNV12Renderer])
.andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
[realView renderFrame:self.frameMock];
[realView drawInMTKView:realView.metalView];
@ -203,10 +211,12 @@ static size_t kBufferHeight = 200;
[self.classMock verify];
// Recreate view.
realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc]
initWithFrame:CGRectMake(0, 0, 640, 480)];
OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
// View should reinit renderer.
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
OCMExpect([self.classMock createNV12Renderer])
.andReturn(self.rendererNV12Mock);
[realView renderFrame:self.frameMock];
[realView drawInMTKView:realView.metalView];
@ -220,11 +230,12 @@ static size_t kBufferHeight = 200;
self.frameMock = [self frameMockWithCVPixelBuffer:YES];
OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
OCMExpect([self.classMock createNV12Renderer])
.andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
[realView renderFrame:self.frameMock];
[realView drawInMTKView:realView.metalView];
@ -245,11 +256,12 @@ static size_t kBufferHeight = 200;
self.frameMock = [self frameMockWithCVPixelBuffer:YES];
OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
OCMExpect([self.classMock createNV12Renderer])
.andReturn(self.rendererNV12Mock);
[[self.classMock reject] createI420Renderer];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
[realView renderFrame:self.frameMock];
[realView drawInMTKView:realView.metalView];
@ -269,12 +281,13 @@ static size_t kBufferHeight = 200;
- (void)testReportsSizeChangesToDelegate {
OCMStub([self.classMock isMetalAvailable]).andReturn(YES);
id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
id delegateMock =
OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
CGSize size = CGSizeMake(640, 480);
OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]);
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
realView.delegate = delegateMock;
[realView setSize:size];
@ -290,7 +303,8 @@ static size_t kBufferHeight = 200;
createMetalView:CGRectZero];
OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]);
RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
[realView setVideoContentMode:UIViewContentModeScaleAspectFill];
OCMVerifyAll(metalKitView);


@ -44,7 +44,8 @@
- (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame {
CVPixelBufferRef nullPixelBuffer = NULL;
RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer];
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:nullPixelBuffer];
RTC_OBJC_TYPE(RTCVideoFrame) *badFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer
rotation:RTCVideoRotation_0


@ -32,7 +32,8 @@
#import <XCTest/XCTest.h>
@interface MockVideoEncoderDecoderFactory
: NSObject <RTC_OBJC_TYPE (RTCVideoEncoderFactory), RTC_OBJC_TYPE (RTCVideoDecoderFactory)>
: NSObject <RTC_OBJC_TYPE (RTCVideoEncoderFactory),
RTC_OBJC_TYPE (RTCVideoDecoderFactory)>
- (instancetype)initWithSupportedCodecs:
(nonnull NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
@end
@ -73,19 +74,22 @@
- (void)testPeerConnectionLifetime {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
@autoreleasepool {
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
peerConnection = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
[peerConnection close];
factory = nil;
}
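
Aside: a minimal sketch of the lifetime-test shape used throughout this file, assuming `config` and `constraints` from the surrounding test; the inner pool forces the factory to die while the created object survives.

// Sketch: verify an object outlives the factory that created it. The
// inner @autoreleasepool drains factory-related autoreleases before
// the outer pool releases the survivor.
@autoreleasepool {
  RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection;
  @autoreleasepool {
    RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
        [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
    peerConnection = [factory peerConnectionWithConfiguration:config
                                                  constraints:constraints
                                                     delegate:nil];
    [peerConnection close];
    factory = nil;  // Factory must be releasable here.
  }
  peerConnection = nil;  // Survivor released only after the factory.
}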
@ -114,10 +118,12 @@
- (void)testDataChannelLifetime {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
[[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
@ -127,10 +133,11 @@
@autoreleasepool {
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
dataChannel =
[peerConnection dataChannelForLabel:@"test_channel" configuration:dataChannelConfig];
peerConnection = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
dataChannel = [peerConnection dataChannelForLabel:@"test_channel"
configuration:dataChannelConfig];
XCTAssertNotNil(dataChannel);
[peerConnection close];
peerConnection = nil;
@ -144,11 +151,13 @@
- (void)testRTCRtpTransceiverLifetime {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@ -158,9 +167,11 @@
@autoreleasepool {
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init];
peerConnection = [factory peerConnectionWithConfiguration:config
constraints:contraints
delegate:nil];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio
init:init];
XCTAssertNotNil(tranceiver);
[peerConnection close];
peerConnection = nil;
@ -174,11 +185,13 @@
- (void)testRTCRtpSenderLifetime {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsPlanB;
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
@ -186,9 +199,11 @@
@autoreleasepool {
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
peerConnection =
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"];
peerConnection = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo
streamId:@"stream"];
XCTAssertNotNil(sender);
[peerConnection close];
peerConnection = nil;
@ -202,11 +217,13 @@
- (void)testRTCRtpReceiverLifetime {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsPlanB;
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
@ -217,10 +234,14 @@
@autoreleasepool {
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
pc1 = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
[pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
pc2 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
pc2 = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
[pc2 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];
NSTimeInterval negotiationTimeout = 15;
@ -306,7 +327,8 @@
@autoreleasepool {
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"];
videoTrack = [factory videoTrackWithSource:[factory videoSource]
trackId:@"videoTrack"];
XCTAssertNotNil(videoTrack);
factory = nil;
}
@ -318,25 +340,31 @@
- (void)testRollback {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [[RTC_OBJC_TYPE(
RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
}
optionalConstraints:nil];
__block RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
__block RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
RTC_OBJC_TYPE(RTCSessionDescription) *rollback =
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeRollback sdp:@""];
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
initWithType:RTCSdpTypeRollback
sdp:@""];
@autoreleasepool {
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
pc1 = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
[pc1 offerForConstraints:constraints
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer,
NSError * error) {
XCTAssertNil(error);
XCTAssertNotNil(offer);
@@ -352,7 +380,8 @@
NSTimeInterval negotiationTimeout = 15;
dispatch_semaphore_wait(
negotiatedSem,
dispatch_time(DISPATCH_TIME_NOW, (int64_t)(negotiationTimeout * NSEC_PER_SEC)));
dispatch_time(DISPATCH_TIME_NOW,
(int64_t)(negotiationTimeout * NSEC_PER_SEC)));
XCTAssertEqual(pc1.signalingState, RTCSignalingStateStable);
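For orientation, the rollback flow this test drives, reduced to a skeleton: apply a local offer, then rewind with an empty rollback description. A hedged sketch, not the test's exact call order (`pc` and `constraints` stand in for the objects built in the test; rollback via setLocalDescription is one valid route back to the stable state):

RTC_OBJC_TYPE(RTCSessionDescription) *rollback =
    [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeRollback
                                                           sdp:@""];
[pc offerForConstraints:constraints
      completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *offer,
                          NSError *error) {
        [pc setLocalDescription:offer
              completionHandler:^(NSError *error) {
                // Applying the rollback should land the connection back in
                // RTCSignalingStateStable, which the assertion above checks.
                [pc setLocalDescription:rollback
                      completionHandler:^(NSError *error){
                      }];
              }];
      }];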
@@ -377,10 +406,13 @@
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"]
];
encoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
decoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoder
decoderFactory:decoder];
encoder = [[MockVideoEncoderDecoderFactory alloc]
initWithSupportedCodecs:supportedCodecs];
decoder = [[MockVideoEncoderDecoderFactory alloc]
initWithSupportedCodecs:supportedCodecs];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
initWithEncoderFactory:encoder
decoderFactory:decoder];
RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
[factory rtpSenderCapabilitiesForKind:kRTCMediaStreamTrackKindVideo];
@@ -406,10 +438,13 @@
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"]
];
encoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
decoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoder
decoderFactory:decoder];
encoder = [[MockVideoEncoderDecoderFactory alloc]
initWithSupportedCodecs:supportedCodecs];
decoder = [[MockVideoEncoderDecoderFactory alloc]
initWithSupportedCodecs:supportedCodecs];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
initWithEncoderFactory:encoder
decoderFactory:decoder];
RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
[factory rtpReceiverCapabilitiesForKind:kRTCMediaStreamTrackKindVideo];
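Both capability tests share one query shape. A sketch against a stock factory (the mock encoder/decoder factory is test-local); the `codecs` array and the codec properties are assumed from the assertions elsewhere in this suite:

RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
    [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
// Sender- and receiver-side capabilities are symmetric lookups per track kind.
RTC_OBJC_TYPE(RTCRtpCapabilities) *sendCaps =
    [factory rtpSenderCapabilitiesForKind:kRTCMediaStreamTrackKindVideo];
RTC_OBJC_TYPE(RTCRtpCapabilities) *recvCaps =
    [factory rtpReceiverCapabilitiesForKind:kRTCMediaStreamTrackKindVideo];
for (RTC_OBJC_TYPE(RTCRtpCodecCapability) *codec in sendCaps.codecs) {
  NSLog(@"send codec %@ / clock %@", codec.name, codec.clockRate);
}
(void)recvCaps;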
@@ -426,10 +461,12 @@
- (void)testSetCodecPreferences {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:nil
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@@ -439,20 +476,24 @@
];
MockVideoEncoderDecoderFactory *encoder =
[[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
[[MockVideoEncoderDecoderFactory alloc]
initWithSupportedCodecs:supportedCodecs];
MockVideoEncoderDecoderFactory *decoder =
[[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
[[MockVideoEncoderDecoderFactory alloc]
initWithSupportedCodecs:supportedCodecs];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
RTC_OBJC_TYPE(RTCRtpTransceiver) * tranceiver;
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoder
decoderFactory:decoder];
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
initWithEncoderFactory:encoder
decoderFactory:decoder];
peerConnection = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo init:init];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo
init:init];
XCTAssertNotNil(tranceiver);
RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
@@ -468,7 +509,8 @@
XCTAssertNotNil(targetCodec);
NSError *error = nil;
BOOL result = [tranceiver setCodecPreferences:@[ targetCodec ] error:&error];
BOOL result = [tranceiver setCodecPreferences:@[ targetCodec ]
error:&error];
XCTAssertTrue(result);
XCTAssertNil(error);
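In isolation, the preference-narrowing step above looks like this (reusing the test's `capabilities` and `tranceiver`; matching on the name "VP8" mirrors the rtpmap assertion further down):

// Pick one codec out of the queried capabilities...
RTC_OBJC_TYPE(RTCRtpCodecCapability) *target = nil;
for (RTC_OBJC_TYPE(RTCRtpCodecCapability) *codec in capabilities.codecs) {
  if ([codec.name isEqualToString:@"VP8"]) {
    target = codec;
    break;
  }
}
// ...and make it the only candidate the transceiver will offer.
NSError *error = nil;
BOOL ok = [tranceiver setCodecPreferences:@[ target ] error:&error];
NSCAssert(ok && error == nil, @"setCodecPreferences failed");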
@@ -478,8 +520,9 @@
__block BOOL completed = NO;
[peerConnection
offerForConstraints:constraints
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
NSError *_Nullable error) {
completionHandler:^(
RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
NSError *_Nullable error) {
XCTAssertNil(error);
XCTAssertNotNil(sdp);
@@ -489,10 +532,10 @@
XCTAssertNotNil(targetCodec.preferredPayloadType);
XCTAssertNotNil(targetCodec.clockRate);
NSString *expected =
[NSString stringWithFormat:@"a=rtpmap:%i VP8/%i",
targetCodec.preferredPayloadType.intValue,
targetCodec.clockRate.intValue];
NSString *expected = [NSString
stringWithFormat:@"a=rtpmap:%i VP8/%i",
targetCodec.preferredPayloadType.intValue,
targetCodec.clockRate.intValue];
XCTAssertTrue([expected isEqualToString:rtpMaps[0]]);
@@ -505,7 +548,8 @@
factory = nil;
tranceiver = nil;
dispatch_semaphore_wait(semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
dispatch_semaphore_wait(
semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
XCTAssertTrue(completed);
}
}
@@ -513,10 +557,12 @@
- (void)testSetHeaderExtensionsToNegotiate {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:nil
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@@ -528,18 +574,22 @@
peerConnection = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo init:init];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo
init:init];
XCTAssertNotNil(tranceiver);
NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *> *headerExtensionsToNegotiate =
tranceiver.headerExtensionsToNegotiate;
NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *>
*headerExtensionsToNegotiate = tranceiver.headerExtensionsToNegotiate;
__block RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *targetExtension = nil;
__block RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *targetExtension =
nil;
[headerExtensionsToNegotiate
enumerateObjectsUsingBlock:^(RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
NSUInteger idx,
BOOL * stop) {
if ([extension.uri isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
enumerateObjectsUsingBlock:^(
RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
NSUInteger idx,
BOOL * stop) {
if ([extension.uri
isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
targetExtension = extension;
} else {
extension.direction = RTCRtpTransceiverDirectionStopped;
@@ -547,8 +597,9 @@
}];
NSError *error = nil;
BOOL isOK = [tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
error:&error];
BOOL isOK =
[tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
error:&error];
XCTAssertNil(error);
XCTAssertTrue(isOK);
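The same write-back pattern, condensed (again reusing the test's `tranceiver`): fetch the negotiable list, stop everything except sdes:mid, then hand the list back. The error test two hunks down stops a mandatory extension instead.

NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *> *extensions =
    tranceiver.headerExtensionsToNegotiate;
for (RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *ext in extensions) {
  // Leave only the MID extension active; stopped ones drop out of the offer.
  if (![ext.uri isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
    ext.direction = RTCRtpTransceiverDirectionStopped;
  }
}
NSError *error = nil;
BOOL ok = [tranceiver setHeaderExtensionsToNegotiate:extensions error:&error];
NSCAssert(ok && error == nil, @"header-extension write-back failed");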
@@ -556,35 +607,38 @@
dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
__block BOOL completed = NO;
[peerConnection offerForConstraints:constraints
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
NSError *_Nullable error) {
XCTAssertNil(error);
XCTAssertNotNil(sdp);
[peerConnection
offerForConstraints:constraints
completionHandler:^(
RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
NSError *_Nullable error) {
XCTAssertNil(error);
XCTAssertNotNil(sdp);
NSArray<NSString *> *extMaps = [self extMapsFromSDP:sdp.sdp];
XCTAssertEqual(1, extMaps.count);
NSArray<NSString *> *extMaps = [self extMapsFromSDP:sdp.sdp];
XCTAssertEqual(1, extMaps.count);
XCTAssertNotNil(targetExtension);
XCTAssertNotNil(targetExtension.preferredId);
XCTAssertNotNil(targetExtension);
XCTAssertNotNil(targetExtension.preferredId);
NSString *expected =
[NSString stringWithFormat:@"a=extmap:%i %@",
targetExtension.preferredId.intValue,
targetExtension.uri];
NSString *expected = [NSString
stringWithFormat:@"a=extmap:%i %@",
targetExtension.preferredId.intValue,
targetExtension.uri];
XCTAssertTrue([expected isEqualToString:extMaps[0]]);
XCTAssertTrue([expected isEqualToString:extMaps[0]]);
completed = YES;
dispatch_semaphore_signal(semaphore);
}];
completed = YES;
dispatch_semaphore_signal(semaphore);
}];
[peerConnection close];
peerConnection = nil;
factory = nil;
tranceiver = nil;
dispatch_semaphore_wait(semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
dispatch_semaphore_wait(
semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
XCTAssertTrue(completed);
}
}
@@ -592,10 +646,12 @@
- (void)testSetHeaderExtensionsToNegotiateError {
@autoreleasepool {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:nil
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@@ -607,25 +663,29 @@
peerConnection = [factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:nil];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo init:init];
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo
init:init];
XCTAssertNotNil(tranceiver);
NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *> *headerExtensionsToNegotiate =
tranceiver.headerExtensionsToNegotiate;
NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *>
*headerExtensionsToNegotiate = tranceiver.headerExtensionsToNegotiate;
[headerExtensionsToNegotiate
enumerateObjectsUsingBlock:^(RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
NSUInteger idx,
BOOL * stop) {
if ([extension.uri isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
enumerateObjectsUsingBlock:^(
RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
NSUInteger idx,
BOOL * stop) {
if ([extension.uri
isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
extension.direction = RTCRtpTransceiverDirectionStopped;
}
}];
// Stopping a mandatory extension should yield an error
NSError *error = nil;
BOOL isOK = [tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
error:&error];
BOOL isOK =
[tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
error:&error];
XCTAssertNotNil(error);
XCTAssertFalse(isOK);
@@ -642,56 +702,64 @@
__weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
__weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2;
RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
[[RTC_OBJC_TYPE(RTCMediaConstraints)
alloc] initWithMandatoryConstraints:@{
kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
}
optionalConstraints:nil];
optionalConstraints:nil];
dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
[weakPC1 offerForConstraints:sdpConstraints
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
XCTAssertNil(error);
XCTAssertNotNil(offer);
[weakPC1
setLocalDescription:offer
completionHandler:^(NSError *error) {
XCTAssertNil(error);
[weakPC2
setRemoteDescription:offer
completionHandler:^(NSError *error) {
XCTAssertNil(error);
[weakPC2
answerForConstraints:sdpConstraints
completionHandler:^(
RTC_OBJC_TYPE(RTCSessionDescription) * answer,
NSError * error) {
XCTAssertNil(error);
XCTAssertNotNil(answer);
[weakPC2
setLocalDescription:answer
completionHandler:^(NSError *error) {
XCTAssertNil(error);
[weakPC1
setRemoteDescription:answer
completionHandler:^(NSError *error) {
XCTAssertNil(error);
dispatch_semaphore_signal(negotiatedSem);
}];
}];
}];
}];
}];
}];
[weakPC1
offerForConstraints:sdpConstraints
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer,
NSError * error) {
XCTAssertNil(error);
XCTAssertNotNil(offer);
[weakPC1
setLocalDescription:offer
completionHandler:^(NSError *error) {
XCTAssertNil(error);
[weakPC2
setRemoteDescription:offer
completionHandler:^(NSError *error) {
XCTAssertNil(error);
[weakPC2
answerForConstraints:sdpConstraints
completionHandler:^(
RTC_OBJC_TYPE(RTCSessionDescription) *
answer,
NSError * error) {
XCTAssertNil(error);
XCTAssertNotNil(answer);
[weakPC2
setLocalDescription:answer
completionHandler:^(NSError *error) {
XCTAssertNil(error);
[weakPC1
setRemoteDescription:answer
completionHandler:^(
NSError *error) {
XCTAssertNil(error);
dispatch_semaphore_signal(
negotiatedSem);
}];
}];
}];
}];
}];
}];
return 0 ==
dispatch_semaphore_wait(negotiatedSem,
dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)));
dispatch_time(DISPATCH_TIME_NOW,
(int64_t)(timeout * NSEC_PER_SEC)));
}
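The helper above chains five completion handlers; the idiom that makes it testable is a counting semaphore gating the synchronous test thread. Reduced to one async hop (`pc` and `constraints` stand in for objects the helper receives):

// Block the test body until an async handler fires, or give up on timeout.
dispatch_semaphore_t done = dispatch_semaphore_create(0);
[pc offerForConstraints:constraints
      completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *offer,
                          NSError *error) {
        dispatch_semaphore_signal(done);
      }];
NSTimeInterval timeout = 15;
BOOL signaled =
    0 == dispatch_semaphore_wait(
             done,
             dispatch_time(DISPATCH_TIME_NOW,
                           (int64_t)(timeout * NSEC_PER_SEC)));
NSCAssert(signaled, @"negotiation step timed out");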
- (NSArray<NSString *> *)rtpMapsFromSDP:(NSString *)sdp {
NSMutableArray<NSString *> *rtpMaps = [NSMutableArray new];
NSArray *sdpLines =
[sdp componentsSeparatedByCharactersInSet:[NSCharacterSet newlineCharacterSet]];
[sdp componentsSeparatedByCharactersInSet:[NSCharacterSet
newlineCharacterSet]];
for (NSString *line in sdpLines) {
if ([line hasPrefix:@"a=rtpmap"]) {
[rtpMaps addObject:line];
@@ -703,7 +771,8 @@
- (NSArray<NSString *> *)extMapsFromSDP:(NSString *)sdp {
NSMutableArray<NSString *> *extMaps = [NSMutableArray new];
NSArray *sdpLines =
[sdp componentsSeparatedByCharactersInSet:[NSCharacterSet newlineCharacterSet]];
[sdp componentsSeparatedByCharactersInSet:[NSCharacterSet
newlineCharacterSet]];
for (NSString *line in sdpLines) {
if ([line hasPrefix:@"a=extmap:"]) {
[extMaps addObject:line];