Format all .m objc files
Formatting done via:

  git ls-files | grep -E '.*\.m$' | xargs clang-format -i

After applying the command, I manually excluded Matlab .m files that I
will handle separately.

No-Iwyu: Includes didn't change and it isn't related to formatting
Bug: webrtc:42225392
Change-Id: I40d11fd6b650ee0d90d92cbd6fc6aa6c78e1fea3
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/373887
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43706}
parent 2c96934699
commit fc7e874677
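For reference, the same pass could be done in one shot by filtering the file
list before formatting instead of reverting the MATLAB files afterwards. A
sketch, not the exact command used in this change (the "matlab" path filter
is an assumption about where those files live):

  # Format tracked Objective-C .m files, skipping MATLAB sources.
  # Assumes MATLAB .m files sit under a directory named "matlab";
  # adjust the grep pattern to the actual repository layout.
  git ls-files '*.m' \
    | grep -viE '(^|/)matlab/' \
    | xargs clang-format -i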
@ -191,15 +191,18 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}
|
}
|
||||||
if (shouldGetStats) {
|
if (shouldGetStats) {
|
||||||
__weak ARDAppClient *weakSelf = self;
|
__weak ARDAppClient *weakSelf = self;
|
||||||
_statsTimer = [[ARDTimerProxy alloc] initWithInterval:1
|
_statsTimer = [[ARDTimerProxy alloc]
|
||||||
|
initWithInterval:1
|
||||||
repeats:YES
|
repeats:YES
|
||||||
timerHandler:^{
|
timerHandler:^{
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
[strongSelf.peerConnection statisticsWithCompletionHandler:^(
|
[strongSelf.peerConnection
|
||||||
|
statisticsWithCompletionHandler:^(
|
||||||
RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
|
RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
|
||||||
dispatch_async(dispatch_get_main_queue(), ^{
|
dispatch_async(dispatch_get_main_queue(), ^{
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
[strongSelf.delegate appClient:strongSelf didGetStats:stats];
|
[strongSelf.delegate appClient:strongSelf
|
||||||
|
didGetStats:stats];
|
||||||
});
|
});
|
||||||
}];
|
}];
|
||||||
}];
|
}];
|
||||||
@ -232,13 +235,14 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
|
RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
|
||||||
[[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
|
[[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
|
||||||
encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
|
encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
|
||||||
_factory =
|
_factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
|
||||||
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory
|
initWithEncoderFactory:encoderFactory
|
||||||
decoderFactory:decoderFactory];
|
decoderFactory:decoderFactory];
|
||||||
|
|
||||||
#if defined(WEBRTC_IOS)
|
#if defined(WEBRTC_IOS)
|
||||||
if (kARDAppClientEnableTracing) {
|
if (kARDAppClientEnableTracing) {
|
||||||
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
|
NSString *filePath =
|
||||||
|
[self documentsFilePathForFileName:@"webrtc-trace.txt"];
|
||||||
RTCStartInternalCapture(filePath);
|
RTCStartInternalCapture(filePath);
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
@ -248,7 +252,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
[_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
|
[_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
|
||||||
NSError *error) {
|
NSError *error) {
|
||||||
if (error) {
|
if (error) {
|
||||||
RTCLogError(@"Error retrieving TURN servers: %@", error.localizedDescription);
|
RTCLogError(@"Error retrieving TURN servers: %@",
|
||||||
|
error.localizedDescription);
|
||||||
}
|
}
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
[strongSelf.iceServers addObjectsFromArray:turnServers];
|
[strongSelf.iceServers addObjectsFromArray:turnServers];
|
||||||
@ -257,7 +262,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}];
|
}];
|
||||||
|
|
||||||
// Join room on room server.
|
// Join room on room server.
|
||||||
[_roomServerClient joinRoomWithRoomId:roomId
|
[_roomServerClient
|
||||||
|
joinRoomWithRoomId:roomId
|
||||||
isLoopback:isLoopback
|
isLoopback:isLoopback
|
||||||
completionHandler:^(ARDJoinResponse *response, NSError *error) {
|
completionHandler:^(ARDJoinResponse *response, NSError *error) {
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
@ -388,7 +394,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
|
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
|
||||||
didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
|
didStartReceivingOnTransceiver:
|
||||||
|
(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
|
||||||
RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track;
|
RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track;
|
||||||
RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
|
RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
|
||||||
}
|
}
|
||||||
@ -398,7 +405,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
RTCLog(@"Stream was removed.");
|
RTCLog(@"Stream was removed.");
|
||||||
}
|
}
|
||||||
|
|
||||||
- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
|
- (void)peerConnectionShouldNegotiate:
|
||||||
|
(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
|
||||||
RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
|
RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -430,8 +438,10 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
|
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
|
||||||
didFailToGatherIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event {
|
didFailToGatherIceCandidate:
|
||||||
RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, errorCode: %d, "
|
(RTC_OBJC_TYPE(RTCIceCandidateErrorEvent) *)event {
|
||||||
|
RTCLog(@"Failed to gather ICE candidate. address: %@, port: %d, url: %@, "
|
||||||
|
@"errorCode: %d, "
|
||||||
@"errorText: %@",
|
@"errorText: %@",
|
||||||
event.address,
|
event.address,
|
||||||
event.port,
|
event.port,
|
||||||
@ -441,7 +451,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
|
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
|
||||||
didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
|
didRemoveIceCandidates:
|
||||||
|
(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
|
||||||
dispatch_async(dispatch_get_main_queue(), ^{
|
dispatch_async(dispatch_get_main_queue(), ^{
|
||||||
ARDICECandidateRemovalMessage *message =
|
ARDICECandidateRemovalMessage *message =
|
||||||
[[ARDICECandidateRemovalMessage alloc]
|
[[ARDICECandidateRemovalMessage alloc]
|
||||||
@ -484,7 +495,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
__weak ARDAppClient *weakSelf = self;
|
__weak ARDAppClient *weakSelf = self;
|
||||||
[self.peerConnection setLocalDescription:sdp
|
[self.peerConnection
|
||||||
|
setLocalDescription:sdp
|
||||||
completionHandler:^(NSError *error) {
|
completionHandler:^(NSError *error) {
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
[strongSelf peerConnection:strongSelf.peerConnection
|
[strongSelf peerConnection:strongSelf.peerConnection
|
||||||
@ -516,11 +528,13 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
// If we're answering and we've just set the remote offer we need to create
|
// If we're answering and we've just set the remote offer we need to create
|
||||||
// an answer and set the local description.
|
// an answer and set the local description.
|
||||||
if (!self.isInitiator && !self.peerConnection.localDescription) {
|
if (!self.isInitiator && !self.peerConnection.localDescription) {
|
||||||
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints];
|
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
||||||
|
[self defaultAnswerConstraints];
|
||||||
__weak ARDAppClient *weakSelf = self;
|
__weak ARDAppClient *weakSelf = self;
|
||||||
[self.peerConnection
|
[self.peerConnection
|
||||||
answerForConstraints:constraints
|
answerForConstraints:constraints
|
||||||
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
|
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp,
|
||||||
|
NSError * error) {
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
[strongSelf peerConnection:strongSelf.peerConnection
|
[strongSelf peerConnection:strongSelf.peerConnection
|
||||||
didCreateSessionDescription:sdp
|
didCreateSessionDescription:sdp
|
||||||
@ -562,10 +576,15 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
self.state = kARDAppClientStateConnected;
|
self.state = kARDAppClientStateConnected;
|
||||||
|
|
||||||
// Create peer connection.
|
// Create peer connection.
|
||||||
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints];
|
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
||||||
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
|
[self defaultPeerConnectionConstraints];
|
||||||
RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate)
|
RTC_OBJC_TYPE(RTCConfiguration) *config =
|
||||||
generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
|
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
|
||||||
|
RTC_OBJC_TYPE(RTCCertificate) *pcert =
|
||||||
|
[RTC_OBJC_TYPE(RTCCertificate) generateCertificateWithParams:@{
|
||||||
|
@"expires" : @100000,
|
||||||
|
@"name" : @"RSASSA-PKCS1-v1_5"
|
||||||
|
}];
|
||||||
config.iceServers = _iceServers;
|
config.iceServers = _iceServers;
|
||||||
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
|
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
|
||||||
config.certificate = pcert;
|
config.certificate = pcert;
|
||||||
@ -580,7 +599,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
__weak ARDAppClient *weakSelf = self;
|
__weak ARDAppClient *weakSelf = self;
|
||||||
[_peerConnection
|
[_peerConnection
|
||||||
offerForConstraints:[self defaultOfferConstraints]
|
offerForConstraints:[self defaultOfferConstraints]
|
||||||
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
|
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp,
|
||||||
|
NSError * error) {
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
[strongSelf peerConnection:strongSelf.peerConnection
|
[strongSelf peerConnection:strongSelf.peerConnection
|
||||||
didCreateSessionDescription:sdp
|
didCreateSessionDescription:sdp
|
||||||
@ -593,17 +613,22 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
#if defined(WEBRTC_IOS)
|
#if defined(WEBRTC_IOS)
|
||||||
// Start event log.
|
// Start event log.
|
||||||
if (kARDAppClientEnableRtcEventLog) {
|
if (kARDAppClientEnableRtcEventLog) {
|
||||||
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-rtceventlog"];
|
NSString *filePath =
|
||||||
if (![_peerConnection startRtcEventLogWithFilePath:filePath
|
[self documentsFilePathForFileName:@"webrtc-rtceventlog"];
|
||||||
maxSizeInBytes:kARDAppClientRtcEventLogMaxSizeInBytes]) {
|
if (![_peerConnection
|
||||||
|
startRtcEventLogWithFilePath:filePath
|
||||||
|
maxSizeInBytes:
|
||||||
|
kARDAppClientRtcEventLogMaxSizeInBytes]) {
|
||||||
RTCLogError(@"Failed to start event logging.");
|
RTCLogError(@"Failed to start event logging.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Start aecdump diagnostic recording.
|
// Start aecdump diagnostic recording.
|
||||||
if ([_settings currentCreateAecDumpSettingFromStore]) {
|
if ([_settings currentCreateAecDumpSettingFromStore]) {
|
||||||
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-audio.aecdump"];
|
NSString *filePath =
|
||||||
if (![_factory startAecDumpWithFilePath:filePath
|
[self documentsFilePathForFileName:@"webrtc-audio.aecdump"];
|
||||||
|
if (![_factory
|
||||||
|
startAecDumpWithFilePath:filePath
|
||||||
maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) {
|
maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) {
|
||||||
RTCLogError(@"Failed to start aec dump.");
|
RTCLogError(@"Failed to start aec dump.");
|
||||||
}
|
}
|
||||||
@ -635,7 +660,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
case kARDSignalingMessageTypeAnswer: {
|
case kARDSignalingMessageTypeAnswer: {
|
||||||
ARDSessionDescriptionMessage *sdpMessage =
|
ARDSessionDescriptionMessage *sdpMessage =
|
||||||
(ARDSessionDescriptionMessage *)message;
|
(ARDSessionDescriptionMessage *)message;
|
||||||
RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
|
RTC_OBJC_TYPE(RTCSessionDescription) *description =
|
||||||
|
sdpMessage.sessionDescription;
|
||||||
__weak ARDAppClient *weakSelf = self;
|
__weak ARDAppClient *weakSelf = self;
|
||||||
[_peerConnection setRemoteDescription:description
|
[_peerConnection setRemoteDescription:description
|
||||||
completionHandler:^(NSError *error) {
|
completionHandler:^(NSError *error) {
|
||||||
@ -653,7 +679,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
completionHandler:^(NSError *error) {
|
completionHandler:^(NSError *error) {
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
if (error) {
|
if (error) {
|
||||||
[strongSelf.delegate appClient:strongSelf didError:error];
|
[strongSelf.delegate appClient:strongSelf
|
||||||
|
didError:error];
|
||||||
}
|
}
|
||||||
}];
|
}];
|
||||||
break;
|
break;
|
||||||
@ -679,11 +706,11 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
- (void)sendSignalingMessage:(ARDSignalingMessage *)message {
|
- (void)sendSignalingMessage:(ARDSignalingMessage *)message {
|
||||||
if (_isInitiator) {
|
if (_isInitiator) {
|
||||||
__weak ARDAppClient *weakSelf = self;
|
__weak ARDAppClient *weakSelf = self;
|
||||||
[_roomServerClient sendMessage:message
|
[_roomServerClient
|
||||||
|
sendMessage:message
|
||||||
forRoomId:_roomId
|
forRoomId:_roomId
|
||||||
clientId:_clientId
|
clientId:_clientId
|
||||||
completionHandler:^(ARDMessageResponse *response,
|
completionHandler:^(ARDMessageResponse *response, NSError *error) {
|
||||||
NSError *error) {
|
|
||||||
ARDAppClient *strongSelf = weakSelf;
|
ARDAppClient *strongSelf = weakSelf;
|
||||||
if (error) {
|
if (error) {
|
||||||
[strongSelf.delegate appClient:strongSelf didError:error];
|
[strongSelf.delegate appClient:strongSelf didError:error];
|
||||||
@ -705,26 +732,30 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) {
|
for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) {
|
||||||
if (sender.track != nil) {
|
if (sender.track != nil) {
|
||||||
if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) {
|
if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) {
|
||||||
[self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender];
|
[self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore]
|
||||||
|
forVideoSender:sender];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
|
- (void)setMaxBitrate:(NSNumber *)maxBitrate
|
||||||
|
forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
|
||||||
if (maxBitrate.intValue <= 0) {
|
if (maxBitrate.intValue <= 0) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters;
|
RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters;
|
||||||
for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) {
|
for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) *
|
||||||
|
encoding in parametersToModify.encodings) {
|
||||||
encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
|
encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
|
||||||
}
|
}
|
||||||
[sender setParameters:parametersToModify];
|
[sender setParameters:parametersToModify];
|
||||||
}
|
}
|
||||||
|
|
||||||
- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver {
|
- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver {
|
||||||
for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) {
|
for (RTC_OBJC_TYPE(RTCRtpTransceiver) *
|
||||||
|
transceiver in _peerConnection.transceivers) {
|
||||||
if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
|
if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
|
||||||
return transceiver;
|
return transceiver;
|
||||||
}
|
}
|
||||||
@ -733,20 +764,24 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (void)createMediaSenders {
|
- (void)createMediaSenders {
|
||||||
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints];
|
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
||||||
RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints];
|
[self defaultMediaAudioConstraints];
|
||||||
RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source
|
RTC_OBJC_TYPE(RTCAudioSource) *source =
|
||||||
trackId:kARDAudioTrackId];
|
[_factory audioSourceWithConstraints:constraints];
|
||||||
|
RTC_OBJC_TYPE(RTCAudioTrack) *track =
|
||||||
|
[_factory audioTrackWithSource:source trackId:kARDAudioTrackId];
|
||||||
[_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]];
|
[_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]];
|
||||||
_localVideoTrack = [self createLocalVideoTrack];
|
_localVideoTrack = [self createLocalVideoTrack];
|
||||||
if (_localVideoTrack) {
|
if (_localVideoTrack) {
|
||||||
[_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]];
|
[_peerConnection addTrack:_localVideoTrack
|
||||||
|
streamIds:@[ kARDMediaStreamId ]];
|
||||||
[_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
|
[_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
|
||||||
// We can set up rendering for the remote track right away since the transceiver already has an
|
// We can set up rendering for the remote track right away since the
|
||||||
// RTC_OBJC_TYPE(RTCRtpReceiver) with a track. The track will automatically get unmuted and
|
// transceiver already has an RTC_OBJC_TYPE(RTCRtpReceiver) with a track.
|
||||||
// produce frames once RTP is received.
|
// The track will automatically get unmuted and produce frames once RTP is
|
||||||
RTC_OBJC_TYPE(RTCVideoTrack) *track =
|
// received.
|
||||||
(RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track);
|
RTC_OBJC_TYPE(RTCVideoTrack) *track = (RTC_OBJC_TYPE(RTCVideoTrack) *)(
|
||||||
|
[self videoTransceiver].receiver.track);
|
||||||
[_delegate appClient:self didReceiveRemoteVideoTrack:track];
|
[_delegate appClient:self didReceiveRemoteVideoTrack:track];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -789,8 +824,7 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}
|
}
|
||||||
// Open WebSocket connection.
|
// Open WebSocket connection.
|
||||||
if (!_channel) {
|
if (!_channel) {
|
||||||
_channel =
|
_channel = [[ARDWebSocketChannel alloc] initWithURL:_websocketURL
|
||||||
[[ARDWebSocketChannel alloc] initWithURL:_websocketURL
|
|
||||||
restURL:_websocketRestURL
|
restURL:_websocketRestURL
|
||||||
delegate:self];
|
delegate:self];
|
||||||
if (_isLoopback) {
|
if (_isLoopback) {
|
||||||
@ -810,7 +844,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints {
|
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints {
|
||||||
NSDictionary *mandatoryConstraints = @{};
|
NSDictionary *mandatoryConstraints = @{};
|
||||||
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
||||||
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
|
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
|
||||||
|
initWithMandatoryConstraints:mandatoryConstraints
|
||||||
optionalConstraints:nil];
|
optionalConstraints:nil];
|
||||||
return constraints;
|
return constraints;
|
||||||
}
|
}
|
||||||
@ -820,12 +855,11 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints {
|
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints {
|
||||||
NSDictionary *mandatoryConstraints = @{
|
NSDictionary *mandatoryConstraints =
|
||||||
@"OfferToReceiveAudio" : @"true",
|
@{@"OfferToReceiveAudio" : @"true", @"OfferToReceiveVideo" : @"true"};
|
||||||
@"OfferToReceiveVideo" : @"true"
|
|
||||||
};
|
|
||||||
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
||||||
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
|
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
|
||||||
|
initWithMandatoryConstraints:mandatoryConstraints
|
||||||
optionalConstraints:nil];
|
optionalConstraints:nil];
|
||||||
return constraints;
|
return constraints;
|
||||||
}
|
}
|
||||||
@ -837,7 +871,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
NSString *value = _isLoopback ? @"false" : @"true";
|
NSString *value = _isLoopback ? @"false" : @"true";
|
||||||
NSDictionary *optionalConstraints = @{@"DtlsSrtpKeyAgreement" : value};
|
NSDictionary *optionalConstraints = @{@"DtlsSrtpKeyAgreement" : value};
|
||||||
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
||||||
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
|
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
|
||||||
|
initWithMandatoryConstraints:nil
|
||||||
optionalConstraints:optionalConstraints];
|
optionalConstraints:optionalConstraints];
|
||||||
return constraints;
|
return constraints;
|
||||||
}
|
}
|
||||||
@ -850,7 +885,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
case kARDJoinResultTypeSuccess:
|
case kARDJoinResultTypeSuccess:
|
||||||
break;
|
break;
|
||||||
case kARDJoinResultTypeUnknown: {
|
case kARDJoinResultTypeUnknown: {
|
||||||
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
|
error = [[NSError alloc]
|
||||||
|
initWithDomain:kARDAppClientErrorDomain
|
||||||
code:kARDAppClientErrorUnknown
|
code:kARDAppClientErrorUnknown
|
||||||
userInfo:@{
|
userInfo:@{
|
||||||
NSLocalizedDescriptionKey : @"Unknown error.",
|
NSLocalizedDescriptionKey : @"Unknown error.",
|
||||||
@ -858,7 +894,8 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case kARDJoinResultTypeFull: {
|
case kARDJoinResultTypeFull: {
|
||||||
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
|
error =
|
||||||
|
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
|
||||||
code:kARDAppClientErrorRoomFull
|
code:kARDAppClientErrorRoomFull
|
||||||
userInfo:@{
|
userInfo:@{
|
||||||
NSLocalizedDescriptionKey : @"Room is full.",
|
NSLocalizedDescriptionKey : @"Room is full.",
|
||||||
@ -875,21 +912,24 @@ static int const kKbpsMultiplier = 1000;
|
|||||||
case kARDMessageResultTypeSuccess:
|
case kARDMessageResultTypeSuccess:
|
||||||
break;
|
break;
|
||||||
case kARDMessageResultTypeUnknown:
|
case kARDMessageResultTypeUnknown:
|
||||||
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
|
error = [[NSError alloc]
|
||||||
|
initWithDomain:kARDAppClientErrorDomain
|
||||||
code:kARDAppClientErrorUnknown
|
code:kARDAppClientErrorUnknown
|
||||||
userInfo:@{
|
userInfo:@{
|
||||||
NSLocalizedDescriptionKey : @"Unknown error.",
|
NSLocalizedDescriptionKey : @"Unknown error.",
|
||||||
}];
|
}];
|
||||||
break;
|
break;
|
||||||
case kARDMessageResultTypeInvalidClient:
|
case kARDMessageResultTypeInvalidClient:
|
||||||
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
|
error = [[NSError alloc]
|
||||||
|
initWithDomain:kARDAppClientErrorDomain
|
||||||
code:kARDAppClientErrorInvalidClient
|
code:kARDAppClientErrorInvalidClient
|
||||||
userInfo:@{
|
userInfo:@{
|
||||||
NSLocalizedDescriptionKey : @"Invalid client.",
|
NSLocalizedDescriptionKey : @"Invalid client.",
|
||||||
}];
|
}];
|
||||||
break;
|
break;
|
||||||
case kARDMessageResultTypeInvalidRoom:
|
case kARDMessageResultTypeInvalidRoom:
|
||||||
error = [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
|
error =
|
||||||
|
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
|
||||||
code:kARDAppClientErrorInvalidRoom
|
code:kARDAppClientErrorInvalidRoom
|
||||||
userInfo:@{
|
userInfo:@{
|
||||||
NSLocalizedDescriptionKey : @"Invalid room.",
|
NSLocalizedDescriptionKey : @"Invalid room.",
|
||||||
|
|||||||
@ -18,10 +18,8 @@
|
|||||||
#import "ARDUtilities.h"
|
#import "ARDUtilities.h"
|
||||||
|
|
||||||
// TODO(tkchin): move these to a configuration object.
|
// TODO(tkchin): move these to a configuration object.
|
||||||
static NSString * const kARDRoomServerHostUrl =
|
static NSString *const kARDRoomServerHostUrl = @"https://appr.tc";
|
||||||
@"https://appr.tc";
|
static NSString *const kARDRoomServerJoinFormat = @"https://appr.tc/join/%@";
|
||||||
static NSString * const kARDRoomServerJoinFormat =
|
|
||||||
@"https://appr.tc/join/%@";
|
|
||||||
static NSString *const kARDRoomServerJoinFormatLoopback =
|
static NSString *const kARDRoomServerJoinFormatLoopback =
|
||||||
@"https://appr.tc/join/%@?debug=loopback";
|
@"https://appr.tc/join/%@?debug=loopback";
|
||||||
static NSString *const kARDRoomServerMessageFormat =
|
static NSString *const kARDRoomServerMessageFormat =
|
||||||
@ -47,8 +45,7 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
|
|||||||
urlString =
|
urlString =
|
||||||
[NSString stringWithFormat:kARDRoomServerJoinFormatLoopback, roomId];
|
[NSString stringWithFormat:kARDRoomServerJoinFormatLoopback, roomId];
|
||||||
} else {
|
} else {
|
||||||
urlString =
|
urlString = [NSString stringWithFormat:kARDRoomServerJoinFormat, roomId];
|
||||||
[NSString stringWithFormat:kARDRoomServerJoinFormat, roomId];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
NSURL *roomURL = [NSURL URLWithString:urlString];
|
NSURL *roomURL = [NSURL URLWithString:urlString];
|
||||||
@ -57,14 +54,16 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
|
|||||||
request.HTTPMethod = @"POST";
|
request.HTTPMethod = @"POST";
|
||||||
[NSURLConnection
|
[NSURLConnection
|
||||||
sendAsyncRequest:request
|
sendAsyncRequest:request
|
||||||
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
|
completionHandler:^(
|
||||||
|
NSURLResponse *response __unused, NSData *data, NSError *error) {
|
||||||
if (error) {
|
if (error) {
|
||||||
if (completionHandler) {
|
if (completionHandler) {
|
||||||
completionHandler(nil, error);
|
completionHandler(nil, error);
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
ARDJoinResponse *joinResponse = [ARDJoinResponse responseFromJSONData:data];
|
ARDJoinResponse *joinResponse =
|
||||||
|
[ARDJoinResponse responseFromJSONData:data];
|
||||||
if (!joinResponse) {
|
if (!joinResponse) {
|
||||||
if (completionHandler) {
|
if (completionHandler) {
|
||||||
NSError *error = [[self class] badResponseError];
|
NSError *error = [[self class] badResponseError];
|
||||||
@ -89,8 +88,7 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
|
|||||||
|
|
||||||
NSData *data = [message JSONData];
|
NSData *data = [message JSONData];
|
||||||
NSString *urlString =
|
NSString *urlString =
|
||||||
[NSString stringWithFormat:
|
[NSString stringWithFormat:kARDRoomServerMessageFormat, roomId, clientId];
|
||||||
kARDRoomServerMessageFormat, roomId, clientId];
|
|
||||||
NSURL *url = [NSURL URLWithString:urlString];
|
NSURL *url = [NSURL URLWithString:urlString];
|
||||||
RTCLog(@"C->RS POST: %@", message);
|
RTCLog(@"C->RS POST: %@", message);
|
||||||
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
|
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
|
||||||
@ -98,14 +96,16 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
|
|||||||
request.HTTPBody = data;
|
request.HTTPBody = data;
|
||||||
[NSURLConnection
|
[NSURLConnection
|
||||||
sendAsyncRequest:request
|
sendAsyncRequest:request
|
||||||
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
|
completionHandler:^(
|
||||||
|
NSURLResponse *response __unused, NSData *data, NSError *error) {
|
||||||
if (error) {
|
if (error) {
|
||||||
if (completionHandler) {
|
if (completionHandler) {
|
||||||
completionHandler(nil, error);
|
completionHandler(nil, error);
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
ARDMessageResponse *messageResponse = [ARDMessageResponse responseFromJSONData:data];
|
ARDMessageResponse *messageResponse =
|
||||||
|
[ARDMessageResponse responseFromJSONData:data];
|
||||||
if (!messageResponse) {
|
if (!messageResponse) {
|
||||||
if (completionHandler) {
|
if (completionHandler) {
|
||||||
NSError *error = [[self class] badResponseError];
|
NSError *error = [[self class] badResponseError];
|
||||||
@ -139,7 +139,8 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
|
|||||||
dispatch_semaphore_t sem = dispatch_semaphore_create(0);
|
dispatch_semaphore_t sem = dispatch_semaphore_create(0);
|
||||||
[NSURLConnection
|
[NSURLConnection
|
||||||
sendAsyncRequest:request
|
sendAsyncRequest:request
|
||||||
completionHandler:^(NSURLResponse *response __unused, NSData *data __unused, NSError *e) {
|
completionHandler:^(
|
||||||
|
NSURLResponse *response __unused, NSData *data __unused, NSError *e) {
|
||||||
if (e) {
|
if (e) {
|
||||||
error = e;
|
error = e;
|
||||||
}
|
}
|
||||||
@ -148,7 +149,9 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
|
|||||||
|
|
||||||
dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER);
|
dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER);
|
||||||
if (error) {
|
if (error) {
|
||||||
RTCLogError(@"Error leaving room %@ on room server: %@", roomId, error.localizedDescription);
|
RTCLogError(@"Error leaving room %@ on room server: %@",
|
||||||
|
roomId,
|
||||||
|
error.localizedDescription);
|
||||||
if (completionHandler) {
|
if (completionHandler) {
|
||||||
completionHandler(error);
|
completionHandler(error);
|
||||||
}
|
}
|
||||||
@ -163,8 +166,8 @@ static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
|
|||||||
#pragma mark - Private
|
#pragma mark - Private
|
||||||
|
|
||||||
+ (NSError *)badResponseError {
|
+ (NSError *)badResponseError {
|
||||||
NSError *error =
|
NSError *error = [[NSError alloc]
|
||||||
[[NSError alloc] initWithDomain:kARDAppEngineClientErrorDomain
|
initWithDomain:kARDAppEngineClientErrorDomain
|
||||||
code:kARDAppEngineClientErrorBadResponse
|
code:kARDAppEngineClientErrorBadResponse
|
||||||
userInfo:@{
|
userInfo:@{
|
||||||
NSLocalizedDescriptionKey : @"Error parsing response.",
|
NSLocalizedDescriptionKey : @"Error parsing response.",
|
||||||
|
|||||||
@ -22,7 +22,8 @@ const Float64 kFramerateLimit = 30.0;
|
|||||||
BOOL _usingFrontCamera;
|
BOOL _usingFrontCamera;
|
||||||
}
|
}
|
||||||
|
|
||||||
- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
|
- (instancetype)initWithCapturer:
|
||||||
|
(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
|
||||||
settings:(ARDSettingsModel *)settings {
|
settings:(ARDSettingsModel *)settings {
|
||||||
self = [super init];
|
self = [super init];
|
||||||
if (self) {
|
if (self) {
|
||||||
@ -38,8 +39,9 @@ const Float64 kFramerateLimit = 30.0;
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (void)startCapture:(void (^)(NSError *))completion {
|
- (void)startCapture:(void (^)(NSError *))completion {
|
||||||
AVCaptureDevicePosition position =
|
AVCaptureDevicePosition position = _usingFrontCamera ?
|
||||||
_usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
|
AVCaptureDevicePositionFront :
|
||||||
|
AVCaptureDevicePositionBack;
|
||||||
AVCaptureDevice *device = [self findDeviceForPosition:position];
|
AVCaptureDevice *device = [self findDeviceForPosition:position];
|
||||||
AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
|
AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
|
||||||
|
|
||||||
@ -52,7 +54,10 @@ const Float64 kFramerateLimit = 30.0;
|
|||||||
|
|
||||||
NSInteger fps = [self selectFpsForFormat:format];
|
NSInteger fps = [self selectFpsForFormat:format];
|
||||||
|
|
||||||
[_capturer startCaptureWithDevice:device format:format fps:fps completionHandler:completion];
|
[_capturer startCaptureWithDevice:device
|
||||||
|
format:format
|
||||||
|
fps:fps
|
||||||
|
completionHandler:completion];
|
||||||
}
|
}
|
||||||
|
|
||||||
- (void)stopCapture {
|
- (void)stopCapture {
|
||||||
@ -91,13 +96,17 @@ const Float64 kFramerateLimit = 30.0;
|
|||||||
int currentDiff = INT_MAX;
|
int currentDiff = INT_MAX;
|
||||||
|
|
||||||
for (AVCaptureDeviceFormat *format in formats) {
|
for (AVCaptureDeviceFormat *format in formats) {
|
||||||
CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
|
CMVideoDimensions dimension =
|
||||||
FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
|
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
|
||||||
int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
|
FourCharCode pixelFormat =
|
||||||
|
CMFormatDescriptionGetMediaSubType(format.formatDescription);
|
||||||
|
int diff = abs(targetWidth - dimension.width) +
|
||||||
|
abs(targetHeight - dimension.height);
|
||||||
if (diff < currentDiff) {
|
if (diff < currentDiff) {
|
||||||
selectedFormat = format;
|
selectedFormat = format;
|
||||||
currentDiff = diff;
|
currentDiff = diff;
|
||||||
} else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
|
} else if (diff == currentDiff &&
|
||||||
|
pixelFormat == [_capturer preferredOutputPixelFormat]) {
|
||||||
selectedFormat = format;
|
selectedFormat = format;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -21,14 +21,16 @@
|
|||||||
|
|
||||||
@implementation ARDExternalSampleCapturer
|
@implementation ARDExternalSampleCapturer
|
||||||
|
|
||||||
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
|
- (instancetype)initWithDelegate:
|
||||||
|
(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
|
||||||
return [super initWithDelegate:delegate];
|
return [super initWithDelegate:delegate];
|
||||||
}
|
}
|
||||||
|
|
||||||
#pragma mark - ARDExternalSampleDelegate
|
#pragma mark - ARDExternalSampleDelegate
|
||||||
|
|
||||||
- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer {
|
- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer {
|
||||||
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
|
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
|
||||||
|
!CMSampleBufferIsValid(sampleBuffer) ||
|
||||||
!CMSampleBufferDataIsReady(sampleBuffer)) {
|
!CMSampleBufferDataIsReady(sampleBuffer)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -41,7 +43,8 @@
|
|||||||
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
|
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
|
||||||
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
|
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
|
||||||
int64_t timeStampNs =
|
int64_t timeStampNs =
|
||||||
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
|
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
|
||||||
|
NSEC_PER_SEC;
|
||||||
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
|
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
|
||||||
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
|
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
|
||||||
rotation:RTCVideoRotation_0
|
rotation:RTCVideoRotation_0
|
||||||
|
|||||||
@ -27,18 +27,20 @@ NS_ASSUME_NONNULL_BEGIN
|
|||||||
- (NSArray<NSString *> *)availableVideoResolutions {
|
- (NSArray<NSString *> *)availableVideoResolutions {
|
||||||
NSMutableSet<NSArray<NSNumber *> *> *resolutions =
|
NSMutableSet<NSArray<NSNumber *> *> *resolutions =
|
||||||
[[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
|
[[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
|
||||||
for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
|
for (AVCaptureDevice *device in
|
||||||
for (AVCaptureDeviceFormat *format in
|
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
|
||||||
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) {
|
for (AVCaptureDeviceFormat *format in [RTC_OBJC_TYPE(RTCCameraVideoCapturer)
|
||||||
|
supportedFormatsForDevice:device]) {
|
||||||
CMVideoDimensions resolution =
|
CMVideoDimensions resolution =
|
||||||
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
|
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
|
||||||
NSArray<NSNumber *> *resolutionObject = @[ @(resolution.width), @(resolution.height) ];
|
NSArray<NSNumber *> *resolutionObject =
|
||||||
|
@[ @(resolution.width), @(resolution.height) ];
|
||||||
[resolutions addObject:resolutionObject];
|
[resolutions addObject:resolutionObject];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
NSArray<NSArray<NSNumber *> *> *sortedResolutions =
|
NSArray<NSArray<NSNumber *> *> *sortedResolutions = [[resolutions allObjects]
|
||||||
[[resolutions allObjects] sortedArrayUsingComparator:^NSComparisonResult(
|
sortedArrayUsingComparator:^NSComparisonResult(
|
||||||
NSArray<NSNumber *> *obj1, NSArray<NSNumber *> *obj2) {
|
NSArray<NSNumber *> *obj1, NSArray<NSNumber *> *obj2) {
|
||||||
NSComparisonResult cmp = [obj1.firstObject compare:obj2.firstObject];
|
NSComparisonResult cmp = [obj1.firstObject compare:obj2.firstObject];
|
||||||
if (cmp != NSOrderedSame) {
|
if (cmp != NSOrderedSame) {
|
||||||
@ -47,10 +49,13 @@ NS_ASSUME_NONNULL_BEGIN
|
|||||||
return [obj1.lastObject compare:obj2.lastObject];
|
return [obj1.lastObject compare:obj2.lastObject];
|
||||||
}];
|
}];
|
||||||
|
|
||||||
NSMutableArray<NSString *> *resolutionStrings = [[NSMutableArray<NSString *> alloc] init];
|
NSMutableArray<NSString *> *resolutionStrings =
|
||||||
|
[[NSMutableArray<NSString *> alloc] init];
|
||||||
for (NSArray<NSNumber *> *resolution in sortedResolutions) {
|
for (NSArray<NSNumber *> *resolution in sortedResolutions) {
|
||||||
NSString *resolutionString =
|
NSString *resolutionString =
|
||||||
[NSString stringWithFormat:@"%@x%@", resolution.firstObject, resolution.lastObject];
|
[NSString stringWithFormat:@"%@x%@",
|
||||||
|
resolution.firstObject,
|
||||||
|
resolution.lastObject];
|
||||||
[resolutionStrings addObject:resolutionString];
|
[resolutionStrings addObject:resolutionString];
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -81,7 +86,9 @@ NS_ASSUME_NONNULL_BEGIN
|
|||||||
Class expectedClass = [RTC_OBJC_TYPE(RTCVideoCodecInfo) class];
|
Class expectedClass = [RTC_OBJC_TYPE(RTCVideoCodecInfo) class];
|
||||||
NSError *error;
|
NSError *error;
|
||||||
RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodecSetting =
|
RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodecSetting =
|
||||||
[NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass fromData:codecData error:&error];
|
[NSKeyedUnarchiver unarchivedObjectOfClass:expectedClass
|
||||||
|
fromData:codecData
|
||||||
|
error:&error];
|
||||||
if (!error) {
|
if (!error) {
|
||||||
return videoCodecSetting;
|
return videoCodecSetting;
|
||||||
}
|
}
|
||||||
@ -176,11 +183,13 @@ NS_ASSUME_NONNULL_BEGIN
|
|||||||
return [self availableVideoCodecs].firstObject;
|
return [self availableVideoCodecs].firstObject;
|
||||||
}
|
}
|
||||||
|
|
||||||
- (int)videoResolutionComponentAtIndex:(int)index inString:(NSString *)resolution {
|
- (int)videoResolutionComponentAtIndex:(int)index
|
||||||
|
inString:(NSString *)resolution {
|
||||||
if (index != 0 && index != 1) {
|
if (index != 0 && index != 1) {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
NSArray<NSString *> *components = [resolution componentsSeparatedByString:@"x"];
|
NSArray<NSString *> *components =
|
||||||
|
[resolution componentsSeparatedByString:@"x"];
|
||||||
if (components.count != 2) {
|
if (components.count != 2) {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
@ -190,17 +199,20 @@ NS_ASSUME_NONNULL_BEGIN
|
|||||||
- (void)registerStoreDefaults {
|
- (void)registerStoreDefaults {
|
||||||
#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
|
#if defined(WEBRTC_IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_13
|
||||||
NSError *error;
|
NSError *error;
|
||||||
NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]
|
NSData *codecData = [NSKeyedArchiver
|
||||||
|
archivedDataWithRootObject:[self defaultVideoCodecSetting]
|
||||||
requiringSecureCoding:NO
|
requiringSecureCoding:NO
|
||||||
error:&error];
|
error:&error];
|
||||||
if (error) {
|
if (error) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
#else
|
#else
|
||||||
NSData *codecData = [NSKeyedArchiver archivedDataWithRootObject:[self defaultVideoCodecSetting]];
|
NSData *codecData = [NSKeyedArchiver
|
||||||
|
archivedDataWithRootObject:[self defaultVideoCodecSetting]];
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
[ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
|
[ARDSettingsStore
|
||||||
|
setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
|
||||||
videoCodec:codecData
|
videoCodec:codecData
|
||||||
bitrate:nil
|
bitrate:nil
|
||||||
audioOnly:NO
|
audioOnly:NO
|
||||||
|
|||||||
@ -15,7 +15,8 @@ static NSString *const kVideoCodecKey = @"rtc_video_codec_info_key";
|
|||||||
static NSString *const kBitrateKey = @"rtc_max_bitrate_key";
|
static NSString *const kBitrateKey = @"rtc_max_bitrate_key";
|
||||||
static NSString *const kAudioOnlyKey = @"rtc_audio_only_key";
|
static NSString *const kAudioOnlyKey = @"rtc_audio_only_key";
|
||||||
static NSString *const kCreateAecDumpKey = @"rtc_create_aec_dump_key";
|
static NSString *const kCreateAecDumpKey = @"rtc_create_aec_dump_key";
|
||||||
static NSString *const kUseManualAudioConfigKey = @"rtc_use_manual_audio_config_key";
|
static NSString *const kUseManualAudioConfigKey =
|
||||||
|
@"rtc_use_manual_audio_config_key";
|
||||||
|
|
||||||
NS_ASSUME_NONNULL_BEGIN
|
NS_ASSUME_NONNULL_BEGIN
|
||||||
@interface ARDSettingsStore () {
|
@interface ARDSettingsStore () {
|
||||||
|
|||||||
@ -58,7 +58,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
|
|||||||
} else if ([typeString isEqualToString:@"offer"] ||
|
} else if ([typeString isEqualToString:@"offer"] ||
|
||||||
[typeString isEqualToString:@"answer"]) {
|
[typeString isEqualToString:@"answer"]) {
|
||||||
RTC_OBJC_TYPE(RTCSessionDescription) *description =
|
RTC_OBJC_TYPE(RTCSessionDescription) *description =
|
||||||
[RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values];
|
[RTC_OBJC_TYPE(RTCSessionDescription)
|
||||||
|
descriptionFromJSONDictionary:values];
|
||||||
message =
|
message =
|
||||||
[[ARDSessionDescriptionMessage alloc] initWithDescription:description];
|
[[ARDSessionDescriptionMessage alloc] initWithDescription:description];
|
||||||
} else if ([typeString isEqualToString:@"bye"]) {
|
} else if ([typeString isEqualToString:@"bye"]) {
|
||||||
@ -97,7 +98,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
|
|||||||
|
|
||||||
@synthesize candidates = _candidates;
|
@synthesize candidates = _candidates;
|
||||||
|
|
||||||
- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
|
- (instancetype)initWithRemovedCandidates:
|
||||||
|
(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
|
||||||
NSParameterAssert(candidates.count);
|
NSParameterAssert(candidates.count);
|
||||||
self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval];
|
self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval];
|
||||||
if (self) {
|
if (self) {
|
||||||
@ -107,7 +109,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (NSData *)JSONData {
|
- (NSData *)JSONData {
|
||||||
return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates
|
return [RTC_OBJC_TYPE(RTCIceCandidate)
|
||||||
|
JSONDataForIceCandidates:_candidates
|
||||||
withType:kARDTypeValueRemoveCandidates];
|
withType:kARDTypeValueRemoveCandidates];
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -117,7 +120,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
|
|||||||
|
|
||||||
@synthesize sessionDescription = _sessionDescription;
|
@synthesize sessionDescription = _sessionDescription;
|
||||||
|
|
||||||
- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
|
- (instancetype)initWithDescription:
|
||||||
|
(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
|
||||||
ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
|
ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
|
||||||
RTCSdpType sdpType = description.type;
|
RTCSdpType sdpType = description.type;
|
||||||
switch (sdpType) {
|
switch (sdpType) {
|
||||||
@ -129,8 +133,9 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
|
|||||||
break;
|
break;
|
||||||
case RTCSdpTypePrAnswer:
|
case RTCSdpTypePrAnswer:
|
||||||
case RTCSdpTypeRollback:
|
case RTCSdpTypeRollback:
|
||||||
NSAssert(
|
NSAssert(NO,
|
||||||
NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
|
@"Unexpected type: %@",
|
||||||
|
[RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
self = [super initWithType:messageType];
|
self = [super initWithType:messageType];
|
||||||
@ -153,9 +158,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
|
|||||||
}
|
}
|
||||||
|
|
||||||
- (NSData *)JSONData {
|
- (NSData *)JSONData {
|
||||||
NSDictionary *message = @{
|
NSDictionary *message = @{@"type" : @"bye"};
|
||||||
@"type": @"bye"
|
|
||||||
};
|
|
||||||
return [NSJSONSerialization dataWithJSONObject:message
|
return [NSJSONSerialization dataWithJSONObject:message
|
||||||
options:NSJSONWritingPrettyPrinted
|
options:NSJSONWritingPrettyPrinted
|
||||||
error:NULL];
|
error:NULL];
|
||||||
|
|||||||
@ -33,4 +33,3 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
@end
|
@end
|
||||||
|
|
||||||
|
|||||||
@ -33,11 +33,11 @@ static NSInteger kARDTURNClientErrorBadResponse = -1;
|
|||||||
|
|
||||||
- (void)requestServersWithCompletionHandler:
|
- (void)requestServersWithCompletionHandler:
|
||||||
(void (^)(NSArray *turnServers, NSError *error))completionHandler {
|
(void (^)(NSArray *turnServers, NSError *error))completionHandler {
|
||||||
|
|
||||||
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url];
|
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url];
|
||||||
[NSURLConnection
|
[NSURLConnection
|
||||||
sendAsyncRequest:request
|
sendAsyncRequest:request
|
||||||
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
|
completionHandler:^(
|
||||||
|
NSURLResponse *response __unused, NSData *data, NSError *error) {
|
||||||
if (error) {
|
if (error) {
|
||||||
completionHandler(nil, error);
|
completionHandler(nil, error);
|
||||||
return;
|
return;
|
||||||
@ -54,26 +54,32 @@ static NSInteger kARDTURNClientErrorBadResponse = -1;
|
|||||||
- (void)makeTurnServerRequestToURL:(NSURL *)url
|
- (void)makeTurnServerRequestToURL:(NSURL *)url
|
||||||
WithCompletionHandler:(void (^)(NSArray *turnServers,
|
WithCompletionHandler:(void (^)(NSArray *turnServers,
|
||||||
NSError *error))completionHandler {
|
NSError *error))completionHandler {
|
||||||
NSMutableURLRequest *iceServerRequest = [NSMutableURLRequest requestWithURL:url];
|
NSMutableURLRequest *iceServerRequest =
|
||||||
|
[NSMutableURLRequest requestWithURL:url];
|
||||||
iceServerRequest.HTTPMethod = @"POST";
|
iceServerRequest.HTTPMethod = @"POST";
|
||||||
[iceServerRequest addValue:kTURNRefererURLString forHTTPHeaderField:@"referer"];
|
[iceServerRequest addValue:kTURNRefererURLString
|
||||||
|
forHTTPHeaderField:@"referer"];
|
||||||
[NSURLConnection
|
[NSURLConnection
|
||||||
sendAsyncRequest:iceServerRequest
|
sendAsyncRequest:iceServerRequest
|
||||||
completionHandler:^(NSURLResponse *response __unused, NSData *data, NSError *error) {
|
completionHandler:^(
|
||||||
|
NSURLResponse *response __unused, NSData *data, NSError *error) {
|
||||||
if (error) {
|
if (error) {
|
||||||
completionHandler(nil, error);
|
completionHandler(nil, error);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data];
|
NSDictionary *turnResponseDict =
|
||||||
|
[NSDictionary dictionaryWithJSONData:data];
|
||||||
NSMutableArray *turnServers = [NSMutableArray array];
|
NSMutableArray *turnServers = [NSMutableArray array];
|
||||||
[turnResponseDict[@"iceServers"]
|
[turnResponseDict[@"iceServers"]
|
||||||
enumerateObjectsUsingBlock:^(
|
enumerateObjectsUsingBlock:^(NSDictionary *obj,
|
||||||
NSDictionary *obj, NSUInteger idx __unused, BOOL *stop __unused) {
|
NSUInteger idx __unused,
|
||||||
[turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) serverFromJSONDictionary:obj]];
|
BOOL *stop __unused) {
|
||||||
|
[turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer)
|
||||||
|
serverFromJSONDictionary:obj]];
|
||||||
}];
|
}];
|
||||||
if (!turnServers) {
|
if (!turnServers) {
|
||||||
NSError *responseError =
|
NSError *responseError = [[NSError alloc]
|
||||||
[[NSError alloc] initWithDomain:kARDTURNClientErrorDomain
|
initWithDomain:kARDTURNClientErrorDomain
|
||||||
code:kARDTURNClientErrorBadResponse
|
code:kARDTURNClientErrorBadResponse
|
||||||
userInfo:@{
|
userInfo:@{
|
||||||
NSLocalizedDescriptionKey : @"Bad TURN response.",
|
NSLocalizedDescriptionKey : @"Bad TURN response.",
|
||||||
|
|||||||
@ -63,8 +63,7 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
|
|||||||
[_delegate channel:self didChangeState:_state];
|
[_delegate channel:self didChangeState:_state];
|
||||||
}
|
}
|
||||||
|
|
||||||
- (void)registerForRoomId:(NSString *)roomId
|
- (void)registerForRoomId:(NSString *)roomId clientId:(NSString *)clientId {
|
||||||
clientId:(NSString *)clientId {
|
|
||||||
NSParameterAssert(roomId.length);
|
NSParameterAssert(roomId.length);
|
||||||
NSParameterAssert(clientId.length);
|
NSParameterAssert(clientId.length);
|
||||||
_roomId = roomId;
|
_roomId = roomId;
|
||||||
@ -79,8 +78,8 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
|
|||||||
NSParameterAssert(_roomId.length);
|
NSParameterAssert(_roomId.length);
|
||||||
NSData *data = [message JSONData];
|
NSData *data = [message JSONData];
|
||||||
if (_state == kARDSignalingChannelStateRegistered) {
|
if (_state == kARDSignalingChannelStateRegistered) {
|
||||||
NSString *payload =
|
NSString *payload = [[NSString alloc] initWithData:data
|
||||||
[[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
|
encoding:NSUTF8StringEncoding];
|
||||||
NSDictionary *message = @{
|
NSDictionary *message = @{
|
||||||
@"cmd" : @"send",
|
@"cmd" : @"send",
|
||||||
@"msg" : payload,
|
@"msg" : payload,
|
||||||
@ -95,12 +94,13 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
|
|||||||
RTCLog(@"C->WSS: %@", messageString);
|
RTCLog(@"C->WSS: %@", messageString);
|
||||||
[_socket send:messageString];
|
[_socket send:messageString];
|
||||||
} else {
|
} else {
|
||||||
NSString *dataString =
|
NSString *dataString = [[NSString alloc] initWithData:data
|
||||||
[[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
|
encoding:NSUTF8StringEncoding];
|
||||||
RTCLog(@"C->WSS POST: %@", dataString);
|
RTCLog(@"C->WSS POST: %@", dataString);
|
||||||
NSString *urlString =
|
NSString *urlString = [NSString stringWithFormat:@"%@/%@/%@",
|
||||||
[NSString stringWithFormat:@"%@/%@/%@",
|
[_restURL absoluteString],
|
||||||
[_restURL absoluteString], _roomId, _clientId];
|
_roomId,
|
||||||
|
_clientId];
|
||||||
NSURL *url = [NSURL URLWithString:urlString];
|
NSURL *url = [NSURL URLWithString:urlString];
|
||||||
[NSURLConnection sendAsyncPostToURL:url
|
[NSURLConnection sendAsyncPostToURL:url
|
||||||
withData:data
|
withData:data
|
||||||
@ -115,9 +115,10 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
|
|||||||
}
|
}
|
||||||
[_socket close];
|
[_socket close];
|
||||||
RTCLog(@"C->WSS DELETE rid:%@ cid:%@", _roomId, _clientId);
|
RTCLog(@"C->WSS DELETE rid:%@ cid:%@", _roomId, _clientId);
|
||||||
NSString *urlString =
|
NSString *urlString = [NSString stringWithFormat:@"%@/%@/%@",
|
||||||
[NSString stringWithFormat:@"%@/%@/%@",
|
[_restURL absoluteString],
|
||||||
[_restURL absoluteString], _roomId, _clientId];
|
_roomId,
|
||||||
|
_clientId];
|
||||||
NSURL *url = [NSURL URLWithString:urlString];
|
NSURL *url = [NSURL URLWithString:urlString];
|
||||||
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
|
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
|
||||||
request.HTTPMethod = @"DELETE";
|
request.HTTPMethod = @"DELETE";
|
||||||
@ -168,7 +169,9 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
|
|||||||
reason:(NSString *)reason
|
reason:(NSString *)reason
|
||||||
wasClean:(BOOL)wasClean {
|
wasClean:(BOOL)wasClean {
|
||||||
RTCLog(@"WebSocket closed with code: %ld reason:%@ wasClean:%d",
|
RTCLog(@"WebSocket closed with code: %ld reason:%@ wasClean:%d",
|
||||||
(long)code, reason, wasClean);
|
(long)code,
|
||||||
|
reason,
|
||||||
|
wasClean);
|
||||||
NSParameterAssert(_state != kARDSignalingChannelStateError);
|
NSParameterAssert(_state != kARDSignalingChannelStateError);
|
||||||
self.state = kARDSignalingChannelStateClosed;
|
self.state = kARDSignalingChannelStateClosed;
|
||||||
}
|
}
|
||||||
@ -219,14 +222,16 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
|
|||||||
// Change message to answer, send back to server.
|
// Change message to answer, send back to server.
|
||||||
ARDSessionDescriptionMessage *sdpMessage =
|
ARDSessionDescriptionMessage *sdpMessage =
|
||||||
(ARDSessionDescriptionMessage *)message;
|
(ARDSessionDescriptionMessage *)message;
|
||||||
RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
|
RTC_OBJC_TYPE(RTCSessionDescription) *description =
|
||||||
|
sdpMessage.sessionDescription;
|
||||||
NSString *dsc = description.sdp;
|
NSString *dsc = description.sdp;
|
||||||
dsc = [dsc stringByReplacingOccurrencesOfString:@"offer"
|
dsc = [dsc stringByReplacingOccurrencesOfString:@"offer"
|
||||||
withString:@"answer"];
|
withString:@"answer"];
|
||||||
RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription =
|
RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription =
|
||||||
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
|
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
|
||||||
ARDSignalingMessage *answer =
|
initWithType:RTCSdpTypeAnswer
|
||||||
[[ARDSessionDescriptionMessage alloc]
|
sdp:dsc];
|
||||||
|
ARDSignalingMessage *answer = [[ARDSessionDescriptionMessage alloc]
|
||||||
initWithDescription:answerDescription];
|
initWithDescription:answerDescription];
|
||||||
[self sendMessage:answer];
|
[self sendMessage:answer];
|
||||||
break;
|
break;
|
||||||
@ -250,4 +255,3 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
|
|||||||
}
|
}
|
||||||
|
|
||||||
@end
|
@end
|
||||||
|
|
||||||
|
|||||||
@@ -22,7 +22,8 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
 @implementation RTC_OBJC_TYPE (RTCIceCandidate)
 (JSON)
 
-+ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary {
++ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary
+    : (NSDictionary *)dictionary {
   NSString *mid = dictionary[kRTCICECandidateMidKey];
   NSString *sdp = dictionary[kRTCICECandidateSdpKey];
   NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey];
@@ -32,7 +33,8 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
                                                 sdpMid:mid];
 }
 
-+ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
++ (NSData *)JSONDataForIceCandidates:
+    (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
                             withType:(NSString *)typeValue {
   NSMutableArray *jsonCandidates =
       [NSMutableArray arrayWithCapacity:candidates.count];
@@ -62,8 +64,8 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
   NSMutableArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
       [NSMutableArray arrayWithCapacity:jsonCandidates.count];
   for (NSDictionary *jsonCandidate in jsonCandidates) {
-    RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
-        [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate];
+    RTC_OBJC_TYPE(RTCIceCandidate) *candidate = [RTC_OBJC_TYPE(RTCIceCandidate)
+        candidateFromJSONDictionary:jsonCandidate];
     [candidates addObject:candidate];
   }
   return candidates;
@@ -13,7 +13,8 @@
 @implementation RTC_OBJC_TYPE (RTCIceServer)
 (JSON)
 
-+ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary {
++ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary
+    : (NSDictionary *)dictionary {
   NSArray *turnUrls = dictionary[@"urls"];
   NSString *username = dictionary[@"username"] ?: @"";
   NSString *credential = dictionary[@"credential"] ?: @"";
@@ -21,7 +21,8 @@ static NSString const *kRTCSessionDescriptionSdpKey = @"sdp";
   NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey];
   RTCSdpType type = [[self class] typeForString:typeString];
   NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey];
-  return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp];
+  return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type
+                                                                 sdp:sdp];
 }
 
 - (NSData *)JSONData {
@@ -20,8 +20,9 @@
   NSParameterAssert(jsonString.length > 0);
   NSData *data = [jsonString dataUsingEncoding:NSUTF8StringEncoding];
   NSError *error = nil;
-  NSDictionary *dict =
-      [NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
+  NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:data
+                                                       options:0
+                                                         error:&error];
   if (error) {
     RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
   }
@@ -30,8 +31,9 @@
 
 + (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData {
   NSError *error = nil;
-  NSDictionary *dict =
-      [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:&error];
+  NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:jsonData
+                                                       options:0
+                                                         error:&error];
   if (error) {
     RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
   }
@@ -49,7 +51,8 @@
   // Kick off an async request which will call back on main thread.
   NSURLSession *session = [NSURLSession sharedSession];
   [[session dataTaskWithRequest:request
-              completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
+              completionHandler:^(
+                  NSData *data, NSURLResponse *response, NSError *error) {
                 if (completionHandler) {
                   completionHandler(response, data, error);
                 }
@@ -59,15 +62,15 @@
 // Posts data to the specified URL.
 + (void)sendAsyncPostToURL:(NSURL *)url
                   withData:(NSData *)data
-         completionHandler:(void (^)(BOOL succeeded,
-                                     NSData *data))completionHandler {
+         completionHandler:
+             (void (^)(BOOL succeeded, NSData *data))completionHandler {
   NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
   request.HTTPMethod = @"POST";
   request.HTTPBody = data;
-  [[self class] sendAsyncRequest:request
-               completionHandler:^(NSURLResponse *response,
-                                   NSData *data,
-                                   NSError *error) {
+  [[self class]
+      sendAsyncRequest:request
+     completionHandler:^(
+         NSURLResponse *response, NSData *data, NSError *error) {
        if (error) {
          RTCLogError(@"Error posting data: %@", error.localizedDescription);
          if (completionHandler) {
@@ -78,7 +81,8 @@
        NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
        if (httpResponse.statusCode != 200) {
          NSString *serverResponse = data.length > 0 ?
-             [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding] :
+             [[NSString alloc] initWithData:data
+                                   encoding:NSUTF8StringEncoding] :
              nil;
          RTCLogError(@"Received bad response: %@", serverResponse);
          if (completionHandler) {
@@ -120,7 +124,7 @@ NSInteger ARDGetCpuUsagePercentage(void) {
   }
 
   // Dealloc the created array.
-  vm_deallocate(task, (vm_address_t)thread_array,
-                sizeof(thread_act_t) * thread_count);
+  vm_deallocate(
+      task, (vm_address_t)thread_array, sizeof(thread_act_t) * thread_count);
   return lroundf(cpu_usage_percentage);
 }
@@ -33,8 +33,8 @@
   [_window makeKeyAndVisible];
   ARDMainViewController *viewController = [[ARDMainViewController alloc] init];
 
-  UINavigationController *root =
-      [[UINavigationController alloc] initWithRootViewController:viewController];
+  UINavigationController *root = [[UINavigationController alloc]
+      initWithRootViewController:viewController];
   root.navigationBar.translucent = NO;
   _window.rootViewController = root;
 
@@ -21,7 +21,8 @@
 @implementation ARDFileCaptureController
 @synthesize fileCapturer = _fileCapturer;
 
-- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
+- (instancetype)initWithCapturer:
+    (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
   self = [super init];
   if (self) {
     _fileCapturer = capturer;
@@ -48,7 +48,9 @@ static CGFloat const kCallControlMargin = 8;
 
 - (void)layoutSubviews {
   _roomText.frame =
-      CGRectMake(kRoomTextFieldMargin, 0, CGRectGetWidth(self.bounds) - kRoomTextFieldMargin,
+      CGRectMake(kRoomTextFieldMargin,
+                 0,
+                 CGRectGetWidth(self.bounds) - kRoomTextFieldMargin,
                  kRoomTextFieldHeight);
 }
 
@@ -93,10 +95,15 @@ static CGFloat const kCallControlMargin = 8;
 
   _startRegularCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
   _startRegularCallButton.titleLabel.font = controlFont;
-  [_startRegularCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
-  _startRegularCallButton.backgroundColor
-      = [UIColor colorWithRed:66.0/255.0 green:200.0/255.0 blue:90.0/255.0 alpha:1.0];
-  [_startRegularCallButton setTitle:@"Call room" forState:UIControlStateNormal];
+  [_startRegularCallButton setTitleColor:controlFontColor
+                                forState:UIControlStateNormal];
+  _startRegularCallButton.backgroundColor =
+      [UIColor colorWithRed:66.0 / 255.0
+                      green:200.0 / 255.0
+                       blue:90.0 / 255.0
+                      alpha:1.0];
+  [_startRegularCallButton setTitle:@"Call room"
+                           forState:UIControlStateNormal];
   [_startRegularCallButton addTarget:self
                               action:@selector(onStartRegularCall:)
                     forControlEvents:UIControlEventTouchUpInside];
@@ -104,22 +111,26 @@ static CGFloat const kCallControlMargin = 8;
 
   _startLoopbackCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
   _startLoopbackCallButton.titleLabel.font = controlFont;
-  [_startLoopbackCallButton setTitleColor:controlFontColor forState:UIControlStateNormal];
+  [_startLoopbackCallButton setTitleColor:controlFontColor
+                                 forState:UIControlStateNormal];
   _startLoopbackCallButton.backgroundColor =
       [UIColor colorWithRed:0.0 green:122.0 / 255.0 blue:1.0 alpha:1.0];
-  [_startLoopbackCallButton setTitle:@"Loopback call" forState:UIControlStateNormal];
+  [_startLoopbackCallButton setTitle:@"Loopback call"
+                            forState:UIControlStateNormal];
   [_startLoopbackCallButton addTarget:self
                                action:@selector(onStartLoopbackCall:)
                      forControlEvents:UIControlEventTouchUpInside];
   [self addSubview:_startLoopbackCallButton];
 
 
   // Used to test what happens to sounds when calls are in progress.
   _audioLoopButton = [UIButton buttonWithType:UIButtonTypeSystem];
   _audioLoopButton.titleLabel.font = controlFont;
-  [_audioLoopButton setTitleColor:controlFontColor forState:UIControlStateNormal];
-  _audioLoopButton.backgroundColor =
-      [UIColor colorWithRed:1.0 green:149.0/255.0 blue:0.0 alpha:1.0];
+  [_audioLoopButton setTitleColor:controlFontColor
+                         forState:UIControlStateNormal];
+  _audioLoopButton.backgroundColor = [UIColor colorWithRed:1.0
+                                                     green:149.0 / 255.0
+                                                      blue:0.0
+                                                     alpha:1.0];
   [self updateAudioLoopButton];
   [_audioLoopButton addTarget:self
                        action:@selector(onToggleAudioLoop:)
@@ -143,21 +154,28 @@ static CGFloat const kCallControlMargin = 8;
   CGRect bounds = self.bounds;
   CGFloat roomTextWidth = bounds.size.width - 2 * kRoomTextFieldMargin;
   CGFloat roomTextHeight = [_roomText sizeThatFits:bounds.size].height;
-  _roomText.frame =
-      CGRectMake(kRoomTextFieldMargin, kRoomTextFieldMargin, roomTextWidth,
+  _roomText.frame = CGRectMake(kRoomTextFieldMargin,
+                               kRoomTextFieldMargin,
+                               roomTextWidth,
                                roomTextHeight);
 
   CGFloat buttonHeight =
-      (CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) - kCallControlMargin * 4) / 3;
+      (CGRectGetMaxY(self.bounds) - CGRectGetMaxY(_roomText.frame) -
+       kCallControlMargin * 4) /
+      3;
 
-  CGFloat regularCallFrameTop = CGRectGetMaxY(_roomText.frame) + kCallControlMargin;
-  CGRect regularCallFrame = CGRectMake(kCallControlMargin,
+  CGFloat regularCallFrameTop =
+      CGRectGetMaxY(_roomText.frame) + kCallControlMargin;
+  CGRect regularCallFrame =
+      CGRectMake(kCallControlMargin,
                  regularCallFrameTop,
                  bounds.size.width - 2 * kCallControlMargin,
                  buttonHeight);
 
-  CGFloat loopbackCallFrameTop = CGRectGetMaxY(regularCallFrame) + kCallControlMargin;
-  CGRect loopbackCallFrame = CGRectMake(kCallControlMargin,
+  CGFloat loopbackCallFrameTop =
+      CGRectGetMaxY(regularCallFrame) + kCallControlMargin;
+  CGRect loopbackCallFrame =
+      CGRectMake(kCallControlMargin,
                  loopbackCallFrameTop,
                  bounds.size.width - 2 * kCallControlMargin,
                  buttonHeight);
@@ -25,7 +25,8 @@
 
 static NSString *const barButtonImageString = @"ic_settings_black_24dp.png";
 
-// Launch argument to be passed to indicate that the app should start loopback immediatly
+// Launch argument to be passed to indicate that the app should start loopback
+// immediatly
 static NSString *const loopbackLaunchProcessArgument = @"loopback";
 
 @interface ARDMainViewController () <ARDMainViewDelegate,
@@ -44,7 +45,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 
 - (void)viewDidLoad {
   [super viewDidLoad];
-  if ([[[NSProcessInfo processInfo] arguments] containsObject:loopbackLaunchProcessArgument]) {
+  if ([[[NSProcessInfo processInfo] arguments]
+          containsObject:loopbackLaunchProcessArgument]) {
     [self mainView:nil didInputRoom:@"" isLoopback:YES];
   }
 }
@@ -60,9 +62,11 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
       [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
   webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
       AVAudioSessionCategoryOptionDefaultToSpeaker;
-  [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:webRTCConfig];
+  [RTC_OBJC_TYPE(RTCAudioSessionConfiguration)
+      setWebRTCConfiguration:webRTCConfig];
 
-  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session =
+      [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session addDelegate:self];
 
   [self configureAudioSession];
@@ -70,8 +74,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 }
 
 - (void)addSettingsBarButton {
-  UIBarButtonItem *settingsButton =
-      [[UIBarButtonItem alloc] initWithImage:[UIImage imageNamed:barButtonImageString]
+  UIBarButtonItem *settingsButton = [[UIBarButtonItem alloc]
+      initWithImage:[UIImage imageNamed:barButtonImageString]
               style:UIBarButtonItemStylePlain
              target:self
              action:@selector(showSettings:)];
@@ -80,13 +84,16 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 
 + (NSString *)loopbackRoomString {
   NSString *loopbackRoomString =
-      [[NSUUID UUID].UUIDString stringByReplacingOccurrencesOfString:@"-" withString:@""];
+      [[NSUUID UUID].UUIDString stringByReplacingOccurrencesOfString:@"-"
+                                                          withString:@""];
   return loopbackRoomString;
 }
 
 #pragma mark - ARDMainViewDelegate
 
-- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback {
+- (void)mainView:(ARDMainView *)mainView
+    didInputRoom:(NSString *)room
+      isLoopback:(BOOL)isLoopback {
   if (!room.length) {
     if (isLoopback) {
       // If this is a loopback call, allow a generated room name.
@@ -123,8 +130,10 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 
   ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
 
-  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
-  session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore];
+  RTC_OBJC_TYPE(RTCAudioSession) *session =
+      [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  session.useManualAudio =
+      [settingsModel currentUseManualAudioConfigSettingFromStore];
   session.isAudioEnabled = NO;
 
   // Kick off the video call.
@@ -134,7 +143,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
                                                  delegate:self];
   videoCallViewController.modalTransitionStyle =
       UIModalTransitionStyleCrossDissolve;
-  videoCallViewController.modalPresentationStyle = UIModalPresentationFullScreen;
+  videoCallViewController.modalPresentationStyle =
+      UIModalPresentationFullScreen;
   [self presentViewController:videoCallViewController
                      animated:YES
                    completion:nil];
@@ -154,17 +164,20 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 - (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController {
   if (![viewController isBeingDismissed]) {
     RTCLog(@"Dismissing VC");
-    [self dismissViewControllerAnimated:YES completion:^{
+    [self dismissViewControllerAnimated:YES
+                             completion:^{
       [self restartAudioPlayerIfNeeded];
     }];
   }
-  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session =
+      [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   session.isAudioEnabled = NO;
 }
 
 #pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
 
-- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+- (void)audioSessionDidStartPlayOrRecord:
+    (RTC_OBJC_TYPE(RTCAudioSession) *)session {
   // Stop playback on main queue and then configure WebRTC.
   [RTC_OBJC_TYPE(RTCDispatcher)
       dispatchAsyncOnType:RTCDispatcherTypeMain
@@ -178,9 +191,11 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
                     }];
 }
 
-- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
+- (void)audioSessionDidStopPlayOrRecord:
+    (RTC_OBJC_TYPE(RTCAudioSession) *)session {
   // WebRTC is done with the audio session. Restart playback.
-  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeMain
                     block:^{
                       RTCLog(@"audioSessionDidStopPlayOrRecord");
                       [self restartAudioPlayerIfNeeded];
@@ -190,11 +205,12 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 #pragma mark - Private
 - (void)showSettings:(id)sender {
   ARDSettingsViewController *settingsController =
-      [[ARDSettingsViewController alloc] initWithStyle:UITableViewStyleGrouped
+      [[ARDSettingsViewController alloc]
+          initWithStyle:UITableViewStyleGrouped
           settingsModel:[[ARDSettingsModel alloc] init]];
 
-  UINavigationController *navigationController =
-      [[UINavigationController alloc] initWithRootViewController:settingsController];
+  UINavigationController *navigationController = [[UINavigationController alloc]
+      initWithRootViewController:settingsController];
   [self presentViewControllerAsModal:navigationController];
 }
 
@@ -209,7 +225,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
   configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers;
   configuration.mode = AVAudioSessionModeDefault;
 
-  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session =
+      [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session lockForConfiguration];
   BOOL hasSucceeded = NO;
   NSError *error = nil;
@@ -227,8 +244,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
 }
 
 - (void)setupAudioPlayer {
-  NSString *audioFilePath =
-      [[NSBundle mainBundle] pathForResource:@"mozart" ofType:@"mp3"];
+  NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"mozart"
+                                                            ofType:@"mp3"];
   NSURL *audioFileURL = [NSURL URLWithString:audioFilePath];
   _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileURL
                                                          error:nil];
@@ -251,7 +268,8 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
                                           message:message
                                    preferredStyle:UIAlertControllerStyleAlert];
 
-  UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
+  UIAlertAction *defaultAction =
+      [UIAlertAction actionWithTitle:@"OK"
                                style:UIAlertActionStyleDefault
                              handler:^(UIAlertAction *action){
                              }];
@@ -69,8 +69,8 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
 #pragma mark -
 
 - (void)addDoneBarButton {
-  UIBarButtonItem *barItem =
-      [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone
+  UIBarButtonItem *barItem = [[UIBarButtonItem alloc]
+      initWithBarButtonSystemItem:UIBarButtonSystemItemDone
                            target:self
                            action:@selector(dismissModally:)];
   self.navigationItem.leftBarButtonItem = barItem;
@@ -88,7 +88,8 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
   return 4;
 }
 
-- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
+- (NSInteger)tableView:(UITableView *)tableView
+    numberOfRowsInSection:(NSInteger)section {
   switch (section) {
     case ARDSettingsSectionAudioSettings:
       return 3;
@@ -103,9 +104,7 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
 
 #pragma mark - Table view delegate helpers
 
-- (void)removeAllAccessories:(UITableView *)tableView
-                   inSection:(int)section
-{
+- (void)removeAllAccessories:(UITableView *)tableView inSection:(int)section {
   for (int i = 0; i < [tableView numberOfRowsInSection:section]; i++) {
     NSIndexPath *rowPath = [NSIndexPath indexPathForRow:i inSection:section];
     UITableViewCell *cell = [tableView cellForRowAtIndexPath:rowPath];
@@ -144,16 +143,20 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
     cellForRowAtIndexPath:(NSIndexPath *)indexPath {
   switch (indexPath.section) {
     case ARDSettingsSectionAudioSettings:
-      return [self audioSettingsTableViewCellForTableView:tableView atIndexPath:indexPath];
+      return [self audioSettingsTableViewCellForTableView:tableView
+                                               atIndexPath:indexPath];
 
     case ARDSettingsSectionVideoResolution:
-      return [self videoResolutionTableViewCellForTableView:tableView atIndexPath:indexPath];
+      return [self videoResolutionTableViewCellForTableView:tableView
+                                                 atIndexPath:indexPath];
 
     case ARDSettingsSectionVideoCodec:
-      return [self videoCodecTableViewCellForTableView:tableView atIndexPath:indexPath];
+      return [self videoCodecTableViewCellForTableView:tableView
+                                            atIndexPath:indexPath];
 
     case ARDSettingsSectionBitRate:
-      return [self bitrateTableViewCellForTableView:tableView atIndexPath:indexPath];
+      return [self bitrateTableViewCellForTableView:tableView
+                                         atIndexPath:indexPath];
 
     default:
       return [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
@@ -161,7 +164,8 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
   }
 }
 
-- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
+- (void)tableView:(UITableView *)tableView
+    didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
   switch (indexPath.section) {
     case ARDSettingsSectionVideoResolution:
       [self tableView:tableView disSelectVideoResolutionAtIndex:indexPath];
@@ -175,17 +179,21 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
 
 #pragma mark - Table view delegate(Video Resolution)
 
-- (UITableViewCell *)videoResolutionTableViewCellForTableView:(UITableView *)tableView
+- (UITableViewCell *)
+    videoResolutionTableViewCellForTableView:(UITableView *)tableView
                                  atIndexPath:(NSIndexPath *)indexPath {
   NSString *dequeueIdentifier = @"ARDSettingsVideoResolutionViewCellIdentifier";
-  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  UITableViewCell *cell =
+      [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
   if (!cell) {
     cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                   reuseIdentifier:dequeueIdentifier];
   }
   NSString *resolution = self.videoResolutionArray[indexPath.row];
   cell.textLabel.text = resolution;
-  if ([resolution isEqualToString:[_settingsModel currentVideoResolutionSettingFromStore]]) {
+  if ([resolution
+          isEqualToString:[_settingsModel
+                              currentVideoResolutionSettingFromStore]]) {
     cell.accessoryType = UITableViewCellAccessoryCheckmark;
   } else {
     cell.accessoryType = UITableViewCellAccessoryNone;
@@ -206,17 +214,20 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
 
 #pragma mark - Table view delegate(Video Codec)
 
-- (UITableViewCell *)videoCodecTableViewCellForTableView:(UITableView *)tableView
+- (UITableViewCell *)
+    videoCodecTableViewCellForTableView:(UITableView *)tableView
                             atIndexPath:(NSIndexPath *)indexPath {
   NSString *dequeueIdentifier = @"ARDSettingsVideoCodecCellIdentifier";
-  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  UITableViewCell *cell =
+      [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
   if (!cell) {
     cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                   reuseIdentifier:dequeueIdentifier];
   }
   RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = self.videoCodecArray[indexPath.row];
   cell.textLabel.text = [codec humanReadableDescription];
-  if ([codec isEqualToCodecInfo:[_settingsModel currentVideoCodecSettingFromStore]]) {
+  if ([codec isEqualToCodecInfo:[_settingsModel
+                                    currentVideoCodecSettingFromStore]]) {
     cell.accessoryType = UITableViewCellAccessoryCheckmark;
   } else {
     cell.accessoryType = UITableViewCellAccessoryNone;
@@ -231,7 +242,8 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
                 updateListSelectionAtIndexPath:indexPath
                                      inSection:ARDSettingsSectionVideoCodec];
 
-  RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec = self.videoCodecArray[indexPath.row];
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec =
+      self.videoCodecArray[indexPath.row];
   [_settingsModel storeVideoCodecSetting:videoCodec];
 }
 
@@ -240,27 +252,34 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
 - (UITableViewCell *)bitrateTableViewCellForTableView:(UITableView *)tableView
                                           atIndexPath:(NSIndexPath *)indexPath {
   NSString *dequeueIdentifier = @"ARDSettingsBitrateCellIdentifier";
-  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  UITableViewCell *cell =
+      [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
   if (!cell) {
     cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                   reuseIdentifier:dequeueIdentifier];
 
     UITextField *textField = [[UITextField alloc]
-        initWithFrame:CGRectMake(10, 0, cell.bounds.size.width - 20, cell.bounds.size.height)];
-    NSString *currentMaxBitrate = [_settingsModel currentMaxBitrateSettingFromStore].stringValue;
+        initWithFrame:CGRectMake(10,
+                                 0,
+                                 cell.bounds.size.width - 20,
+                                 cell.bounds.size.height)];
+    NSString *currentMaxBitrate =
+        [_settingsModel currentMaxBitrateSettingFromStore].stringValue;
     textField.text = currentMaxBitrate;
     textField.placeholder = @"Enter max bit rate (kbps)";
     textField.keyboardType = UIKeyboardTypeNumberPad;
     textField.delegate = self;
 
     // Numerical keyboards have no return button, we need to add one manually.
-    UIToolbar *numberToolbar =
-        [[UIToolbar alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)];
+    UIToolbar *numberToolbar = [[UIToolbar alloc]
+        initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)];
     numberToolbar.items = @[
-      [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
+      [[UIBarButtonItem alloc]
+          initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
                                target:nil
                                action:nil],
-      [[UIBarButtonItem alloc] initWithTitle:@"Apply"
+      [[UIBarButtonItem alloc]
+          initWithTitle:@"Apply"
                   style:UIBarButtonItemStyleDone
                  target:self
                  action:@selector(numberTextFieldDidEndEditing:)]
@@ -289,10 +308,12 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
 
 #pragma mark - Table view delegate(Audio settings)
 
-- (UITableViewCell *)audioSettingsTableViewCellForTableView:(UITableView *)tableView
+- (UITableViewCell *)
+    audioSettingsTableViewCellForTableView:(UITableView *)tableView
                                atIndexPath:(NSIndexPath *)indexPath {
   NSString *dequeueIdentifier = @"ARDSettingsAudioSettingsCellIdentifier";
-  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
+  UITableViewCell *cell =
+      [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
   if (!cell) {
     cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                   reuseIdentifier:dequeueIdentifier];
@@ -41,11 +41,13 @@ static CGFloat const kStatusBarHeight = 20;
 - (instancetype)initWithFrame:(CGRect)frame {
   self = [super initWithFrame:frame];
   if (self) {
-    _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
+    _remoteVideoView =
+        [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
 
     [self addSubview:_remoteVideoView];
 
-    _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
+    _localVideoView =
+        [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
     [self addSubview:_localVideoView];
 
     _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -69,7 +71,8 @@ static CGFloat const kStatusBarHeight = 20;
     _cameraSwitchButton.backgroundColor = [UIColor grayColor];
     _cameraSwitchButton.layer.cornerRadius = kButtonSize / 2;
     _cameraSwitchButton.layer.masksToBounds = YES;
-    image = [UIImage imageForName:@"ic_switch_video_black_24dp.png" color:[UIColor whiteColor]];
+    image = [UIImage imageForName:@"ic_switch_video_black_24dp.png"
+                            color:[UIColor whiteColor]];
     [_cameraSwitchButton setImage:image forState:UIControlStateNormal];
     [_cameraSwitchButton addTarget:self
                             action:@selector(onCameraSwitch:)
@@ -93,8 +96,7 @@ static CGFloat const kStatusBarHeight = 20;
     _statusLabel.textColor = [UIColor whiteColor];
     [self addSubview:_statusLabel];
 
-    UITapGestureRecognizer *tapRecognizer =
-        [[UITapGestureRecognizer alloc]
+    UITapGestureRecognizer *tapRecognizer = [[UITapGestureRecognizer alloc]
         initWithTarget:self
                 action:@selector(didTripleTap:)];
     tapRecognizer.numberOfTapsRequired = 3;
@@ -130,23 +132,23 @@ static CGFloat const kStatusBarHeight = 20;
   CGRect localVideoFrame =
       CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
   // Place the view in the bottom right.
-  localVideoFrame.origin.x = CGRectGetMaxX(bounds)
-      - localVideoFrame.size.width - kLocalVideoViewPadding;
-  localVideoFrame.origin.y = CGRectGetMaxY(bounds)
-      - localVideoFrame.size.height - kLocalVideoViewPadding;
+  localVideoFrame.origin.x = CGRectGetMaxX(bounds) -
+      localVideoFrame.size.width - kLocalVideoViewPadding;
+  localVideoFrame.origin.y = CGRectGetMaxY(bounds) -
+      localVideoFrame.size.height - kLocalVideoViewPadding;
   _localVideoView.frame = localVideoFrame;
 
   // Place stats at the top.
   CGSize statsSize = [_statsView sizeThatFits:bounds.size];
   _statsView.frame = CGRectMake(CGRectGetMinX(bounds),
                                 CGRectGetMinY(bounds) + kStatusBarHeight,
-                                statsSize.width, statsSize.height);
+                                statsSize.width,
+                                statsSize.height);
 
   // Place hangup button in the bottom left.
   _hangupButton.frame =
       CGRectMake(CGRectGetMinX(bounds) + kButtonPadding,
-                 CGRectGetMaxY(bounds) - kButtonPadding -
-                 kButtonSize,
+                 CGRectGetMaxY(bounds) - kButtonPadding - kButtonSize,
                  kButtonSize,
                  kButtonSize);
 
@@ -158,8 +160,7 @@ static CGFloat const kStatusBarHeight = 20;
 
   // Place route button to the right of camera button.
   CGRect routeChangeFrame = _cameraSwitchButton.frame;
-  routeChangeFrame.origin.x =
-      CGRectGetMaxX(routeChangeFrame) + kButtonPadding;
+  routeChangeFrame.origin.x = CGRectGetMaxX(routeChangeFrame) + kButtonPadding;
   _routeChangeButton.frame = routeChangeFrame;
 
   [_statusLabel sizeToFit];
@@ -169,7 +170,8 @@ static CGFloat const kStatusBarHeight = 20;
 
 #pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate)
 
-- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView
+    didChangeVideoSize:(CGSize)size {
   if (videoView == _remoteVideoView) {
     _remoteVideoSize = size;
   }
@@ -22,7 +22,8 @@
 #import "ARDSettingsModel.h"
 #import "ARDVideoCallView.h"
 
-@interface ARDVideoCallViewController () <ARDAppClientDelegate,
+@interface ARDVideoCallViewController () <
+    ARDAppClientDelegate,
     ARDVideoCallViewDelegate,
     RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
 @property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack;
@@ -51,7 +52,9 @@
     _delegate = delegate;
 
     _client = [[ARDAppClient alloc] initWithDelegate:self];
-    [_client connectToRoomWithId:room settings:settingsModel isLoopback:isLoopback];
+    [_client connectToRoomWithId:room
+                        settings:settingsModel
+                      isLoopback:isLoopback];
   }
   return self;
 }
@@ -63,7 +66,8 @@
       [self statusTextForState:RTCIceConnectionStateNew];
   self.view = _videoCallView;
 
-  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session =
+      [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   [session addDelegate:self];
 }
 
@@ -101,19 +105,23 @@
 }
 
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+    didCreateLocalCapturer:
+        (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
   _videoCallView.localVideoView.captureSession = localCapturer.captureSession;
   ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
   _captureController =
-      [[ARDCaptureController alloc] initWithCapturer:localCapturer settings:settingsModel];
+      [[ARDCaptureController alloc] initWithCapturer:localCapturer
+                                            settings:settingsModel];
   [_captureController startCapture];
 }
 
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
+    didCreateLocalFileCapturer:
+        (RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
 #if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
   if (@available(iOS 10, *)) {
-    _fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
+    _fileCaptureController =
+        [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
     [_fileCaptureController startCapture];
   }
 #endif
@@ -124,7 +132,8 @@
 }
 
 - (void)appClient:(ARDAppClient *)client
-    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+    didReceiveRemoteVideoTrack:
+        (RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
   self.remoteVideoTrack = remoteVideoTrack;
   __weak ARDVideoCallViewController *weakSelf = self;
   dispatch_async(dispatch_get_main_queue(), ^{
@@ -133,13 +142,13 @@
   });
 }
 
-- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+- (void)appClient:(ARDAppClient *)client
+      didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
   _videoCallView.statsView.stats = stats;
   [_videoCallView setNeedsLayout];
 }
 
-- (void)appClient:(ARDAppClient *)client
-         didError:(NSError *)error {
+- (void)appClient:(ARDAppClient *)client didError:(NSError *)error {
   NSString *message =
       [NSString stringWithFormat:@"%@", error.localizedDescription];
   [self hangup];
@@ -164,7 +173,8 @@
   if (_portOverride == AVAudioSessionPortOverrideNone) {
     override = AVAudioSessionPortOverrideSpeaker;
   }
-  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeAudioSession
                     block:^{
                       RTC_OBJC_TYPE(RTCAudioSession) *session =
                           [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
@@ -239,7 +249,8 @@
                                           message:message
                                    preferredStyle:UIAlertControllerStyleAlert];
 
-  UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
+  UIAlertAction *defaultAction =
+      [UIAlertAction actionWithTitle:@"OK"
                                style:UIAlertActionStyleDefault
                              handler:^(UIAlertAction *action){
                              }];
@@ -19,7 +19,8 @@
   if ([self.name isEqualToString:@"H264"]) {
     NSString *profileId = self.parameters[@"profile-level-id"];
     RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
-        [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId];
+        [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
+            initWithHexString:profileId];
     if (profileLevelId.profile == RTCH264ProfileConstrainedHigh ||
         profileLevelId.profile == RTCH264ProfileHigh) {
       return @"H264 (High)";
@ -31,15 +31,18 @@
|
|||||||
_callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
|
_callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
|
||||||
os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
|
os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
|
||||||
[_callbackLogger start:^(NSString *logMessage) {
|
[_callbackLogger start:^(NSString *logMessage) {
|
||||||
os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
|
os_log(rtc_os_log,
|
||||||
|
"%{public}s",
|
||||||
|
[logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
|
||||||
}];
|
}];
|
||||||
}
|
}
|
||||||
return self;
|
return self;
|
 }
 
-- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
-  // User has requested to start the broadcast. Setup info from the UI extension can be supplied but
-  // optional.
+- (void)broadcastStartedWithSetupInfo:
+    (NSDictionary<NSString *, NSObject *> *)setupInfo {
+  // User has requested to start the broadcast. Setup info from the UI extension
+  // can be supplied but optional.
   ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
 
   _client = [[ARDAppClient alloc] initWithDelegate:self];
@ -57,7 +60,8 @@
 }
 
 - (void)broadcastPaused {
-  // User has requested to pause the broadcast. Samples will stop being delivered.
+  // User has requested to pause the broadcast. Samples will stop being
+  // delivered.
 }
 
 - (void)broadcastResumed {
@ -86,7 +90,8 @@
 
 #pragma mark - ARDAppClientDelegate
 
-- (void)appClient:(ARDAppClient *)client didChangeState:(ARDAppClientState)state {
+- (void)appClient:(ARDAppClient *)client
+    didChangeState:(ARDAppClientState)state {
   switch (state) {
     case kARDAppClientStateConnected:
       RTCLog(@"Client connected.");
@ -100,16 +105,19 @@
   }
 }
 
-- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state {
+- (void)appClient:(ARDAppClient *)client
+    didChangeConnectionState:(RTCIceConnectionState)state {
   RTCLog(@"ICE state changed: %ld", (long)state);
 }
 
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+    didCreateLocalCapturer:
+        (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
 }
 
 - (void)appClient:(ARDAppClient *)client
-    didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer {
+    didCreateLocalExternalSampleCapturer:
+        (ARDExternalSampleCapturer *)externalSampleCapturer {
   self.capturer = externalSampleCapturer;
 }
 
@ -118,10 +126,12 @@
 }
 
 - (void)appClient:(ARDAppClient *)client
-    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+    didReceiveRemoteVideoTrack:
+        (RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
 }
 
-- (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
+- (void)appClient:(ARDAppClient *)client
+    didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
 }
 
 - (void)appClient:(ARDAppClient *)client didError:(NSError *)error {
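
Note on the sample-handler hunks above: ReplayKit hands captured frames to this
handler through -processSampleBuffer:withType:, which this sample forwards to
the capturer stored in -appClient:didCreateLocalExternalSampleCapturer:. A
minimal sketch, assuming ARDExternalSampleCapturer exposes
-didCaptureSampleBuffer: (that method is not shown in this diff):

  - (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
                     withType:(RPSampleBufferType)sampleBufferType {
    // Forward only video buffers to the WebRTC capturer; ignore audio here.
    if (sampleBufferType == RPSampleBufferTypeVideo) {
      [self.capturer didCaptureSampleBuffer:sampleBuffer];
    }
  }
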
@ -18,7 +18,8 @@
   UIView *view = [[UIView alloc] initWithFrame:CGRectZero];
   view.backgroundColor = [UIColor colorWithWhite:1.0 alpha:0.7];
 
-  UIImageView *imageView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]];
+  UIImageView *imageView =
+      [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"Icon-180"]];
   imageView.translatesAutoresizingMaskIntoConstraints = NO;
   [view addSubview:imageView];
 
@ -52,22 +53,34 @@
   UILayoutGuide *margin = view.layoutMarginsGuide;
   [imageView.widthAnchor constraintEqualToConstant:60.0].active = YES;
   [imageView.heightAnchor constraintEqualToConstant:60.0].active = YES;
-  [imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20].active = YES;
-  [imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active = YES;
-  [_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
-  [_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor constant:20].active =
-      YES;
-  [_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
-
-  [doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor].active = YES;
-  [doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
-
-  [cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor].active = YES;
-  [cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor constant:-20].active = YES;
+  [imageView.topAnchor constraintEqualToAnchor:margin.topAnchor constant:20]
+      .active = YES;
+  [imageView.centerXAnchor constraintEqualToAnchor:view.centerXAnchor].active =
+      YES;
+  [_roomNameField.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor]
+      .active = YES;
+  [_roomNameField.topAnchor constraintEqualToAnchor:imageView.bottomAnchor
+                                           constant:20]
+      .active = YES;
+  [_roomNameField.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor]
+      .active = YES;
+
+  [doneButton.leadingAnchor constraintEqualToAnchor:margin.leadingAnchor]
+      .active = YES;
+  [doneButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor
+                                          constant:-20]
+      .active = YES;
+
+  [cancelButton.trailingAnchor constraintEqualToAnchor:margin.trailingAnchor]
+      .active = YES;
+  [cancelButton.bottomAnchor constraintEqualToAnchor:margin.bottomAnchor
+                                            constant:-20]
+      .active = YES;
 
   UITapGestureRecognizer *tgr =
-      [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(didTap:)];
+      [[UITapGestureRecognizer alloc] initWithTarget:self
+                                              action:@selector(didTap:)];
   [view addGestureRecognizer:tgr];
 
   self.view = view;
@ -78,21 +91,26 @@
 }
 
 - (void)userDidFinishSetup {
-  // URL of the resource where broadcast can be viewed that will be returned to the application
-  NSURL *broadcastURL = [NSURL
-      URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@", _roomNameField.text]];
+  // URL of the resource where broadcast can be viewed that will be returned to
+  // the application
+  NSURL *broadcastURL =
+      [NSURL URLWithString:[NSString stringWithFormat:@"https://appr.tc/r/%@",
+                                                      _roomNameField.text]];
 
-  // Dictionary with setup information that will be provided to broadcast extension when broadcast
-  // is started
+  // Dictionary with setup information that will be provided to broadcast
+  // extension when broadcast is started
   NSDictionary *setupInfo = @{@"roomName" : _roomNameField.text};
 
-  // Tell ReplayKit that the extension is finished setting up and can begin broadcasting
-  [self.extensionContext completeRequestWithBroadcastURL:broadcastURL setupInfo:setupInfo];
+  // Tell ReplayKit that the extension is finished setting up and can begin
+  // broadcasting
+  [self.extensionContext completeRequestWithBroadcastURL:broadcastURL
+                                                setupInfo:setupInfo];
 }
 
 - (void)userDidCancelSetup {
   // Tell ReplayKit that the extension was cancelled by the user
-  [self.extensionContext cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile"
+  [self.extensionContext
+      cancelRequestWithError:[NSError errorWithDomain:@"com.google.AppRTCMobile"
                                                  code:-1
                                              userInfo:nil]];
 }
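
The `setupInfo` dictionary passed to -completeRequestWithBroadcastURL:setupInfo:
above is the one ReplayKit later delivers to the upload extension, so the room
name typed into this setup UI arrives in -broadcastStartedWithSetupInfo: (the
first hunk of this section). A hedged sketch, assuming the @"roomName" key used
above:

  NSString *roomName = (NSString *)setupInfo[@"roomName"];
  // roomName now holds the value the user entered in _roomNameField.
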
@ -26,10 +26,8 @@
   RTCInitializeSSL();
   NSScreen* screen = [NSScreen mainScreen];
   NSRect visibleRect = [screen visibleFrame];
-  NSRect windowRect = NSMakeRect(NSMidX(visibleRect),
-                                 NSMidY(visibleRect),
-                                 1320,
-                                 1140);
+  NSRect windowRect =
+      NSMakeRect(NSMidX(visibleRect), NSMidY(visibleRect), 1320, 1140);
   NSUInteger styleMask = NSWindowStyleMaskTitled | NSWindowStyleMaskClosable;
   _window = [[NSWindow alloc] initWithContentRect:windowRect
                                         styleMask:styleMask
@ -52,4 +50,3 @@
 }
 
 @end
-
@ -36,15 +36,18 @@ static NSUInteger const kBottomViewHeight = 200;
 @interface APPRTCMainView : NSView
 
 @property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
-@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
-@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
+@property(nonatomic, readonly)
+    NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
+@property(nonatomic, readonly)
+    NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
 @property(nonatomic, readonly) NSTextView* logView;
 
 - (void)displayLogMessage:(NSString*)message;
 
 @end
 
-@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCVideoViewDelegate)>
+@interface APPRTCMainView () <NSTextFieldDelegate,
+                              RTC_OBJC_TYPE (RTCVideoViewDelegate)>
 @end
 @implementation APPRTCMainView {
   NSScrollView* _scrollView;
@ -63,7 +66,8 @@ static NSUInteger const kBottomViewHeight = 200;
 
 - (void)displayLogMessage:(NSString*)message {
   dispatch_async(dispatch_get_main_queue(), ^{
-    self.logView.string = [NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
+    self.logView.string =
+        [NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
     NSRange range = NSMakeRange(self.logView.string.length, 0);
     [self.logView scrollRangeToVisible:range];
   });
@ -84,13 +88,9 @@ static NSUInteger const kBottomViewHeight = 200;
 }
 
 - (void)updateConstraints {
-  NSParameterAssert(
-      _roomField != nil &&
-      _scrollView != nil &&
-      _remoteVideoView != nil &&
-      _localVideoView != nil &&
-      _actionItemsView!= nil &&
-      _connectButton != nil &&
-      _loopbackButton != nil);
+  NSParameterAssert(_roomField != nil && _scrollView != nil &&
+                    _remoteVideoView != nil && _localVideoView != nil &&
+                    _actionItemsView != nil && _connectButton != nil &&
+                    _loopbackButton != nil);
 
   [self removeConstraints:[self constraints]];
@ -115,10 +115,11 @@ static NSUInteger const kBottomViewHeight = 200;
   };
   // Declare this separately to avoid compiler warning about splitting string
   // within an NSArray expression.
-  NSString* verticalConstraintLeft =
-      @"V:|-[_remoteVideoView(remoteViewHeight)]-[_scrollView(kBottomViewHeight)]-|";
+  NSString* verticalConstraintLeft = @"V:|-[_remoteVideoView(remoteViewHeight)]"
+                                     @"-[_scrollView(kBottomViewHeight)]-|";
   NSString* verticalConstraintRight =
-      @"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView(kBottomViewHeight)]-|";
+      @"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView("
+      @"kBottomViewHeight)]-|";
   NSArray* constraintFormats = @[
     verticalConstraintLeft,
     verticalConstraintRight,
@ -148,7 +149,8 @@ static NSUInteger const kBottomViewHeight = 200;
 
 #pragma mark - Constraints helper
 
-+ (void)addConstraints:(NSArray*)constraints toView:(NSView*)view
++ (void)addConstraints:(NSArray*)constraints
+                toView:(NSView*)view
        viewsDictionary:(NSDictionary*)viewsDictionary
                metrics:(NSDictionary*)metrics {
   for (NSString* constraintFormat in constraints) {
@ -170,7 +172,8 @@ static NSUInteger const kBottomViewHeight = 200;
   // Generate room id for loopback options.
   if (_loopbackButton.intValue && [roomString isEqualToString:@""]) {
     roomString = [NSUUID UUID].UUIDString;
-    roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""];
+    roomString = [roomString stringByReplacingOccurrencesOfString:@"-"
+                                                       withString:@""];
   }
   [self.delegate appRTCMainView:self
                  didEnterRoomId:roomString
@ -180,7 +183,8 @@ static NSUInteger const kBottomViewHeight = 200;
 
 #pragma mark - RTCVideoViewDelegate
 
-- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
+- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView
+    didChangeVideoSize:(CGSize)size {
   if (videoView == _remoteVideoView) {
     _remoteVideoSize = size;
   } else if (videoView == _localVideoView) {
@ -216,8 +220,10 @@ static NSUInteger const kBottomViewHeight = 200;
   [_scrollView setDocumentView:_logView];
   [self addSubview:_scrollView];
 
-  _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
-  _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+  _remoteVideoView =
+      [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
+  _localVideoView =
+      [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
 
   [_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
   [self addSubview:_remoteVideoView];
@ -262,8 +268,8 @@ static NSUInteger const kBottomViewHeight = 200;
 
 @end
 
-@interface APPRTCViewController ()
-    <ARDAppClientDelegate, APPRTCMainViewDelegate>
+@interface APPRTCViewController () <ARDAppClientDelegate,
+                                    APPRTCMainViewDelegate>
 @property(nonatomic, readonly) APPRTCMainView* mainView;
 @end
 
@ -327,9 +333,10 @@ static NSUInteger const kBottomViewHeight = 200;
 }
 
 - (void)appClient:(ARDAppClient*)client
-    didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
-  _captureController =
-      [[ARDCaptureController alloc] initWithCapturer:localCapturer
+    didCreateLocalCapturer:
+        (RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
+  _captureController = [[ARDCaptureController alloc]
+      initWithCapturer:localCapturer
               settings:[[ARDSettingsModel alloc] init]];
   [_captureController startCapture];
 }
@ -341,19 +348,18 @@ static NSUInteger const kBottomViewHeight = 200;
 }
 
 - (void)appClient:(ARDAppClient*)client
-    didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
+    didReceiveRemoteVideoTrack:
+        (RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
   _remoteVideoTrack = remoteVideoTrack;
   [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
 }
 
-- (void)appClient:(ARDAppClient *)client
-         didError:(NSError *)error {
+- (void)appClient:(ARDAppClient*)client didError:(NSError*)error {
   [self showAlertWithMessage:[NSString stringWithFormat:@"%@", error]];
   [self disconnect];
 }
 
-- (void)appClient:(ARDAppClient *)client
-      didGetStats:(NSArray *)stats {
+- (void)appClient:(ARDAppClient*)client didGetStats:(NSArray*)stats {
 }
 
 #pragma mark - APPRTCMainViewDelegate
@ -361,7 +367,6 @@ static NSUInteger const kBottomViewHeight = 200;
 - (void)appRTCMainView:(APPRTCMainView*)mainView
         didEnterRoomId:(NSString*)roomId
               loopback:(BOOL)isLoopback {
-
   if ([roomId isEqualToString:@""]) {
     [self.mainView displayLogMessage:@"Missing room id"];
     return;
@ -370,7 +375,8 @@ static NSUInteger const kBottomViewHeight = 200;
   [self disconnect];
   ARDAppClient* client = [[ARDAppClient alloc] initWithDelegate:self];
   [client connectToRoomWithId:roomId
-                     settings:[[ARDSettingsModel alloc] init]  // Use default settings.
+                     settings:[[ARDSettingsModel alloc]
+                                  init]  // Use default settings.
                    isLoopback:isLoopback];
   _client = client;
 }
@ -31,33 +31,36 @@
 }
 
 - (void)applicationWillResignActive:(UIApplication *)application {
-  // Sent when the application is about to move from active to inactive state. This can occur for
-  // certain types of temporary interruptions (such as an incoming phone call or SMS message) or
-  // when the user quits the application and it begins the transition to the background state. Use
-  // this method to pause ongoing tasks, disable timers, and invalidate graphics rendering
+  // Sent when the application is about to move from active to inactive state.
+  // This can occur for certain types of temporary interruptions (such as an
+  // incoming phone call or SMS message) or when the user quits the application
+  // and it begins the transition to the background state. Use this method to
+  // pause ongoing tasks, disable timers, and invalidate graphics rendering
   // callbacks. Games should use this method to pause the game.
 }
 
 - (void)applicationDidEnterBackground:(UIApplication *)application {
-  // Use this method to release shared resources, save user data, invalidate timers, and store
-  // enough application state information to restore your application to its current state in case
-  // it is terminated later. If your application supports background execution, this method is
-  // called instead of applicationWillTerminate: when the user quits.
+  // Use this method to release shared resources, save user data, invalidate
+  // timers, and store enough application state information to restore your
+  // application to its current state in case it is terminated later. If your
+  // application supports background execution, this method is called instead of
+  // applicationWillTerminate: when the user quits.
 }
 
 - (void)applicationWillEnterForeground:(UIApplication *)application {
-  // Called as part of the transition from the background to the active state; here you can undo
-  // many of the changes made on entering the background.
+  // Called as part of the transition from the background to the active state;
+  // here you can undo many of the changes made on entering the background.
 }
 
 - (void)applicationDidBecomeActive:(UIApplication *)application {
-  // Restart any tasks that were paused (or not yet started) while the application was inactive. If
-  // the application was previously in the background, optionally refresh the user interface.
+  // Restart any tasks that were paused (or not yet started) while the
+  // application was inactive. If the application was previously in the
+  // background, optionally refresh the user interface.
 }
 
 - (void)applicationWillTerminate:(UIApplication *)application {
-  // Called when the application is about to terminate. Save data if appropriate. See also
-  // applicationDidEnterBackground:.
+  // Called when the application is about to terminate. Save data if
+  // appropriate. See also applicationDidEnterBackground:.
 }
 
 @end
@ -13,6 +13,7 @@
 
 int main(int argc, char* argv[]) {
   @autoreleasepool {
-    return UIApplicationMain(argc, argv, nil, NSStringFromClass([NADAppDelegate class]));
+    return UIApplicationMain(
+        argc, argv, nil, NSStringFromClass([NADAppDelegate class]));
   }
 }
@ -14,7 +14,8 @@
 
 @synthesize delegate = _delegate;
 
-- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+- (instancetype)initWithDelegate:
+    (id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
   self = [super init];
   if (self) {
     _delegate = delegate;
@ -21,7 +21,8 @@
 }
 
 - (instancetype)initWithName:(NSString *)name
-                  parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters {
+                  parameters:(nullable NSDictionary<NSString *, NSString *> *)
+                                 parameters {
   NSDictionary<NSString *, NSString *> *params = parameters ? parameters : @{};
   return [self initWithName:name parameters:params scalabilityModes:@[]];
 }
@ -49,10 +50,8 @@
 }
 
 - (BOOL)isEqual:(id)object {
-  if (self == object)
-    return YES;
-  if (![object isKindOfClass:[self class]])
-    return NO;
+  if (self == object) return YES;
+  if (![object isKindOfClass:[self class]]) return NO;
   return [self isEqualToCodecInfo:object];
 }
 
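
A side note on the -isEqual: lines reformatted above: an -isEqual: override is
conventionally paired with a matching -hash so that equal codec infos hash
alike. An illustrative sketch only (the real implementation sits outside this
hunk):

  - (NSUInteger)hash {
    // Combine the same fields that -isEqualToCodecInfo: compares.
    return [self.name hash] ^ [self.parameters hash];
  }
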
@ -85,7 +85,8 @@ static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
 }
 
 + (instancetype)currentConfiguration {
-  RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
+  RTC_OBJC_TYPE(RTCAudioSession) *session =
+      [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
   RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config =
       [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
   config.category = session.category;
@ -104,7 +105,8 @@ static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil;
   }
 }
 
-+ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
++ (void)setWebRTCConfiguration:
+    (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration {
   @synchronized(self) {
     gWebRTCConfiguration = configuration;
   }
@ -26,12 +26,13 @@
 const int64_t kNanosecondsPerSecond = 1000000000;
 
 @interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
-()<AVCaptureVideoDataOutputSampleBufferDelegate> @property(nonatomic,
-                                                           readonly) dispatch_queue_t frameQueue;
+()<AVCaptureVideoDataOutputSampleBufferDelegate> @property(nonatomic, readonly)
+    dispatch_queue_t frameQueue;
 @property(nonatomic, strong) AVCaptureDevice *currentDevice;
 @property(nonatomic, assign) BOOL hasRetriedOnFatalError;
 @property(nonatomic, assign) BOOL isRunning;
-// Will the session be running once all asynchronous operations have been completed?
+// Will the session be running once all asynchronous operations have been
+// completed?
 @property(nonatomic, assign) BOOL willBeRunning;
 @end
 
@ -55,15 +56,19 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 @synthesize willBeRunning = _willBeRunning;
 
 - (instancetype)init {
-  return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
+  return [self initWithDelegate:nil
+                 captureSession:[[AVCaptureSession alloc] init]];
 }
 
-- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
-  return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
+- (instancetype)initWithDelegate:
+    (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
+  return [self initWithDelegate:delegate
+                 captureSession:[[AVCaptureSession alloc] init]];
 }
 
 // This initializer is used for testing.
-- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
+- (instancetype)initWithDelegate:
+                    (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
                   captureSession:(AVCaptureSession *)captureSession {
   self = [super initWithDelegate:delegate];
   if (self) {
@ -113,22 +118,26 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 
 - (void)dealloc {
   NSAssert(!_willBeRunning,
-           @"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
+           @"Session was still running in "
+           @"RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to "
           @"call stopCapture?");
   [[NSNotificationCenter defaultCenter] removeObserver:self];
 }
 
 + (NSArray<AVCaptureDevice *> *)captureDevices {
   AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
-      discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
+      discoverySessionWithDeviceTypes:@[
+        AVCaptureDeviceTypeBuiltInWideAngleCamera
+      ]
                             mediaType:AVMediaTypeVideo
                              position:AVCaptureDevicePositionUnspecified];
   return session.devices;
 }
 
-+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
-  // Support opening the device in any format. We make sure it's converted to a format we
-  // can handle, if needed, in the method `-setupVideoDataOutput`.
++ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:
+    (AVCaptureDevice *)device {
+  // Support opening the device in any format. We make sure it's converted to a
+  // format we can handle, if needed, in the method `-setupVideoDataOutput`.
   return device.formats;
 }
 
@ -139,7 +148,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps {
-  [self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
+  [self startCaptureWithDevice:device
+                        format:format
+                           fps:fps
+             completionHandler:nil];
 }
 
 - (void)stopCapture {
@ -149,17 +161,21 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps
-             completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler {
+             completionHandler:(nullable void (^)(NSError *_Nullable error))
+                                   completionHandler {
   _willBeRunning = YES;
   [RTC_OBJC_TYPE(RTCDispatcher)
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
-                      RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
+                      RTCLogInfo("startCaptureWithDevice %@ @ %ld fps",
+                                 format,
+                                 (long)fps);
 
 #if TARGET_OS_IPHONE
                       dispatch_async(dispatch_get_main_queue(), ^{
                         if (!self->_generatingOrientationNotifications) {
-                          [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+                          [[UIDevice currentDevice]
+                              beginGeneratingDeviceOrientationNotifications];
                           self->_generatingOrientationNotifications = YES;
                         }
                       });
@ -191,14 +207,16 @@ const int64_t kNanosecondsPerSecond = 1000000000;
                     }];
 }
 
-- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
+- (void)stopCaptureWithCompletionHandler:
+    (nullable void (^)(void))completionHandler {
   _willBeRunning = NO;
   [RTC_OBJC_TYPE(RTCDispatcher)
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      RTCLogInfo("Stop");
                      self.currentDevice = nil;
-                     for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
+                     for (AVCaptureDeviceInput *oldInput in
+                              [self.captureSession.inputs copy]) {
                        [self.captureSession removeInput:oldInput];
                      }
                      [self.captureSession stopRunning];
@ -206,7 +224,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 #if TARGET_OS_IPHONE
                      dispatch_async(dispatch_get_main_queue(), ^{
                        if (self->_generatingOrientationNotifications) {
-                         [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+                         [[UIDevice currentDevice]
+                             endGeneratingDeviceOrientationNotifications];
                          self->_generatingOrientationNotifications = NO;
                        }
                      });
@ -222,7 +241,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 
 #if TARGET_OS_IPHONE
 - (void)deviceOrientationDidChange:(NSNotification *)notification {
-  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       [self updateOrientation];
                     }];
@ -236,7 +256,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
        fromConnection:(AVCaptureConnection *)connection {
   NSParameterAssert(captureOutput == _videoDataOutput);
 
-  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
+      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }
@ -249,16 +270,19 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 #if TARGET_OS_IPHONE
   // Default to portrait orientation on iPhone.
   BOOL usingFrontCamera = NO;
-  // Check the image's EXIF for the camera the image came from as the image could have been
-  // delayed as we set alwaysDiscardsLateVideoFrames to NO.
+  // Check the image's EXIF for the camera the image came from as the image
+  // could have been delayed as we set alwaysDiscardsLateVideoFrames to NO.
   AVCaptureDevicePosition cameraPosition =
       [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
   if (cameraPosition != AVCaptureDevicePositionUnspecified) {
     usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
   } else {
     AVCaptureDeviceInput *deviceInput =
-        (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
-    usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
+        (AVCaptureDeviceInput *)((AVCaptureInputPort *)
+                                     connection.inputPorts.firstObject)
+            .input;
+    usingFrontCamera =
+        AVCaptureDevicePositionFront == deviceInput.device.position;
   }
   switch (_orientation) {
     case UIDeviceOrientationPortrait:
@ -286,7 +310,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 
   RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
       [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
-  int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
+  int64_t timeStampNs =
+      CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
       kNanosecondsPerSecond;
   RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
       [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
@ -299,13 +324,14 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection {
 #if TARGET_OS_IPHONE
-  CFStringRef droppedReason =
-      CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
+  CFStringRef droppedReason = CMGetAttachment(
+      sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
 #else
   // DroppedFrameReason unavailable on macOS.
   CFStringRef droppedReason = nil;
 #endif
-  RTCLogError(@"Dropped sample buffer. Reason: %@", (__bridge NSString *)droppedReason);
+  RTCLogError(@"Dropped sample buffer. Reason: %@",
+              (__bridge NSString *)droppedReason);
 }
 
 #pragma mark - AVCaptureSession notifications
@ -313,7 +339,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 - (void)handleCaptureSessionInterruption:(NSNotification *)notification {
   NSString *reasonString = nil;
 #if TARGET_OS_IPHONE
-  NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
+  NSNumber *reason =
+      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
   if (reason) {
     switch (reason.intValue) {
       case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
@ -339,10 +366,12 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 }
 
 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
-  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
+  NSError *error =
+      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
   RTCLogError(@"Capture session runtime error: %@", error);
 
-  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
 #if TARGET_OS_IPHONE
                       if (error.code == AVErrorMediaServicesWereReset) {
@ -359,7 +388,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
   RTCLog(@"Capture session started.");
 
-  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       // If we successfully restarted after an unknown
                       // error, allow future retries on fatal errors.
@ -376,7 +406,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       if (!self.hasRetriedOnFatalError) {
-                        RTCLogWarning(@"Attempting to recover from fatal capture error.");
+                        RTCLogWarning(
+                            @"Attempting to recover from fatal capture error.");
                         [self handleNonFatalError];
                         self.hasRetriedOnFatalError = YES;
                       } else {
@ -386,7 +417,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 }
 
 - (void)handleNonFatalError {
-  [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+  [RTC_OBJC_TYPE(RTCDispatcher)
+      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       RTCLog(@"Restarting capture session after error.");
                       if (self.isRunning) {
@ -444,29 +476,34 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 
 - (void)setupVideoDataOutput {
   NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
-  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+  AVCaptureVideoDataOutput *videoDataOutput =
+      [[AVCaptureVideoDataOutput alloc] init];
 
-  // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
-  // device with the most efficient output format first. Find the first format that we support.
+  // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel
+  // formats supported by the device with the most efficient output format
+  // first. Find the first format that we support.
   NSSet<NSNumber *> *supportedPixelFormats =
      [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats];
-  NSMutableOrderedSet *availablePixelFormats =
-      [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
+  NSMutableOrderedSet *availablePixelFormats = [NSMutableOrderedSet
+      orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
   [availablePixelFormats intersectSet:supportedPixelFormats];
   NSNumber *pixelFormat = availablePixelFormats.firstObject;
   NSAssert(pixelFormat, @"Output device has no supported formats.");
 
   _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
   _outputPixelFormat = _preferredOutputPixelFormat;
-  videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
+  videoDataOutput.videoSettings =
+      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
   videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
   [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
   _videoDataOutput = videoDataOutput;
 }
 
 - (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
-  FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-  if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) {
+  FourCharCode mediaSubType =
+      CMFormatDescriptionGetMediaSubType(format.formatDescription);
+  if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats]
+          containsObject:@(mediaSubType)]) {
     mediaSubType = _preferredOutputPixelFormat;
   }
 
@ -474,9 +511,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
     _outputPixelFormat = mediaSubType;
   }
 
-  // Update videoSettings with dimensions, as some virtual cameras, e.g. Snap Camera, may not work
-  // otherwise.
-  CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+  // Update videoSettings with dimensions, as some virtual cameras, e.g. Snap
+  // Camera, may not work otherwise.
+  CMVideoDimensions dimensions =
+      CMVideoFormatDescriptionGetDimensions(format.formatDescription);
   _videoDataOutput.videoSettings = @{
     (id)kCVPixelBufferWidthKey : @(dimensions.width),
     (id)kCVPixelBufferHeightKey : @(dimensions.height),
@ -486,26 +524,32 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 
 #pragma mark - Private, called inside capture queue
 
-- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
-  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format
+                              fps:(NSInteger)fps {
+  NSAssert([RTC_OBJC_TYPE(RTCDispatcher)
+               isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateDeviceCaptureFormat must be called on the capture queue.");
   @try {
     _currentDevice.activeFormat = format;
     _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
   } @catch (NSException *exception) {
-    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
+    RTCLogError(@"Failed to set active format!\n User info:%@",
+                exception.userInfo);
     return;
   }
 }
 
 - (void)reconfigureCaptureSessionInput {
-  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+  NSAssert(
+      [RTC_OBJC_TYPE(RTCDispatcher)
+          isOnQueueForType:RTCDispatcherTypeCaptureSession],
      @"reconfigureCaptureSessionInput must be called on the capture queue.");
   NSError *error = nil;
   AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
   if (!input) {
-    RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
+    RTCLogError(@"Failed to create front camera input: %@",
+                error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
@ -521,7 +565,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 }
 
 - (void)updateOrientation {
-  NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession],
+  NSAssert([RTC_OBJC_TYPE(RTCDispatcher)
+               isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateOrientation must be called on the capture queue.");
 #if TARGET_OS_IPHONE
   _orientation = [UIDevice currentDevice].orientation;
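
For context on the capturer hunks above: the switch whose first case appears in
the sample-buffer delegate callback maps UIDeviceOrientation to
RTCVideoRotation, flipping the landscape rotations when the (mirrored) front
camera is in use. A sketch reconstructed from the surrounding code, not part of
this diff:

  switch (_orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = RTCVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = RTCVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
      break;
    default:
      break;  // Face-up/face-down/unknown keep the previous rotation.
  }
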
@ -50,7 +50,9 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
     NSError *error =
         [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
                             code:RTCFileVideoCapturerErrorCode_CapturerRunning
-                        userInfo:@{NSUnderlyingErrorKey : @"Capturer has been started."}];
+                        userInfo:@{
+                          NSUnderlyingErrorKey : @"Capturer has been started."
+                        }];
 
     errorBlock(error);
     return;
@ -58,12 +60,14 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
     _status = RTCFileVideoCapturerStatusStarted;
   }
 
-  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+  dispatch_async(
+      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSString *pathForFile = [self pathForFileName:nameOfFile];
        if (!pathForFile) {
-          NSString *errorString =
-              [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
-          NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
+          NSString *errorString = [NSString
+              stringWithFormat:@"File %@ not found in bundle", nameOfFile];
+          NSError *error = [NSError
+              errorWithDomain:kRTCFileVideoCapturerErrorDomain
                          code:RTCFileVideoCapturerErrorCode_FileNotFound
                      userInfo:@{NSUnderlyingErrorKey : errorString}];
          errorBlock(error);
@ -90,10 +94,12 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
   }
 
   NSDictionary *options = @{
-    (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+    (NSString *)kCVPixelBufferPixelFormatTypeKey :
+        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
   };
   _outTrack =
-      [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject outputSettings:options];
+      [[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject
                                       outputSettings:options];
   [_reader addOutput:_outTrack];
 
   [_reader startReading];
@ -113,8 +119,8 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
     return nil;
   }
 
-  NSString *path =
-      [[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
+  NSString *path = [[NSBundle mainBundle] pathForResource:nameComponents[0]
                                                   ofType:nameComponents[1]];
   return path;
 }
 
@ -147,7 +153,8 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
     [self readNextBuffer];
     return;
   }
-  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
+  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
+      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    CFRelease(sampleBuffer);
    [self readNextBuffer];
@ -158,15 +165,19 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
 }
 
 - (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
-  CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+  CMTime presentationTime =
+      CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
   Float64 presentationDifference =
      CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
   _lastPresentationTime = presentationTime;
-  int64_t presentationDifferenceRound = lroundf(presentationDifference * NSEC_PER_SEC);
+  int64_t presentationDifferenceRound =
+      lroundf(presentationDifference * NSEC_PER_SEC);
 
   __block dispatch_source_t timer = [self createStrictTimer];
-  // Strict timer that will fire `presentationDifferenceRound` ns from now and never again.
-  dispatch_source_set_timer(timer,
-                            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
-                            DISPATCH_TIME_FOREVER,
-                            0);
+  // Strict timer that will fire `presentationDifferenceRound` ns from now and
+  // never again.
+  dispatch_source_set_timer(
+      timer,
+      dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
+      DISPATCH_TIME_FOREVER,
+      0);
@ -177,14 +188,16 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
   CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
   if (!pixelBuffer) {
     CFRelease(sampleBuffer);
-    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-      [self readNextBuffer];
-    });
+    dispatch_async(
+        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+          [self readNextBuffer];
+        });
     return;
   }
 
   RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
-      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+          initWithPixelBuffer:pixelBuffer];
   NSTimeInterval timeStampSeconds = CACurrentMediaTime();
   int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
   RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
@ -193,7 +206,8 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
                                                    timeStampNs:timeStampNs];
   CFRelease(sampleBuffer);
 
-  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-    [self readNextBuffer];
-  });
+  dispatch_async(
+      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+        [self readNextBuffer];
+      });
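
The -createStrictTimer helper used by -publishSampleBuffer: is not part of this
hunk; a minimal sketch of such a helper, assuming a hypothetical _timerQueue
ivar, might look like:

  - (dispatch_source_t)createStrictTimer {
    // DISPATCH_TIMER_STRICT asks the system not to coalesce or defer the
    // timer, which matters here because frame pacing follows the file's
    // presentation timestamps.
    return dispatch_source_create(
        DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, _timerQueue);
  }
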
@ -58,7 +58,8 @@
   if ([[self class] isMetalAvailable]) {
     _metalView = [[MTKView alloc] initWithFrame:self.bounds];
     [self addSubview:_metalView];
-    _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
+    _metalView.layerContentsPlacement =
+        NSViewLayerContentsPlacementScaleProportionallyToFit;
     _metalView.translatesAutoresizingMaskIntoConstraints = NO;
     _metalView.framebufferOnly = YES;
     _metalView.delegate = self;
@ -22,8 +22,9 @@
|
|||||||
#import "RTCMTLNV12Renderer.h"
|
#import "RTCMTLNV12Renderer.h"
|
||||||
#import "RTCMTLRGBRenderer.h"
|
#import "RTCMTLRGBRenderer.h"
|
||||||
|
|
||||||
// To avoid unreconized symbol linker errors, we're taking advantage of the objc runtime.
|
// To avoid unreconized symbol linker errors, we're taking advantage of the objc
|
||||||
// Linking errors occur when compiling for architectures that don't support Metal.
|
// runtime. Linking errors occur when compiling for architectures that don't
|
||||||
|
// support Metal.
|
||||||
#define MTKViewClass NSClassFromString(@"MTKView")
|
#define MTKViewClass NSClassFromString(@"MTKView")
|
||||||
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
|
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
|
||||||
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
|
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
|
||||||
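The NSClassFromString macros above are the whole trick here: resolving the Metal classes through the Objective-C runtime means no link-time symbol reference is emitted, so the file still links on architectures without Metal. A minimal sketch of the same guard (illustrative, not the view's exact code path):

  // Resolve the class at runtime; returns nil when Metal isn't linked.
  Class viewClass = NSClassFromString(@"MTKView");
  if (viewClass != nil) {
    id metalView = [[viewClass alloc] init];
    // ... configure the Metal-backed view ...
  } else {
    // Fall back to a non-Metal renderer (e.g. OpenGL).
  }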
@@ -136,7 +137,8 @@
 #pragma mark - MTKViewDelegate methods

 - (void)drawInMTKView:(nonnull MTKView *)view {
-NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
+NSAssert(view == self.metalView,
+@"Receiving draw callbacks from foreign instance.");
 RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame;
 // Skip rendering if we've already rendered this frame.
 if (!videoFrame || videoFrame.width <= 0 || videoFrame.height <= 0 ||

@@ -149,10 +151,14 @@
 }

 RTCMTLRenderer *renderer;
-if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
-RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
-const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
-if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
+if ([videoFrame.buffer
+isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
+RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
+(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer;
+const OSType pixelFormat =
+CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
+if (pixelFormat == kCVPixelFormatType_32BGRA ||
+pixelFormat == kCVPixelFormatType_32ARGB) {
 if (!self.rendererRGB) {
 self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer];
 if (![self.rendererRGB addRenderingDestination:self.metalView]) {

@@ -222,8 +228,8 @@
 CGSize videoFrameSize = self.videoFrameSize;
 RTCVideoRotation frameRotation = [self frameRotation];

-BOOL useLandscape =
-(frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
+BOOL useLandscape = (frameRotation == RTCVideoRotation_0) ||
+(frameRotation == RTCVideoRotation_180);
 BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
 (self.videoFrame.rotation == RTCVideoRotation_180);

@@ -30,13 +30,13 @@
 // error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is
 // the method that will trigger the binding of the render
 // buffer. Because the standard behaviour of -[UIView setNeedsDisplay]
-// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains
-// its own `isDirty` flag.
+// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView)
+// maintains its own `isDirty` flag.

 @interface RTC_OBJC_TYPE (RTCEAGLVideoView)
 ()<GLKViewDelegate>
-// `videoFrame` is set when we receive a frame from a worker thread and is read
-// from the display link callback so atomicity is required.
+// `videoFrame` is set when we receive a frame from a worker thread and is
+// read from the display link callback so atomicity is required.
 @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame;
 @property(nonatomic, readonly) GLKView *glkView;
 @end

@@ -68,7 +68,8 @@
 return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]];
 }

-- (instancetype)initWithFrame:(CGRect)frame shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
+- (instancetype)initWithFrame:(CGRect)frame
+shader:(id<RTC_OBJC_TYPE(RTCVideoViewShading)>)shader {
 self = [super initWithFrame:frame];
 if (self) {
 _shader = shader;

@@ -104,8 +105,7 @@
 _glContext = glContext;

 // GLKView manages a framebuffer for us.
-_glkView = [[GLKView alloc] initWithFrame:CGRectZero
-context:_glContext];
+_glkView = [[GLKView alloc] initWithFrame:CGRectZero context:_glContext];
 _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
 _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone;
 _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone;

@@ -136,7 +136,8 @@
 RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf;
 [strongSelf displayLinkTimerDidFire];
 }];
-if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) {
+if ([[UIApplication sharedApplication] applicationState] ==
+UIApplicationStateActive) {
 [self setupGL];
 }
 return YES;

@@ -198,7 +199,8 @@
 glClear(GL_COLOR_BUFFER_BIT);
 if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
 if (!_nv12TextureCache) {
-_nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext];
+_nv12TextureCache =
+[[RTCNV12TextureCache alloc] initWithContext:_glContext];
 }
 if (_nv12TextureCache) {
 [_nv12TextureCache uploadFrameToTextures:frame];

@@ -213,7 +215,8 @@
 }
 } else {
 if (!_i420TextureCache) {
-_i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext];
+_i420TextureCache =
+[[RTCI420TextureCache alloc] initWithContext:_glContext];
 }
 [_i420TextureCache uploadFrameToTextures:frame];
 [_shader applyShadingForFrameWithWidth:frame.width

@@ -31,14 +31,15 @@
 - (instancetype)initWithContext:(EAGLContext *)context {
 self = [super init];
 if (self) {
-CVReturn ret = CVOpenGLESTextureCacheCreate(
-kCFAllocatorDefault, NULL,
+CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault,
+NULL,
 #if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
 context,
 #else
 (__bridge void *)context,
 #endif
-NULL, &_textureCache);
+NULL,
+&_textureCache);
 if (ret != kCVReturnSuccess) {
 self = nil;
 }

@@ -57,9 +58,19 @@
 CFRelease(*textureOut);
 *textureOut = nil;
 }
-CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
-kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, pixelFormat, width,
-height, pixelFormat, GL_UNSIGNED_BYTE, planeIndex, textureOut);
+CVReturn ret =
+CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+_textureCache,
+pixelBuffer,
+NULL,
+GL_TEXTURE_2D,
+pixelFormat,
+width,
+height,
+pixelFormat,
+GL_UNSIGNED_BYTE,
+planeIndex,
+textureOut);
 if (ret != kCVReturnSuccess) {
 if (*textureOut) {
 CFRelease(*textureOut);

@@ -80,7 +91,8 @@
 - (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
 NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]],
 @"frame must be CVPixelBuffer backed");
-RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
+RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
 CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer;
 return [self loadTexture:&_yTextureRef
 pixelBuffer:pixelBuffer

@@ -30,7 +30,8 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecH264Name
 parameters:constrainedHighParams];

 NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{

@@ -39,11 +40,12 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecH264Name
 parameters:constrainedBaselineParams];

-RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
+RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo)
+alloc] initWithName:kRTCVideoCodecVp8Name];

 NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
 constrainedHighInfo,

@@ -52,18 +54,20 @@
 ] mutableCopy];

 if ([RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) {
-[result
-addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
+[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecVp9Name]];
 }

 #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY)
-[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
+[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecAv1Name]];
 #endif

 return result;
 }

-- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:
+(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
 if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
 return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
 } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {

@@ -32,7 +32,8 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecH264Name
 parameters:constrainedHighParams];

 NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{

@@ -41,11 +42,12 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecH264Name
 parameters:constrainedBaselineParams];

-RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name];
+RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo)
+alloc] initWithName:kRTCVideoCodecVp8Name];

 NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *result = [@[
 constrainedHighInfo,

@@ -54,18 +56,20 @@
 ] mutableCopy];

 if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) {
-[result
-addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]];
+[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecVp9Name]];
 }

 #if defined(RTC_USE_LIBAOM_AV1_ENCODER)
-[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]];
+[result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:kRTCVideoCodecAv1Name]];
 #endif

 return result;
 }

-- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:
+(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
 if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
 return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
 } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {

@@ -88,7 +92,8 @@
 NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
 [[[self class] supportedCodecs] mutableCopy];

-NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs = [NSMutableArray array];
+NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *orderedCodecs =
+[NSMutableArray array];
 NSUInteger index = [codecs indexOfObject:self.preferredCodec];
 if (index != NSNotFound) {
 [orderedCodecs addObject:[codecs objectAtIndex:index]];
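This last hunk sits in the factory's codec-ordering logic: `supportedCodecs` is copied and the user's preferred codec, if present, is moved to the front. The underlying idea, as a sketch with hypothetical array contents:

  NSMutableArray<NSString *> *codecs =
      [@[ @"H264", @"VP8", @"VP9" ] mutableCopy];
  NSString *preferred = @"VP9";
  NSUInteger index = [codecs indexOfObject:preferred];
  if (index != NSNotFound) {
    // Move the preferred entry to the front, keeping the rest in order.
    [codecs removeObjectAtIndex:index];
    [codecs insertObject:preferred atIndex:0];
  }
  // codecs is now @[ @"VP9", @"H264", @"VP8" ]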
@@ -16,7 +16,8 @@
 @implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264)

 - (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
-NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
+NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
+[NSMutableArray array];
 NSString *codecName = kRTCVideoCodecH264Name;

 NSDictionary<NSString *, NSString *> *constrainedHighParams = @{

@@ -25,7 +26,8 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:codecName
 parameters:constrainedHighParams];
 [codecs addObject:constrainedHighInfo];

@@ -35,14 +37,16 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:codecName
 parameters:constrainedBaselineParams];
 [codecs addObject:constrainedBaselineInfo];

 return [codecs copy];
 }

-- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+- (id<RTC_OBJC_TYPE(RTCVideoDecoder)>)createDecoder:
+(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
 return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init];
 }

@@ -16,7 +16,8 @@
 @implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264)

 - (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs {
-NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs = [NSMutableArray array];
+NSMutableArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *codecs =
+[NSMutableArray array];
 NSString *codecName = kRTCVideoCodecH264Name;

 NSDictionary<NSString *, NSString *> *constrainedHighParams = @{

@@ -25,7 +26,8 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:codecName
 parameters:constrainedHighParams];
 [codecs addObject:constrainedHighInfo];

@@ -35,14 +37,16 @@
 @"packetization-mode" : @"1",
 };
 RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo =
-[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName
+[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
+initWithName:codecName
 parameters:constrainedBaselineParams];
 [codecs addObject:constrainedBaselineInfo];

 return [codecs copy];
 }

-- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
+- (id<RTC_OBJC_TYPE(RTCVideoEncoder)>)createEncoder:
+(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info {
 return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info];
 }

@@ -51,15 +51,18 @@
 [RTC_OBJC_TYPE(RTCDispatcher)
 dispatchAsyncOnType:RTCDispatcherTypeMain
 block:^{
-AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+AVCaptureVideoPreviewLayer *previewLayer =
+[self previewLayer];
 [RTC_OBJC_TYPE(RTCDispatcher)
 dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
 block:^{
 previewLayer.session = captureSession;
 [RTC_OBJC_TYPE(RTCDispatcher)
-dispatchAsyncOnType:RTCDispatcherTypeMain
+dispatchAsyncOnType:
+RTCDispatcherTypeMain
 block:^{
-[self setCorrectVideoOrientation];
+[self
+setCorrectVideoOrientation];
 }];
 }];
 }];

@@ -97,21 +100,24 @@
 previewLayer.connection.videoOrientation =
 AVCaptureVideoOrientationPortrait;
 }
-// If device orientation switches to FaceUp or FaceDown, don't change video orientation.
+// If device orientation switches to FaceUp or FaceDown, don't change video
+// orientation.
 }
 }

 #pragma mark - Private

 - (void)addOrientationObserver {
-[[NSNotificationCenter defaultCenter] addObserver:self
+[[NSNotificationCenter defaultCenter]
+addObserver:self
 selector:@selector(orientationChanged:)
 name:UIDeviceOrientationDidChangeNotification
 object:nil];
 }

 - (void)removeOrientationObserver {
-[[NSNotificationCenter defaultCenter] removeObserver:self
+[[NSNotificationCenter defaultCenter]
+removeObserver:self
 name:UIDeviceOrientationDidChangeNotification
 object:nil];
 }

@@ -20,13 +20,11 @@ static dispatch_queue_t kNetworkMonitorQueue = nil;
 static dispatch_once_t onceToken;
 dispatch_once(&onceToken, ^{
 kAudioSessionQueue = dispatch_queue_create(
-"org.webrtc.RTCDispatcherAudioSession",
-DISPATCH_QUEUE_SERIAL);
+"org.webrtc.RTCDispatcherAudioSession", DISPATCH_QUEUE_SERIAL);
 kCaptureSessionQueue = dispatch_queue_create(
-"org.webrtc.RTCDispatcherCaptureSession",
-DISPATCH_QUEUE_SERIAL);
-kNetworkMonitorQueue =
-dispatch_queue_create("org.webrtc.RTCDispatcherNetworkMonitor", DISPATCH_QUEUE_SERIAL);
+"org.webrtc.RTCDispatcherCaptureSession", DISPATCH_QUEUE_SERIAL);
+kNetworkMonitorQueue = dispatch_queue_create(
+"org.webrtc.RTCDispatcherNetworkMonitor", DISPATCH_QUEUE_SERIAL);
 });
 }

@@ -39,10 +37,12 @@ static dispatch_queue_t kNetworkMonitorQueue = nil;
 + (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType {
 dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType];
 const char* targetLabel = dispatch_queue_get_label(targetQueue);
-const char* currentLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
+const char* currentLabel =
+dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);

 NSAssert(strlen(targetLabel) > 0, @"Label is required for the target queue.");
-NSAssert(strlen(currentLabel) > 0, @"Label is required for the current queue.");
+NSAssert(strlen(currentLabel) > 0,
+@"Label is required for the current queue.");

 return strcmp(targetLabel, currentLabel) == 0;
 }
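`isOnQueueForType:` compares GCD queue labels rather than queue pointers, which is why the asserts above insist both labels are non-empty. A standalone sketch of the label check (the queue label is hypothetical):

  dispatch_queue_t target =
      dispatch_queue_create("org.example.worker", DISPATCH_QUEUE_SERIAL);
  dispatch_async(target, ^{
    // DISPATCH_CURRENT_QUEUE_LABEL names the queue this block runs on.
    const char *current =
        dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
    BOOL onTarget = strcmp(dispatch_queue_get_label(target), current) == 0;
    NSLog(@"on target queue: %d", onTarget);  // logs 1
  });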
@@ -37,7 +37,8 @@
 - (void)testCallbackGetsCalledForAppropriateLevel {
 self.logger.severity = RTCLoggingSeverityWarning;

-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"callbackWarning"];

 [self.logger start:^(NSString *message) {
 XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);

@@ -52,10 +53,11 @@
 - (void)testCallbackWithSeverityGetsCalledForAppropriateLevel {
 self.logger.severity = RTCLoggingSeverityWarning;

-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"callbackWarning"];

-[self.logger
-startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+[self.logger startWithMessageAndSeverityHandler:^(
+NSString *message, RTCLoggingSeverity severity) {
 XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
 XCTAssertEqual(severity, RTCLoggingSeverityError);
 [callbackExpectation fulfill];

@@ -69,7 +71,8 @@
 - (void)testCallbackDoesNotGetCalledForOtherLevels {
 self.logger.severity = RTCLoggingSeverityError;

-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"callbackError"];

 [self.logger start:^(NSString *message) {
 XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);

@@ -86,10 +89,11 @@
 - (void)testCallbackWithSeverityDoesNotGetCalledForOtherLevels {
 self.logger.severity = RTCLoggingSeverityError;

-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackError"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"callbackError"];

-[self.logger
-startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+[self.logger startWithMessageAndSeverityHandler:^(
+NSString *message, RTCLoggingSeverity severity) {
 XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
 XCTAssertEqual(severity, RTCLoggingSeverityError);
 [callbackExpectation fulfill];

@@ -105,7 +109,8 @@
 - (void)testCallbackDoesNotgetCalledForSeverityNone {
 self.logger.severity = RTCLoggingSeverityNone;

-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"unexpectedCallback"];

 [self.logger start:^(NSString *message) {
 [callbackExpectation fulfill];

@@ -117,17 +122,19 @@
 RTCLogError("Horrible error");

 XCTWaiter *waiter = [[XCTWaiter alloc] init];
-XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
+timeout:1.0];
 XCTAssertEqual(result, XCTWaiterResultTimedOut);
 }

 - (void)testCallbackWithSeverityDoesNotgetCalledForSeverityNone {
 self.logger.severity = RTCLoggingSeverityNone;

-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"unexpectedCallback"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"unexpectedCallback"];

-[self.logger
-startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity severity) {
+[self.logger startWithMessageAndSeverityHandler:^(
+NSString *message, RTCLoggingSeverity severity) {
 [callbackExpectation fulfill];
 XCTAssertTrue(false);
 }];

@@ -137,7 +144,8 @@
 RTCLogError("Horrible error");

 XCTWaiter *waiter = [[XCTWaiter alloc] init];
-XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
+timeout:1.0];
 XCTAssertEqual(result, XCTWaiterResultTimedOut);
 }

@@ -154,7 +162,8 @@
 }

 - (void)testStopCallbackLogger {
-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"stopped"];

 [self.logger start:^(NSString *message) {
 [callbackExpectation fulfill];

@@ -165,15 +174,17 @@
 RTCLogInfo("Just some info");

 XCTWaiter *waiter = [[XCTWaiter alloc] init];
-XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
+timeout:1.0];
 XCTAssertEqual(result, XCTWaiterResultTimedOut);
 }

 - (void)testStopCallbackWithSeverityLogger {
-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"stopped"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"stopped"];

-[self.logger
-startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
+[self.logger startWithMessageAndSeverityHandler:^(
+NSString *message, RTCLoggingSeverity loggingServerity) {
 [callbackExpectation fulfill];
 }];

@@ -182,12 +193,14 @@
 RTCLogInfo("Just some info");

 XCTWaiter *waiter = [[XCTWaiter alloc] init];
-XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
+timeout:1.0];
 XCTAssertEqual(result, XCTWaiterResultTimedOut);
 }

 - (void)testDestroyingCallbackLogger {
-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"destroyed"];

 [self.logger start:^(NSString *message) {
 [callbackExpectation fulfill];

@@ -198,15 +211,17 @@
 RTCLogInfo("Just some info");

 XCTWaiter *waiter = [[XCTWaiter alloc] init];
-XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
+timeout:1.0];
 XCTAssertEqual(result, XCTWaiterResultTimedOut);
 }

 - (void)testDestroyingCallbackWithSeverityLogger {
-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"destroyed"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"destroyed"];

-[self.logger
-startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
+[self.logger startWithMessageAndSeverityHandler:^(
+NSString *message, RTCLoggingSeverity loggingServerity) {
 [callbackExpectation fulfill];
 }];

@@ -215,17 +230,19 @@
 RTCLogInfo("Just some info");

 XCTWaiter *waiter = [[XCTWaiter alloc] init];
-XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ] timeout:1.0];
+XCTWaiterResult result = [waiter waitForExpectations:@[ callbackExpectation ]
+timeout:1.0];
 XCTAssertEqual(result, XCTWaiterResultTimedOut);
 }

 - (void)testCallbackWithSeverityLoggerCannotStartTwice {
 self.logger.severity = RTCLoggingSeverityWarning;

-XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"callbackWarning"];
+XCTestExpectation *callbackExpectation =
+[self expectationWithDescription:@"callbackWarning"];

-[self.logger
-startWithMessageAndSeverityHandler:^(NSString *message, RTCLoggingSeverity loggingServerity) {
+[self.logger startWithMessageAndSeverityHandler:^(
+NSString *message, RTCLoggingSeverity loggingServerity) {
 XCTAssertTrue([message hasSuffix:@"Horrible error\n"]);
 XCTAssertEqual(loggingServerity, RTCLoggingSeverityError);
 [callbackExpectation fulfill];
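A recurring trick in these logger tests: to prove a callback is never invoked, the handler fulfills the expectation, and the test then requires the wait to time out. Schematically (a sketch, not the test file itself):

  XCTestExpectation *unexpected =
      [self expectationWithDescription:@"should not fire"];
  // The handler under test would call [unexpected fulfill] if it ever ran.
  XCTWaiter *waiter = [[XCTWaiter alloc] init];
  XCTWaiterResult result = [waiter waitForExpectations:@[ unexpected ]
                                               timeout:1.0];
  // Timing out is the success condition here.
  XCTAssertEqual(result, XCTWaiterResultTimedOut);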
@@ -23,8 +23,10 @@
 NSString *fullPath = [NSString stringWithFormat:@"%s", __FILE__];
 NSString *extension = fullPath.pathExtension;

-XCTAssertEqualObjects(
-@"m", extension, @"Do not rename %@. It should end with .m.", fullPath.lastPathComponent);
+XCTAssertEqualObjects(@"m",
+extension,
+@"Do not rename %@. It should end with .m.",
+fullPath.lastPathComponent);
 }

 @end

@@ -23,19 +23,21 @@ static NSString *level31ConstrainedBaseline = @"42e01f";

 - (void)testInitWithString {
 RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
-[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh];
+[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
+initWithHexString:level31ConstrainedHigh];
 XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh);
 XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);

-profileLevelId =
-[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline];
+profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
+initWithHexString:level31ConstrainedBaseline];
 XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline);
 XCTAssertEqual(profileLevelId.level, RTCH264Level3_1);
 }

 - (void)testInitWithProfileAndLevel {
 RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
-[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh
+[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc]
+initWithProfile:RTCH264ProfileConstrainedHigh
 level:RTCH264Level3_1];
 XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh);

@@ -78,21 +78,26 @@ static size_t kBufferHeight = 200;
 nil,
 &pixelBufferRef);
 OCMStub([frameMock buffer])
-.andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]);
+.andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
+initWithPixelBuffer:pixelBufferRef]);
 } else {
 OCMStub([frameMock buffer])
-.andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:kBufferWidth
+.andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc]
+initWithWidth:kBufferWidth
 height:kBufferHeight]);
 }
-OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) width]).andReturn(kBufferWidth);
-OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) height]).andReturn(kBufferHeight);
+OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) width])
+.andReturn(kBufferWidth);
+OCMStub([((RTC_OBJC_TYPE(RTCVideoFrame) *)frameMock) height])
+.andReturn(kBufferHeight);
 OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX));
 return frameMock;
 }

 - (id)rendererMockWithSuccessfulSetup:(BOOL)success {
 id rendererMock = OCMClassMock([RTCMTLRenderer class]);
-OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(success);
+OCMStub([rendererMock addRenderingDestination:[OCMArg any]])
+.andReturn(success);
 return rendererMock;
 }

@@ -124,8 +129,8 @@ static size_t kBufferHeight = 200;
 // given
 OCMStub([self.classMock isMetalAvailable]).andReturn(YES);

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
-[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
+alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
 self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]);

 [[self.frameMock reject] buffer];

@@ -148,11 +153,12 @@ static size_t kBufferHeight = 200;
 self.frameMock = [self frameMockWithCVPixelBuffer:NO];

 OCMExpect([self.rendererI420Mock drawFrame:self.frameMock]);
-OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock);
+OCMExpect([self.classMock createI420Renderer])
+.andReturn(self.rendererI420Mock);
 [[self.classMock reject] createNV12Renderer];

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
-[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
+alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];

 // when
 [realView renderFrame:self.frameMock];

@@ -170,11 +176,12 @@ static size_t kBufferHeight = 200;
 self.frameMock = [self frameMockWithCVPixelBuffer:YES];

 OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
-OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+OCMExpect([self.classMock createNV12Renderer])
+.andReturn(self.rendererNV12Mock);
 [[self.classMock reject] createI420Renderer];

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
-[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
+alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];

 // when
 [realView renderFrame:self.frameMock];

@@ -191,11 +198,12 @@ static size_t kBufferHeight = 200;
 self.frameMock = [self frameMockWithCVPixelBuffer:YES];

 OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
-OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+OCMExpect([self.classMock createNV12Renderer])
+.andReturn(self.rendererNV12Mock);
 [[self.classMock reject] createI420Renderer];

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
-[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
+alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];

 [realView renderFrame:self.frameMock];
 [realView drawInMTKView:realView.metalView];

@@ -203,10 +211,12 @@ static size_t kBufferHeight = 200;
 [self.classMock verify];

 // Recreate view.
-realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc]
+initWithFrame:CGRectMake(0, 0, 640, 480)];
 OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
 // View hould reinit renderer.
-OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+OCMExpect([self.classMock createNV12Renderer])
+.andReturn(self.rendererNV12Mock);

 [realView renderFrame:self.frameMock];
 [realView drawInMTKView:realView.metalView];

@@ -220,11 +230,12 @@ static size_t kBufferHeight = 200;
 self.frameMock = [self frameMockWithCVPixelBuffer:YES];

 OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
-OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+OCMExpect([self.classMock createNV12Renderer])
+.andReturn(self.rendererNV12Mock);
 [[self.classMock reject] createI420Renderer];

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
-[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
+alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
 [realView renderFrame:self.frameMock];
 [realView drawInMTKView:realView.metalView];

@@ -245,11 +256,12 @@ static size_t kBufferHeight = 200;
 self.frameMock = [self frameMockWithCVPixelBuffer:YES];

 OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]);
-OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock);
+OCMExpect([self.classMock createNV12Renderer])
+.andReturn(self.rendererNV12Mock);
 [[self.classMock reject] createI420Renderer];

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
-[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
+alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
 [realView renderFrame:self.frameMock];
 [realView drawInMTKView:realView.metalView];

@@ -269,12 +281,13 @@ static size_t kBufferHeight = 200;
 - (void)testReportsSizeChangesToDelegate {
 OCMStub([self.classMock isMetalAvailable]).andReturn(YES);

-id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
+id delegateMock =
+OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate)));
 CGSize size = CGSizeMake(640, 480);
 OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]);

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
-[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView)
+alloc] initWithFrame:CGRectMake(0, 0, 640, 480)];
 realView.delegate = delegateMock;
 [realView setSize:size];

@@ -290,7 +303,8 @@ static size_t kBufferHeight = 200;
 createMetalView:CGRectZero];
 OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]);

-RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
+RTC_OBJC_TYPE(RTCMTLVideoView) *realView =
+[[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init];
 [realView setVideoContentMode:UIViewContentModeScaleAspectFill];

 OCMVerifyAll(metalKitView);
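The view tests above all follow the usual OCMock stub/expect/verify cycle. Reduced to its skeleton (a sketch; `Renderer` and its selectors are hypothetical stand-ins for the real mocked class):

  id rendererMock = OCMClassMock([Renderer class]);  // mock a concrete class
  OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(YES);
  OCMExpect([rendererMock drawFrame:[OCMArg any]]);  // must be called
  // ... exercise the code under test with rendererMock injected ...
  OCMVerifyAll(rendererMock);  // fails if drawFrame: was never invoked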
|
|||||||
@ -44,7 +44,8 @@
|
|||||||
- (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame {
|
- (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame {
|
||||||
CVPixelBufferRef nullPixelBuffer = NULL;
|
CVPixelBufferRef nullPixelBuffer = NULL;
|
||||||
RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer =
|
RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer =
|
||||||
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer];
|
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
|
||||||
|
initWithPixelBuffer:nullPixelBuffer];
|
||||||
RTC_OBJC_TYPE(RTCVideoFrame) *badFrame =
|
RTC_OBJC_TYPE(RTCVideoFrame) *badFrame =
|
||||||
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer
|
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer
|
||||||
rotation:RTCVideoRotation_0
|
rotation:RTCVideoRotation_0
|
||||||
|
|||||||
@ -32,7 +32,8 @@
|
|||||||
#import <XCTest/XCTest.h>
|
#import <XCTest/XCTest.h>
|
||||||
|
|
||||||
@interface MockVideoEncoderDecoderFactory
|
@interface MockVideoEncoderDecoderFactory
|
||||||
: NSObject <RTC_OBJC_TYPE (RTCVideoEncoderFactory), RTC_OBJC_TYPE (RTCVideoDecoderFactory)>
|
: NSObject <RTC_OBJC_TYPE (RTCVideoEncoderFactory),
|
||||||
|
RTC_OBJC_TYPE (RTCVideoDecoderFactory)>
|
||||||
- (instancetype)initWithSupportedCodecs:
|
- (instancetype)initWithSupportedCodecs:
|
||||||
(nonnull NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
|
(nonnull NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)supportedCodecs;
|
||||||
@end
|
@end
|
||||||
@ -73,10 +74,12 @@
|
|||||||
|
|
||||||
- (void)testPeerConnectionLifetime {
|
- (void)testPeerConnectionLifetime {
|
||||||
@autoreleasepool {
|
@autoreleasepool {
|
||||||
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
|
RTC_OBJC_TYPE(RTCConfiguration) *config =
|
||||||
|
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
|
||||||
|
|
||||||
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
|
||||||
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
|
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
|
||||||
|
initWithMandatoryConstraints:@{}
|
||||||
optionalConstraints:nil];
|
optionalConstraints:nil];
|
||||||
|
|
||||||
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
|
RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
|
||||||
@ -84,8 +87,9 @@
|
|||||||
|
|
||||||
@autoreleasepool {
|
@autoreleasepool {
|
||||||
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
|
factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
|
||||||
peerConnection =
|
peerConnection = [factory peerConnectionWithConfiguration:config
|
||||||
[factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
|
constraints:constraints
|
||||||
|
delegate:nil];
|
||||||
[peerConnection close];
|
[peerConnection close];
|
||||||
factory = nil;
|
factory = nil;
|
||||||
}
|
}
|
||||||
@@ -114,9 +118,11 @@

 - (void)testDataChannelLifetime {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+            initWithMandatoryConstraints:@{}
                      optionalConstraints:nil];
     RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig =
         [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init];
@@ -127,10 +133,11 @@

     @autoreleasepool {
       factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
-      peerConnection =
-          [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
-      dataChannel =
-          [peerConnection dataChannelForLabel:@"test_channel" configuration:dataChannelConfig];
+      peerConnection = [factory peerConnectionWithConfiguration:config
+                                                    constraints:constraints
+                                                       delegate:nil];
+      dataChannel = [peerConnection dataChannelForLabel:@"test_channel"
+                                          configuration:dataChannelConfig];
       XCTAssertNotNil(dataChannel);
       [peerConnection close];
       peerConnection = nil;
@@ -144,10 +151,12 @@

 - (void)testRTCRtpTransceiverLifetime {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
     RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+            initWithMandatoryConstraints:@{}
                      optionalConstraints:nil];
     RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
         [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@@ -158,9 +167,11 @@

     @autoreleasepool {
       factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
-      peerConnection =
-          [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
-      tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init];
+      peerConnection = [factory peerConnectionWithConfiguration:config
+                                                    constraints:contraints
+                                                       delegate:nil];
+      tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio
+                                                   init:init];
       XCTAssertNotNil(tranceiver);
       [peerConnection close];
       peerConnection = nil;
@@ -174,10 +185,12 @@

 - (void)testRTCRtpSenderLifetime {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     config.sdpSemantics = RTCSdpSemanticsPlanB;
     RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+            initWithMandatoryConstraints:@{}
                      optionalConstraints:nil];

     RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
@@ -186,9 +199,11 @@

     @autoreleasepool {
       factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
-      peerConnection =
-          [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
-      sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"];
+      peerConnection = [factory peerConnectionWithConfiguration:config
+                                                    constraints:constraints
+                                                       delegate:nil];
+      sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo
+                                     streamId:@"stream"];
       XCTAssertNotNil(sender);
       [peerConnection close];
       peerConnection = nil;
@@ -202,10 +217,12 @@

 - (void)testRTCRtpReceiverLifetime {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     config.sdpSemantics = RTCSdpSemanticsPlanB;
     RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+            initWithMandatoryConstraints:@{}
                      optionalConstraints:nil];

     RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
@@ -217,10 +234,14 @@

     @autoreleasepool {
       factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
-      pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+      pc1 = [factory peerConnectionWithConfiguration:config
+                                         constraints:constraints
+                                            delegate:nil];
       [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];

-      pc2 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+      pc2 = [factory peerConnectionWithConfiguration:config
+                                         constraints:constraints
+                                            delegate:nil];
       [pc2 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"];

       NSTimeInterval negotiationTimeout = 15;
@@ -306,7 +327,8 @@

     @autoreleasepool {
       factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
-      videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"];
+      videoTrack = [factory videoTrackWithSource:[factory videoSource]
+                                         trackId:@"videoTrack"];
       XCTAssertNotNil(videoTrack);
       factory = nil;
     }
@@ -318,10 +340,11 @@

 - (void)testRollback {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
-    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
+    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [[RTC_OBJC_TYPE(
+        RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
       kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
     }
                      optionalConstraints:nil];
@@ -329,14 +352,19 @@
     __block RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
     __block RTC_OBJC_TYPE(RTCPeerConnection) * pc1;
     RTC_OBJC_TYPE(RTCSessionDescription) *rollback =
-        [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeRollback sdp:@""];
+        [[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
+            initWithType:RTCSdpTypeRollback
+                     sdp:@""];

     @autoreleasepool {
       factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
-      pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil];
+      pc1 = [factory peerConnectionWithConfiguration:config
+                                         constraints:constraints
+                                            delegate:nil];
       dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
       [pc1 offerForConstraints:constraints
-          completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
+          completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer,
+                              NSError * error) {
             XCTAssertNil(error);
             XCTAssertNotNil(offer);

@@ -352,7 +380,8 @@
     NSTimeInterval negotiationTimeout = 15;
     dispatch_semaphore_wait(
         negotiatedSem,
-        dispatch_time(DISPATCH_TIME_NOW, (int64_t)(negotiationTimeout * NSEC_PER_SEC)));
+        dispatch_time(DISPATCH_TIME_NOW,
+                      (int64_t)(negotiationTimeout * NSEC_PER_SEC)));

     XCTAssertEqual(pc1.signalingState, RTCSignalingStateStable);

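The unchanged lines between the two hunks above are elided by the diff. The probable shape of the rollback exercise, assuming the standard -setLocalDescription:completionHandler: API (an assumption; these lines are not shown in this diff), is:

// Sketch of the elided middle of testRollback: apply the offer locally,
// then apply the rollback description created above; the hunk that follows
// asserts the signaling state returned to stable.
[pc1 setLocalDescription:offer
        completionHandler:^(NSError *setError) {
          XCTAssertNil(setError);
          [pc1 setLocalDescription:rollback
                 completionHandler:^(NSError *rollbackError) {
                   XCTAssertNil(rollbackError);
                   dispatch_semaphore_signal(negotiatedSem);
                 }];
        }];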
@@ -377,9 +406,12 @@
       [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"]
     ];

-    encoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
-    decoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
-    factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoder
+    encoder = [[MockVideoEncoderDecoderFactory alloc]
+        initWithSupportedCodecs:supportedCodecs];
+    decoder = [[MockVideoEncoderDecoderFactory alloc]
+        initWithSupportedCodecs:supportedCodecs];
+    factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
+        initWithEncoderFactory:encoder
                 decoderFactory:decoder];

     RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
@@ -406,9 +438,12 @@
       [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"]
     ];

-    encoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
-    decoder = [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
-    factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoder
+    encoder = [[MockVideoEncoderDecoderFactory alloc]
+        initWithSupportedCodecs:supportedCodecs];
+    decoder = [[MockVideoEncoderDecoderFactory alloc]
+        initWithSupportedCodecs:supportedCodecs];
+    factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
+        initWithEncoderFactory:encoder
                 decoderFactory:decoder];

     RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
@@ -426,9 +461,11 @@

 - (void)testSetCodecPreferences {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+            initWithMandatoryConstraints:nil
                      optionalConstraints:nil];
     RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
         [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@@ -439,20 +476,24 @@
     ];

     MockVideoEncoderDecoderFactory *encoder =
-        [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
+        [[MockVideoEncoderDecoderFactory alloc]
+            initWithSupportedCodecs:supportedCodecs];
     MockVideoEncoderDecoderFactory *decoder =
-        [[MockVideoEncoderDecoderFactory alloc] initWithSupportedCodecs:supportedCodecs];
+        [[MockVideoEncoderDecoderFactory alloc]
+            initWithSupportedCodecs:supportedCodecs];

     RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
     RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
     RTC_OBJC_TYPE(RTCRtpTransceiver) * tranceiver;
-    factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoder
+    factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
+        initWithEncoderFactory:encoder
                 decoderFactory:decoder];

     peerConnection = [factory peerConnectionWithConfiguration:config
                                                   constraints:constraints
                                                      delegate:nil];
-    tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo init:init];
+    tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo
+                                                 init:init];
     XCTAssertNotNil(tranceiver);

     RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
@@ -468,7 +509,8 @@
     XCTAssertNotNil(targetCodec);

     NSError *error = nil;
-    BOOL result = [tranceiver setCodecPreferences:@[ targetCodec ] error:&error];
+    BOOL result = [tranceiver setCodecPreferences:@[ targetCodec ]
+                                            error:&error];
     XCTAssertTrue(result);
     XCTAssertNil(error);

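Condensed, the codec-preference flow these hunks reformat looks like this. The diff elides how targetCodec is derived from capabilities, so the selection loop below is an assumption; only the setCodecPreferences:error: call appears verbatim above:

// Pick the VP8 entry out of the advertised codec capabilities and make it
// the only preferred codec (the selection loop is an assumption).
RTC_OBJC_TYPE(RTCRtpCodecCapability) *targetCodec = nil;
for (RTC_OBJC_TYPE(RTCRtpCodecCapability) *codec in capabilities.codecs) {
  if ([codec.name isEqualToString:@"VP8"]) {
    targetCodec = codec;
    break;
  }
}
XCTAssertNotNil(targetCodec);

NSError *error = nil;
BOOL result = [tranceiver setCodecPreferences:@[ targetCodec ]
                                        error:&error];
XCTAssertTrue(result);
XCTAssertNil(error);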
@@ -478,7 +520,8 @@
     __block BOOL completed = NO;
     [peerConnection
         offerForConstraints:constraints
-          completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
+          completionHandler:^(
+              RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
                               NSError *_Nullable error) {
             XCTAssertNil(error);
             XCTAssertNotNil(sdp);
@@ -489,8 +532,8 @@
             XCTAssertNotNil(targetCodec.preferredPayloadType);
             XCTAssertNotNil(targetCodec.clockRate);

-            NSString *expected =
-                [NSString stringWithFormat:@"a=rtpmap:%i VP8/%i",
+            NSString *expected = [NSString
+                stringWithFormat:@"a=rtpmap:%i VP8/%i",
                                  targetCodec.preferredPayloadType.intValue,
                                  targetCodec.clockRate.intValue];

@@ -505,7 +548,8 @@
     factory = nil;
     tranceiver = nil;

-    dispatch_semaphore_wait(semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
+    dispatch_semaphore_wait(
+        semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
     XCTAssertTrue(completed);
   }
 }
@@ -513,9 +557,11 @@

 - (void)testSetHeaderExtensionsToNegotiate {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+            initWithMandatoryConstraints:nil
                      optionalConstraints:nil];
     RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
         [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@@ -528,18 +574,22 @@
     peerConnection = [factory peerConnectionWithConfiguration:config
                                                   constraints:constraints
                                                      delegate:nil];
-    tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo init:init];
+    tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo
+                                                 init:init];
     XCTAssertNotNil(tranceiver);

-    NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *> *headerExtensionsToNegotiate =
-        tranceiver.headerExtensionsToNegotiate;
+    NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *>
+        *headerExtensionsToNegotiate = tranceiver.headerExtensionsToNegotiate;

-    __block RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *targetExtension = nil;
+    __block RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *targetExtension =
+        nil;
     [headerExtensionsToNegotiate
-        enumerateObjectsUsingBlock:^(RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
+        enumerateObjectsUsingBlock:^(
+            RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
                                      NSUInteger idx,
                                      BOOL * stop) {
-          if ([extension.uri isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
+          if ([extension.uri
+                  isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
             targetExtension = extension;
           } else {
             extension.direction = RTCRtpTransceiverDirectionStopped;
@@ -547,7 +597,8 @@
         }];

     NSError *error = nil;
-    BOOL isOK = [tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
+    BOOL isOK =
+        [tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
                                              error:&error];
     XCTAssertNil(error);
     XCTAssertTrue(isOK);
@@ -556,8 +607,10 @@
     dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);

     __block BOOL completed = NO;
-    [peerConnection offerForConstraints:constraints
-                      completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
+    [peerConnection
+        offerForConstraints:constraints
+          completionHandler:^(
+              RTC_OBJC_TYPE(RTCSessionDescription) *_Nullable sdp,
                               NSError *_Nullable error) {
             XCTAssertNil(error);
             XCTAssertNotNil(sdp);
@@ -568,8 +621,8 @@
             XCTAssertNotNil(targetExtension);
             XCTAssertNotNil(targetExtension.preferredId);

-            NSString *expected =
-                [NSString stringWithFormat:@"a=extmap:%i %@",
+            NSString *expected = [NSString
+                stringWithFormat:@"a=extmap:%i %@",
                                  targetExtension.preferredId.intValue,
                                  targetExtension.uri];

@@ -584,7 +637,8 @@
     factory = nil;
     tranceiver = nil;

-    dispatch_semaphore_wait(semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
+    dispatch_semaphore_wait(
+        semaphore, dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
     XCTAssertTrue(completed);
   }
 }
@@ -592,9 +646,11 @@

 - (void)testSetHeaderExtensionsToNegotiateError {
   @autoreleasepool {
-    RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
+    RTC_OBJC_TYPE(RTCConfiguration) *config =
+        [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
     RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
+        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
+            initWithMandatoryConstraints:nil
                      optionalConstraints:nil];
     RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init =
         [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
@@ -607,24 +663,28 @@
     peerConnection = [factory peerConnectionWithConfiguration:config
                                                   constraints:constraints
                                                      delegate:nil];
-    tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo init:init];
+    tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo
+                                                 init:init];
     XCTAssertNotNil(tranceiver);

-    NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *> *headerExtensionsToNegotiate =
-        tranceiver.headerExtensionsToNegotiate;
+    NSArray<RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) *>
+        *headerExtensionsToNegotiate = tranceiver.headerExtensionsToNegotiate;

     [headerExtensionsToNegotiate
-        enumerateObjectsUsingBlock:^(RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
+        enumerateObjectsUsingBlock:^(
+            RTC_OBJC_TYPE(RTCRtpHeaderExtensionCapability) * extension,
                                      NSUInteger idx,
                                      BOOL * stop) {
-          if ([extension.uri isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
+          if ([extension.uri
+                  isEqualToString:@"urn:ietf:params:rtp-hdrext:sdes:mid"]) {
             extension.direction = RTCRtpTransceiverDirectionStopped;
           }
         }];

     // Stopping a mandatory extension should yield an error
     NSError *error = nil;
-    BOOL isOK = [tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
+    BOOL isOK =
+        [tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
                                              error:&error];
     XCTAssertNotNil(error);
     XCTAssertFalse(isOK);
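The two header-extension tests differ only in which extension gets stopped, so the contract they pin down can be read directly off the hunks (a summary sketch reusing the API shown above; nothing new is introduced):

// Stopping only optional extensions: setHeaderExtensionsToNegotiate:error:
// returns YES and leaves error nil (testSetHeaderExtensionsToNegotiate).
// Stopping the mandatory "urn:ietf:params:rtp-hdrext:sdes:mid" extension:
// it returns NO and populates the NSError (the ...NegotiateError test).
NSError *error = nil;
BOOL isOK =
    [tranceiver setHeaderExtensionsToNegotiate:headerExtensionsToNegotiate
                                         error:&error];
if (!isOK) {
  NSLog(@"negotiation preferences rejected: %@", error.localizedDescription);
}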
@@ -642,14 +702,17 @@
     __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
     __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2;
     RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints =
-        [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
+        [[RTC_OBJC_TYPE(RTCMediaConstraints)
+            alloc] initWithMandatoryConstraints:@{
           kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
         }
                      optionalConstraints:nil];

     dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
-    [weakPC1 offerForConstraints:sdpConstraints
-        completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) {
+    [weakPC1
+        offerForConstraints:sdpConstraints
+          completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer,
+                              NSError * error) {
           XCTAssertNil(error);
           XCTAssertNotNil(offer);
           [weakPC1
@@ -663,7 +726,8 @@
           [weakPC2
               answerForConstraints:sdpConstraints
                  completionHandler:^(
-                     RTC_OBJC_TYPE(RTCSessionDescription) * answer,
+                     RTC_OBJC_TYPE(RTCSessionDescription) *
+                         answer,
                      NSError * error) {
                    XCTAssertNil(error);
                    XCTAssertNotNil(answer);
@@ -673,9 +737,11 @@
                    XCTAssertNil(error);
                    [weakPC1
                        setRemoteDescription:answer
-                          completionHandler:^(NSError *error) {
+                          completionHandler:^(
+                              NSError *error) {
                            XCTAssertNil(error);
-                           dispatch_semaphore_signal(negotiatedSem);
+                           dispatch_semaphore_signal(
+                               negotiatedSem);
                          }];
                  }];
         }];
@@ -685,13 +751,15 @@

   return 0 ==
       dispatch_semaphore_wait(negotiatedSem,
-                              dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)));
+                              dispatch_time(DISPATCH_TIME_NOW,
+                                            (int64_t)(timeout * NSEC_PER_SEC)));
 }

 - (NSArray<NSString *> *)rtpMapsFromSDP:(NSString *)sdp {
   NSMutableArray<NSString *> *rtpMaps = [NSMutableArray new];
   NSArray *sdpLines =
-      [sdp componentsSeparatedByCharactersInSet:[NSCharacterSet newlineCharacterSet]];
+      [sdp componentsSeparatedByCharactersInSet:[NSCharacterSet
+                                                    newlineCharacterSet]];
   for (NSString *line in sdpLines) {
     if ([line hasPrefix:@"a=rtpmap"]) {
       [rtpMaps addObject:line];
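The 0 == dispatch_semaphore_wait(...) comparison in the hunk above works because dispatch_semaphore_wait returns zero when the semaphore was signaled and non-zero when the timeout elapsed first. Factored into a helper (illustrative name, not part of this diff):

static BOOL WaitForSemaphore(dispatch_semaphore_t sem,
                             NSTimeInterval timeout) {
  // Returns YES if the semaphore was signaled before the deadline.
  dispatch_time_t deadline =
      dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC));
  return 0 == dispatch_semaphore_wait(sem, deadline);
}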
@@ -703,7 +771,8 @@
 - (NSArray<NSString *> *)extMapsFromSDP:(NSString *)sdp {
   NSMutableArray<NSString *> *extMaps = [NSMutableArray new];
   NSArray *sdpLines =
-      [sdp componentsSeparatedByCharactersInSet:[NSCharacterSet newlineCharacterSet]];
+      [sdp componentsSeparatedByCharactersInSet:[NSCharacterSet
+                                                    newlineCharacterSet]];
   for (NSString *line in sdpLines) {
     if ([line hasPrefix:@"a=extmap:"]) {
       [extMaps addObject:line];