diff --git a/examples/objc/AppRTCMobile/ARDAppClient+Internal.h b/examples/objc/AppRTCMobile/ARDAppClient+Internal.h index 96103c26a2..ef08c3546e 100644 --- a/examples/objc/AppRTCMobile/ARDAppClient+Internal.h +++ b/examples/objc/AppRTCMobile/ARDAppClient+Internal.h @@ -16,9 +16,9 @@ #import "ARDSignalingChannel.h" #import "ARDTURNClient.h" -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface ARDAppClient () +@interface ARDAppClient () // All properties should only be mutated from the main queue. @property(nonatomic, strong) id roomServerClient; @@ -26,8 +26,8 @@ @property(nonatomic, strong) id loopbackChannel; @property(nonatomic, strong) id turnClient; -@property(nonatomic, strong) RTCPeerConnection *peerConnection; -@property(nonatomic, strong) RTCPeerConnectionFactory *factory; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; @property(nonatomic, strong) NSMutableArray *messageQueue; @property(nonatomic, assign) BOOL isTurnComplete; @@ -42,7 +42,7 @@ @property(nonatomic, strong) NSURL *webSocketRestURL; @property(nonatomic, readonly) BOOL isLoopback; -@property(nonatomic, strong) RTCMediaConstraints *defaultPeerConnectionConstraints; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * defaultPeerConnectionConstraints; - (instancetype)initWithRoomServerClient:(id)rsClient signalingChannel:(id)channel diff --git a/examples/objc/AppRTCMobile/ARDAppClient.h b/examples/objc/AppRTCMobile/ARDAppClient.h index 5e9c4cb8d0..98b1fe91e5 100644 --- a/examples/objc/AppRTCMobile/ARDAppClient.h +++ b/examples/objc/AppRTCMobile/ARDAppClient.h @@ -24,9 +24,9 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) { @class ARDAppClient; @class ARDSettingsModel; @class ARDExternalSampleCapturer; -@class RTCMediaConstraints; -@class RTCCameraVideoCapturer; -@class RTCFileVideoCapturer; +@class 
RTC_OBJC_TYPE(RTCMediaConstraints); +@class RTC_OBJC_TYPE(RTCCameraVideoCapturer); +@class RTC_OBJC_TYPE(RTCFileVideoCapturer); // The delegate is informed of pertinent events and will be called on the // main queue. @@ -37,12 +37,13 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) { - (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state; - (void)appClient:(ARDAppClient *)client - didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer; - -- (void)appClient:(ARDAppClient *)client didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack; + didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer; - (void)appClient:(ARDAppClient *)client - didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack; + didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack; + +- (void)appClient:(ARDAppClient *)client + didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack; - (void)appClient:(ARDAppClient *)client didError:(NSError *)error; @@ -50,7 +51,7 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) { @optional - (void)appClient:(ARDAppClient *)client - didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer; + didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer; - (void)appClient:(ARDAppClient *)client didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer; diff --git a/examples/objc/AppRTCMobile/ARDAppClient.m b/examples/objc/AppRTCMobile/ARDAppClient.m index ab1e088628..74bdc653d8 100644 --- a/examples/objc/AppRTCMobile/ARDAppClient.m +++ b/examples/objc/AppRTCMobile/ARDAppClient.m @@ -105,10 +105,10 @@ static int const kKbpsMultiplier = 1000; @end @implementation ARDAppClient { - RTCFileLogger *_fileLogger; + RTC_OBJC_TYPE(RTCFileLogger) * _fileLogger; ARDTimerProxy *_statsTimer; ARDSettingsModel *_settings; - RTCVideoTrack *_localVideoTrack; + RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack; } 
@synthesize shouldGetStats = _shouldGetStats; @@ -172,7 +172,7 @@ static int const kKbpsMultiplier = 1000; - (void)configure { _messageQueue = [NSMutableArray array]; _iceServers = [NSMutableArray array]; - _fileLogger = [[RTCFileLogger alloc] init]; + _fileLogger = [[RTC_OBJC_TYPE(RTCFileLogger) alloc] init]; [_fileLogger start]; } @@ -224,11 +224,14 @@ static int const kKbpsMultiplier = 1000; _isLoopback = isLoopback; self.state = kARDAppClientStateConnecting; - RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init]; - RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init]; + RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) *decoderFactory = + [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init]; + RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory = + [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]; encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore]; - _factory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory]; + _factory = + [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory + decoderFactory:decoderFactory]; #if defined(WEBRTC_IOS) if (kARDAppClientEnableTracing) { @@ -365,38 +368,38 @@ static int const kKbpsMultiplier = 1000; } } -#pragma mark - RTCPeerConnectionDelegate +#pragma mark - RTC_OBJC_TYPE(RTCPeerConnectionDelegate) // Callbacks for this delegate occur on non-main thread and need to be // dispatched back to main queue as needed. 
-- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState:(RTCSignalingState)stateChanged { RTCLog(@"Signaling state changed: %ld", (long)stateChanged); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didAddStream:(RTCMediaStream *)stream { +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream { RTCLog(@"Stream with %lu video tracks and %lu audio tracks was added.", (unsigned long)stream.videoTracks.count, (unsigned long)stream.audioTracks.count); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver { - RTCMediaStreamTrack *track = transceiver.receiver.track; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver { + RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track; RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveStream:(RTCMediaStream *)stream { +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream { RTCLog(@"Stream was removed."); } -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection { +- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection { RTCLog(@"WARNING: Renegotiation needed but unimplemented."); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState { RTCLog(@"ICE state changed: %ld", (long)newState); dispatch_async(dispatch_get_main_queue(), ^{ @@ -404,18 +407,18 @@ static int const kKbpsMultiplier = 1000; 
}); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeConnectionState:(RTCPeerConnectionState)newState { RTCLog(@"ICE+DTLS state changed: %ld", (long)newState); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState { RTCLog(@"ICE gathering state changed: %ld", (long)newState); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didGenerateIceCandidate:(RTCIceCandidate *)candidate { +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate { dispatch_async(dispatch_get_main_queue(), ^{ ARDICECandidateMessage *message = [[ARDICECandidateMessage alloc] initWithCandidate:candidate]; @@ -423,8 +426,8 @@ static int const kKbpsMultiplier = 1000; }); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveIceCandidates:(NSArray *)candidates { +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveIceCandidates:(NSArray *)candidates { dispatch_async(dispatch_get_main_queue(), ^{ ARDICECandidateRemovalMessage *message = [[ARDICECandidateRemovalMessage alloc] @@ -433,24 +436,24 @@ static int const kKbpsMultiplier = 1000; }); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeLocalCandidate:(RTCIceCandidate *)local - didChangeRemoteCandidate:(RTCIceCandidate *)remote +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local + didChangeRemoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote lastReceivedMs:(int)lastDataReceivedMs didHaveReason:(NSString *)reason { RTCLog(@"ICE candidate pair changed because: %@", reason); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection - 
didOpenDataChannel:(RTCDataChannel *)dataChannel { +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel { } #pragma mark - RTCSessionDescriptionDelegate // Callbacks for this delegate occur on non-main thread and need to be // dispatched back to main queue as needed. -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didCreateSessionDescription:(RTCSessionDescription *)sdp +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didCreateSessionDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp error:(NSError *)error { dispatch_async(dispatch_get_main_queue(), ^{ if (error) { @@ -480,7 +483,7 @@ static int const kKbpsMultiplier = 1000; }); } -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didSetSessionDescriptionWithError:(NSError *)error { dispatch_async(dispatch_get_main_queue(), ^{ if (error) { @@ -499,15 +502,16 @@ static int const kKbpsMultiplier = 1000; // If we're answering and we've just set the remote offer we need to create // an answer and set the local description. 
if (!self.isInitiator && !self.peerConnection.localDescription) { - RTCMediaConstraints *constraints = [self defaultAnswerConstraints]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints]; __weak ARDAppClient *weakSelf = self; - [self.peerConnection answerForConstraints:constraints - completionHandler:^(RTCSessionDescription *sdp, NSError *error) { - ARDAppClient *strongSelf = weakSelf; - [strongSelf peerConnection:strongSelf.peerConnection - didCreateSessionDescription:sdp - error:error]; - }]; + [self.peerConnection + answerForConstraints:constraints + completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) { + ARDAppClient *strongSelf = weakSelf; + [strongSelf peerConnection:strongSelf.peerConnection + didCreateSessionDescription:sdp + error:error]; + }]; } }); } @@ -544,12 +548,10 @@ static int const kKbpsMultiplier = 1000; self.state = kARDAppClientStateConnected; // Create peer connection. - RTCMediaConstraints *constraints = [self defaultPeerConnectionConstraints]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - RTCCertificate *pcert = [RTCCertificate generateCertificateWithParams:@{ - @"expires" : @100000, - @"name" : @"RSASSA-PKCS1-v1_5" - }]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate) + generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}]; config.iceServers = _iceServers; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; config.certificate = pcert; @@ -562,14 +564,14 @@ static int const kKbpsMultiplier = 1000; if (_isInitiator) { // Send offer. 
__weak ARDAppClient *weakSelf = self; - [_peerConnection offerForConstraints:[self defaultOfferConstraints] - completionHandler:^(RTCSessionDescription *sdp, - NSError *error) { - ARDAppClient *strongSelf = weakSelf; - [strongSelf peerConnection:strongSelf.peerConnection - didCreateSessionDescription:sdp - error:error]; - }]; + [_peerConnection + offerForConstraints:[self defaultOfferConstraints] + completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) { + ARDAppClient *strongSelf = weakSelf; + [strongSelf peerConnection:strongSelf.peerConnection + didCreateSessionDescription:sdp + error:error]; + }]; } else { // Check if we've received an offer. [self drainMessageQueueIfReady]; @@ -619,7 +621,7 @@ static int const kKbpsMultiplier = 1000; case kARDSignalingMessageTypeAnswer: { ARDSessionDescriptionMessage *sdpMessage = (ARDSessionDescriptionMessage *)message; - RTCSessionDescription *description = sdpMessage.sessionDescription; + RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription; __weak ARDAppClient *weakSelf = self; [_peerConnection setRemoteDescription:description completionHandler:^(NSError *error) { @@ -679,7 +681,7 @@ static int const kKbpsMultiplier = 1000; } - (void)setMaxBitrateForPeerConnectionVideoSender { - for (RTCRtpSender *sender in _peerConnection.senders) { + for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) { if (sender.track != nil) { if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) { [self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender]; @@ -688,20 +690,20 @@ static int const kKbpsMultiplier = 1000; } } -- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTCRtpSender *)sender { +- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender { if (maxBitrate.intValue <= 0) { return; } - RTCRtpParameters *parametersToModify = sender.parameters; - for (RTCRtpEncodingParameters 
*encoding in parametersToModify.encodings) { + RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters; + for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) { encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier); } [sender setParameters:parametersToModify]; } -- (RTCRtpTransceiver *)videoTransceiver { - for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) { +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver { + for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) { if (transceiver.mediaType == RTCRtpMediaTypeVideo) { return transceiver; } @@ -710,29 +712,30 @@ static int const kKbpsMultiplier = 1000; } - (void)createMediaSenders { - RTCMediaConstraints *constraints = [self defaultMediaAudioConstraints]; - RTCAudioSource *source = [_factory audioSourceWithConstraints:constraints]; - RTCAudioTrack *track = [_factory audioTrackWithSource:source - trackId:kARDAudioTrackId]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints]; + RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints]; + RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source + trackId:kARDAudioTrackId]; [_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]]; _localVideoTrack = [self createLocalVideoTrack]; if (_localVideoTrack) { [_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]]; [_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack]; // We can set up rendering for the remote track right away since the transceiver already has an - // RTCRtpReceiver with a track. The track will automatically get unmuted and produce frames - // once RTP is received. - RTCVideoTrack *track = (RTCVideoTrack *)([self videoTransceiver].receiver.track); + // RTC_OBJC_TYPE(RTCRtpReceiver) with a track. 
The track will automatically get unmuted and + // produce frames once RTP is received. + RTC_OBJC_TYPE(RTCVideoTrack) *track = + (RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track); [_delegate appClient:self didReceiveRemoteVideoTrack:track]; } } -- (RTCVideoTrack *)createLocalVideoTrack { +- (RTC_OBJC_TYPE(RTCVideoTrack) *)createLocalVideoTrack { if ([_settings currentAudioOnlySettingFromStore]) { return nil; } - RTCVideoSource *source = [_factory videoSource]; + RTC_OBJC_TYPE(RTCVideoSource) *source = [_factory videoSource]; #if !TARGET_IPHONE_SIMULATOR if (self.isBroadcast) { @@ -740,13 +743,15 @@ static int const kKbpsMultiplier = 1000; [[ARDExternalSampleCapturer alloc] initWithDelegate:source]; [_delegate appClient:self didCreateLocalExternalSampleCapturer:capturer]; } else { - RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:source]; + RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer = + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:source]; [_delegate appClient:self didCreateLocalCapturer:capturer]; } #else #if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0) if (@available(iOS 10, *)) { - RTCFileVideoCapturer *fileCapturer = [[RTCFileVideoCapturer alloc] initWithDelegate:source]; + RTC_OBJC_TYPE(RTCFileVideoCapturer) *fileCapturer = + [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:source]; [_delegate appClient:self didCreateLocalFileCapturer:fileCapturer]; } #endif @@ -781,40 +786,38 @@ static int const kKbpsMultiplier = 1000; #pragma mark - Defaults - - (RTCMediaConstraints *)defaultMediaAudioConstraints { - NSDictionary *mandatoryConstraints = @{}; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; - return constraints; +- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints { + NSDictionary *mandatoryConstraints = @{}; + 
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints + optionalConstraints:nil]; + return constraints; } -- (RTCMediaConstraints *)defaultAnswerConstraints { +- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultAnswerConstraints { return [self defaultOfferConstraints]; } -- (RTCMediaConstraints *)defaultOfferConstraints { +- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints { NSDictionary *mandatoryConstraints = @{ @"OfferToReceiveAudio" : @"true", @"OfferToReceiveVideo" : @"true" }; - RTCMediaConstraints* constraints = - [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatoryConstraints - optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints + optionalConstraints:nil]; return constraints; } -- (RTCMediaConstraints *)defaultPeerConnectionConstraints { +- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultPeerConnectionConstraints { if (_defaultPeerConnectionConstraints) { return _defaultPeerConnectionConstraints; } NSString *value = _isLoopback ? @"false" : @"true"; NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value }; - RTCMediaConstraints* constraints = - [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:nil - optionalConstraints:optionalConstraints]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil + optionalConstraints:optionalConstraints]; return constraints; } diff --git a/examples/objc/AppRTCMobile/ARDCaptureController.h b/examples/objc/AppRTCMobile/ARDCaptureController.h index 0150bedc3d..66302b533d 100644 --- a/examples/objc/AppRTCMobile/ARDCaptureController.h +++ b/examples/objc/AppRTCMobile/ARDCaptureController.h @@ -15,7 +15,7 @@ // Controls the camera. Handles starting the capture, switching cameras etc. 
@interface ARDCaptureController : NSObject -- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer +- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer settings:(ARDSettingsModel *)settings; - (void)startCapture; - (void)stopCapture; diff --git a/examples/objc/AppRTCMobile/ARDCaptureController.m b/examples/objc/AppRTCMobile/ARDCaptureController.m index 6afc6c8942..f1ce008b1d 100644 --- a/examples/objc/AppRTCMobile/ARDCaptureController.m +++ b/examples/objc/AppRTCMobile/ARDCaptureController.m @@ -17,12 +17,12 @@ const Float64 kFramerateLimit = 30.0; @implementation ARDCaptureController { - RTCCameraVideoCapturer *_capturer; + RTC_OBJC_TYPE(RTCCameraVideoCapturer) * _capturer; ARDSettingsModel *_settings; BOOL _usingFrontCamera; } -- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer +- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer settings:(ARDSettingsModel *)settings { if (self = [super init]) { _capturer = capturer; @@ -63,7 +63,8 @@ const Float64 kFramerateLimit = 30.0; #pragma mark - Private - (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position { - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; + NSArray *captureDevices = + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]; for (AVCaptureDevice *device in captureDevices) { if (device.position == position) { return device; @@ -74,7 +75,7 @@ const Float64 kFramerateLimit = 30.0; - (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device { NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]; int targetWidth = [_settings currentVideoResolutionWidthFromStore]; int targetHeight = [_settings currentVideoResolutionHeightFromStore]; AVCaptureDeviceFormat *selectedFormat = nil; diff --git a/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h 
b/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h index 98a60fc7ed..dbbf18fb30 100644 --- a/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h +++ b/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.h @@ -14,5 +14,5 @@ - (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer; @end -@interface ARDExternalSampleCapturer : RTCVideoCapturer -@end +@interface ARDExternalSampleCapturer : RTC_OBJC_TYPE +(RTCVideoCapturer) @end diff --git a/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m b/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m index 9d93ea1e6c..a35fd2535c 100644 --- a/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m +++ b/examples/objc/AppRTCMobile/ARDExternalSampleCapturer.m @@ -15,7 +15,7 @@ @implementation ARDExternalSampleCapturer -- (instancetype)initWithDelegate:(__weak id)delegate { +- (instancetype)initWithDelegate:(__weak id)delegate { return [super initWithDelegate:delegate]; } @@ -32,12 +32,14 @@ return; } - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; - RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer - rotation:RTCVideoRotation_0 - timeStampNs:timeStampNs]; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; } diff --git a/examples/objc/AppRTCMobile/ARDSettingsModel.h b/examples/objc/AppRTCMobile/ARDSettingsModel.h index bac762c246..b89ac0bb20 100644 --- a/examples/objc/AppRTCMobile/ARDSettingsModel.h +++ b/examples/objc/AppRTCMobile/ARDSettingsModel.h @@ -53,12 +53,12 @@ NS_ASSUME_NONNULL_BEGIN /** * Returns array 
of available video codecs. */ -- (NSArray *)availableVideoCodecs; +- (NSArray *)availableVideoCodecs; /** * Returns current video codec setting from store if present or default (H264) otherwise. */ -- (RTCVideoCodecInfo *)currentVideoCodecSettingFromStore; +- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore; /** * Stores the provided video codec setting into the store. @@ -68,7 +68,7 @@ NS_ASSUME_NONNULL_BEGIN * @param video codec settings the string to be stored. * @return YES/NO depending on success. */ -- (BOOL)storeVideoCodecSetting:(RTCVideoCodecInfo *)videoCodec; +- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec; /** * Returns current max bitrate setting from store if present. diff --git a/examples/objc/AppRTCMobile/ARDSettingsModel.m b/examples/objc/AppRTCMobile/ARDSettingsModel.m index fb3ef7ee8b..812e81aa4f 100644 --- a/examples/objc/AppRTCMobile/ARDSettingsModel.m +++ b/examples/objc/AppRTCMobile/ARDSettingsModel.m @@ -27,9 +27,9 @@ NS_ASSUME_NONNULL_BEGIN - (NSArray *)availableVideoResolutions { NSMutableSet *> *resolutions = [[NSMutableSet *> alloc] init]; - for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) { + for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) { for (AVCaptureDeviceFormat *format in - [RTCCameraVideoCapturer supportedFormatsForDevice:device]) { + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) { CMVideoDimensions resolution = CMVideoFormatDescriptionGetDimensions(format.formatDescription); NSArray *resolutionObject = @[ @(resolution.width), @(resolution.height) ]; @@ -70,17 +70,17 @@ NS_ASSUME_NONNULL_BEGIN return YES; } -- (NSArray *)availableVideoCodecs { - return [RTCDefaultVideoEncoderFactory supportedCodecs]; +- (NSArray *)availableVideoCodecs { + return [RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) supportedCodecs]; } -- (RTCVideoCodecInfo *)currentVideoCodecSettingFromStore { +- 
(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore { [self registerStoreDefaults]; NSData *codecData = [[self settingsStore] videoCodec]; return [NSKeyedUnarchiver unarchiveObjectWithData:codecData]; } -- (BOOL)storeVideoCodecSetting:(RTCVideoCodecInfo *)videoCodec { +- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec { if (![[self availableVideoCodecs] containsObject:videoCodec]) { return NO; } @@ -149,7 +149,7 @@ NS_ASSUME_NONNULL_BEGIN return [self availableVideoResolutions].firstObject; } -- (RTCVideoCodecInfo *)defaultVideoCodecSetting { +- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)defaultVideoCodecSetting { return [self availableVideoCodecs].firstObject; } diff --git a/examples/objc/AppRTCMobile/ARDSignalingMessage.h b/examples/objc/AppRTCMobile/ARDSignalingMessage.h index 89b2f55d09..dd9a6ba1d8 100644 --- a/examples/objc/AppRTCMobile/ARDSignalingMessage.h +++ b/examples/objc/AppRTCMobile/ARDSignalingMessage.h @@ -32,25 +32,25 @@ typedef enum { @interface ARDICECandidateMessage : ARDSignalingMessage -@property(nonatomic, readonly) RTCIceCandidate *candidate; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceCandidate) * candidate; -- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate; +- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate; @end @interface ARDICECandidateRemovalMessage : ARDSignalingMessage -@property(nonatomic, readonly) NSArray *candidates; +@property(nonatomic, readonly) NSArray *candidates; -- (instancetype)initWithRemovedCandidates:(NSArray *)candidates; +- (instancetype)initWithRemovedCandidates:(NSArray *)candidates; @end @interface ARDSessionDescriptionMessage : ARDSignalingMessage -@property(nonatomic, readonly) RTCSessionDescription *sessionDescription; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription; -- (instancetype)initWithDescription:(RTCSessionDescription *)description; +- 
(instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description; @end diff --git a/examples/objc/AppRTCMobile/ARDSignalingMessage.m b/examples/objc/AppRTCMobile/ARDSignalingMessage.m index 3efc502532..57af826935 100644 --- a/examples/objc/AppRTCMobile/ARDSignalingMessage.m +++ b/examples/objc/AppRTCMobile/ARDSignalingMessage.m @@ -45,19 +45,19 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates"; NSString *typeString = values[kARDSignalingMessageTypeKey]; ARDSignalingMessage *message = nil; if ([typeString isEqualToString:@"candidate"]) { - RTCIceCandidate *candidate = - [RTCIceCandidate candidateFromJSONDictionary:values]; + RTC_OBJC_TYPE(RTCIceCandidate) *candidate = + [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:values]; message = [[ARDICECandidateMessage alloc] initWithCandidate:candidate]; } else if ([typeString isEqualToString:kARDTypeValueRemoveCandidates]) { RTCLogInfo(@"Received remove-candidates message"); - NSArray *candidates = - [RTCIceCandidate candidatesFromJSONDictionary:values]; + NSArray *candidates = + [RTC_OBJC_TYPE(RTCIceCandidate) candidatesFromJSONDictionary:values]; message = [[ARDICECandidateRemovalMessage alloc] initWithRemovedCandidates:candidates]; } else if ([typeString isEqualToString:@"offer"] || [typeString isEqualToString:@"answer"]) { - RTCSessionDescription *description = - [RTCSessionDescription descriptionFromJSONDictionary:values]; + RTC_OBJC_TYPE(RTCSessionDescription) *description = + [RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values]; message = [[ARDSessionDescriptionMessage alloc] initWithDescription:description]; } else if ([typeString isEqualToString:@"bye"]) { @@ -78,7 +78,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates"; @synthesize candidate = _candidate; -- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate { +- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate { 
if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) { _candidate = candidate; } @@ -95,8 +95,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates"; @synthesize candidates = _candidates; -- (instancetype)initWithRemovedCandidates:( - NSArray *)candidates { +- (instancetype)initWithRemovedCandidates:(NSArray *)candidates { NSParameterAssert(candidates.count); if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) { _candidates = candidates; @@ -105,9 +104,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates"; } - (NSData *)JSONData { - return - [RTCIceCandidate JSONDataForIceCandidates:_candidates - withType:kARDTypeValueRemoveCandidates]; + return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates + withType:kARDTypeValueRemoveCandidates]; } @end @@ -116,7 +114,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates"; @synthesize sessionDescription = _sessionDescription; -- (instancetype)initWithDescription:(RTCSessionDescription *)description { +- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description { ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer; RTCSdpType sdpType = description.type; switch (sdpType) { @@ -127,8 +125,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates"; messageType = kARDSignalingMessageTypeAnswer; break; case RTCSdpTypePrAnswer: - NSAssert(NO, @"Unexpected type: %@", - [RTCSessionDescription stringForType:sdpType]); + NSAssert( + NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]); break; } if (self = [super initWithType:messageType]) { diff --git a/examples/objc/AppRTCMobile/ARDStatsBuilder.h b/examples/objc/AppRTCMobile/ARDStatsBuilder.h index a876b96a4d..631648dd52 100644 --- a/examples/objc/AppRTCMobile/ARDStatsBuilder.h +++ b/examples/objc/AppRTCMobile/ARDStatsBuilder.h @@ -10,7 +10,9 @@ 
#import -@class RTCLegacyStatsReport; +#import + +@class RTC_OBJC_TYPE(RTCLegacyStatsReport); /** Class used to accumulate stats information into a single displayable string. */ @@ -24,6 +26,6 @@ /** Parses the information in the stats report into an appropriate internal * format used to generate the stats string. */ -- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport; +- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport; @end diff --git a/examples/objc/AppRTCMobile/ARDStatsBuilder.m b/examples/objc/AppRTCMobile/ARDStatsBuilder.m index f9a1920399..759921e229 100644 --- a/examples/objc/AppRTCMobile/ARDStatsBuilder.m +++ b/examples/objc/AppRTCMobile/ARDStatsBuilder.m @@ -11,6 +11,7 @@ #import "ARDStatsBuilder.h" #import +#import #import "ARDBitrateTracker.h" #import "ARDUtilities.h" @@ -141,7 +142,7 @@ return result; } -- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { NSString *reportType = statsReport.type; if ([reportType isEqualToString:@"ssrc"] && [statsReport.reportId rangeOfString:@"ssrc"].location != NSNotFound) { @@ -179,7 +180,7 @@ } } -- (void)parseBweStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseBweStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { [statsReport.values enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) { [self updateBweStatOfKey:key value:value]; @@ -206,7 +207,7 @@ } } -- (void)parseConnectionStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseConnectionStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { NSString *activeConnection = statsReport.values[@"googActiveConnection"]; if (![activeConnection isEqualToString:@"true"]) { return; @@ -217,7 +218,7 @@ }]; } -- (void)parseSendSsrcStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseSendSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { NSDictionary 
*values = statsReport.values; if ([values objectForKey:@"googFrameRateSent"]) { // Video track. @@ -238,7 +239,7 @@ } } -- (void)parseAudioSendStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseAudioSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { [statsReport.values enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) { [self updateAudioSendStatOfKey:key value:value]; @@ -275,14 +276,14 @@ } } -- (void)parseVideoSendStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseVideoSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { [statsReport.values enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) { [self updateVideoSendStatOfKey:key value:value]; }]; } -- (void)parseRecvSsrcStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseRecvSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { NSDictionary *values = statsReport.values; if ([values objectForKey:@"googFrameWidthReceived"]) { // Video track. 
@@ -307,7 +308,7 @@ } } -- (void)parseAudioRecvStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseAudioRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { [statsReport.values enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) { [self updateAudioRecvStatOfKey:key value:value]; @@ -334,7 +335,7 @@ } } -- (void)parseVideoRecvStatsReport:(RTCLegacyStatsReport *)statsReport { +- (void)parseVideoRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport { [statsReport.values enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) { [self updateVideoRecvStatOfKey:key value:value]; diff --git a/examples/objc/AppRTCMobile/ARDTURNClient.h b/examples/objc/AppRTCMobile/ARDTURNClient.h index 0cefaf6c19..c192ffb540 100644 --- a/examples/objc/AppRTCMobile/ARDTURNClient.h +++ b/examples/objc/AppRTCMobile/ARDTURNClient.h @@ -10,7 +10,9 @@ #import -@class RTCIceServer; +#import + +@class RTC_OBJC_TYPE(RTCIceServer); @protocol ARDTURNClient diff --git a/examples/objc/AppRTCMobile/ARDTURNClient.m b/examples/objc/AppRTCMobile/ARDTURNClient.m index 05f5cd9d11..069231cd7e 100644 --- a/examples/objc/AppRTCMobile/ARDTURNClient.m +++ b/examples/objc/AppRTCMobile/ARDTURNClient.m @@ -65,10 +65,10 @@ static NSInteger kARDTURNClientErrorBadResponse = -1; } NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data]; NSMutableArray *turnServers = [NSMutableArray array]; - [turnResponseDict[@"iceServers"] enumerateObjectsUsingBlock: - ^(NSDictionary *obj, NSUInteger idx, BOOL *stop){ - [turnServers addObject:[RTCIceServer serverFromJSONDictionary:obj]]; - }]; + [turnResponseDict[@"iceServers"] + enumerateObjectsUsingBlock:^(NSDictionary *obj, NSUInteger idx, BOOL *stop) { + [turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) serverFromJSONDictionary:obj]]; + }]; if (!turnServers) { NSError *responseError = [[NSError alloc] initWithDomain:kARDTURNClientErrorDomain diff --git 
a/examples/objc/AppRTCMobile/ARDWebSocketChannel.m b/examples/objc/AppRTCMobile/ARDWebSocketChannel.m index 19a60d4664..c05d87b3d7 100644 --- a/examples/objc/AppRTCMobile/ARDWebSocketChannel.m +++ b/examples/objc/AppRTCMobile/ARDWebSocketChannel.m @@ -217,12 +217,12 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg"; // Change message to answer, send back to server. ARDSessionDescriptionMessage *sdpMessage = (ARDSessionDescriptionMessage *)message; - RTCSessionDescription *description = sdpMessage.sessionDescription; + RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription; NSString *dsc = description.sdp; dsc = [dsc stringByReplacingOccurrencesOfString:@"offer" withString:@"answer"]; - RTCSessionDescription *answerDescription = - [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer sdp:dsc]; + RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc]; ARDSignalingMessage *answer = [[ARDSessionDescriptionMessage alloc] initWithDescription:answerDescription]; diff --git a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h index facfb7a05f..80f3ef7a26 100644 --- a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h +++ b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.h @@ -10,11 +10,13 @@ #import -@interface RTCIceCandidate (JSON) +@interface RTC_OBJC_TYPE (RTCIceCandidate) +(JSON) -+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary; -+ (NSArray *)candidatesFromJSONDictionary:(NSDictionary *)dictionary; -+ (NSData *)JSONDataForIceCandidates:(NSArray *)candidates + + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary; ++ (NSArray *)candidatesFromJSONDictionary: + (NSDictionary *)dictionary; ++ (NSData *)JSONDataForIceCandidates:(NSArray *)candidates withType:(NSString *)typeValue; - (NSData *)JSONData; diff --git 
a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m index e88ca11ff1..579cadbe88 100644 --- a/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m +++ b/examples/objc/AppRTCMobile/RTCIceCandidate+JSON.m @@ -19,24 +19,24 @@ static NSString const *kRTCICECandidateMLineIndexKey = @"label"; static NSString const *kRTCICECandidateSdpKey = @"candidate"; static NSString const *kRTCICECandidatesTypeKey = @"candidates"; +@implementation RTC_OBJC_TYPE (RTCIceCandidate) +(JSON) -@implementation RTCIceCandidate (JSON) - -+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary { + + (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary { NSString *mid = dictionary[kRTCICECandidateMidKey]; NSString *sdp = dictionary[kRTCICECandidateSdpKey]; NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey]; NSInteger mLineIndex = [num integerValue]; - return [[RTCIceCandidate alloc] initWithSdp:sdp - sdpMLineIndex:mLineIndex - sdpMid:mid]; + return [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp + sdpMLineIndex:mLineIndex + sdpMid:mid]; } -+ (NSData *)JSONDataForIceCandidates:(NSArray *)candidates ++ (NSData *)JSONDataForIceCandidates:(NSArray *)candidates withType:(NSString *)typeValue { NSMutableArray *jsonCandidates = [NSMutableArray arrayWithCapacity:candidates.count]; - for (RTCIceCandidate *candidate in candidates) { + for (RTC_OBJC_TYPE(RTCIceCandidate) * candidate in candidates) { NSDictionary *jsonCandidate = [candidate JSONDictionary]; [jsonCandidates addObject:jsonCandidate]; } @@ -56,14 +56,14 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates"; return data; } -+ (NSArray *)candidatesFromJSONDictionary: ++ (NSArray *)candidatesFromJSONDictionary: (NSDictionary *)dictionary { NSArray *jsonCandidates = dictionary[kRTCICECandidatesTypeKey]; - NSMutableArray *candidates = + NSMutableArray *candidates = [NSMutableArray 
arrayWithCapacity:jsonCandidates.count]; for (NSDictionary *jsonCandidate in jsonCandidates) { - RTCIceCandidate *candidate = - [RTCIceCandidate candidateFromJSONDictionary:jsonCandidate]; + RTC_OBJC_TYPE(RTCIceCandidate) *candidate = + [RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate]; [candidates addObject:candidate]; } return candidates; diff --git a/examples/objc/AppRTCMobile/RTCIceServer+JSON.h b/examples/objc/AppRTCMobile/RTCIceServer+JSON.h index 3f858062bd..4950a58684 100644 --- a/examples/objc/AppRTCMobile/RTCIceServer+JSON.h +++ b/examples/objc/AppRTCMobile/RTCIceServer+JSON.h @@ -10,8 +10,9 @@ #import -@interface RTCIceServer (JSON) +@interface RTC_OBJC_TYPE (RTCIceServer) +(JSON) -+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary; + + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary; @end diff --git a/examples/objc/AppRTCMobile/RTCIceServer+JSON.m b/examples/objc/AppRTCMobile/RTCIceServer+JSON.m index 912b521c48..b5272a2f64 100644 --- a/examples/objc/AppRTCMobile/RTCIceServer+JSON.m +++ b/examples/objc/AppRTCMobile/RTCIceServer+JSON.m @@ -10,15 +10,16 @@ #import "RTCIceServer+JSON.h" -@implementation RTCIceServer (JSON) +@implementation RTC_OBJC_TYPE (RTCIceServer) +(JSON) -+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary { + + (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary { NSArray *turnUrls = dictionary[@"urls"]; NSString *username = dictionary[@"username"] ?: @""; NSString *credential = dictionary[@"credential"] ?: @""; - return [[RTCIceServer alloc] initWithURLStrings:turnUrls - username:username - credential:credential]; + return [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:turnUrls + username:username + credential:credential]; } @end diff --git a/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h b/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h index 07bc270379..6130303335 100644 --- 
a/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h +++ b/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.h @@ -10,9 +10,11 @@ #import -@interface RTCSessionDescription (JSON) +@interface RTC_OBJC_TYPE (RTCSessionDescription) +(JSON) -+ (RTCSessionDescription *)descriptionFromJSONDictionary:(NSDictionary *)dictionary; + + (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary + : (NSDictionary *)dictionary; - (NSData *)JSONData; @end diff --git a/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m b/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m index a6059f77a8..28268faa84 100644 --- a/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m +++ b/examples/objc/AppRTCMobile/RTCSessionDescription+JSON.m @@ -13,14 +13,15 @@ static NSString const *kRTCSessionDescriptionTypeKey = @"type"; static NSString const *kRTCSessionDescriptionSdpKey = @"sdp"; -@implementation RTCSessionDescription (JSON) +@implementation RTC_OBJC_TYPE (RTCSessionDescription) +(JSON) -+ (RTCSessionDescription *)descriptionFromJSONDictionary: - (NSDictionary *)dictionary { + + (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary + : (NSDictionary *)dictionary { NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey]; RTCSdpType type = [[self class] typeForString:typeString]; NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey]; - return [[RTCSessionDescription alloc] initWithType:type sdp:sdp]; + return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp]; } - (NSData *)JSONData { diff --git a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h index 7e0387de3c..a42ef19683 100644 --- a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h +++ b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.h @@ -10,7 +10,9 @@ #import -@class RTCFileVideoCapturer; +#import + +@class RTC_OBJC_TYPE(RTCFileVideoCapturer); /** * Controls a file 
capturer. @@ -23,7 +25,7 @@ NS_CLASS_AVAILABLE_IOS(10) * * @param capturer The capturer to be controlled. */ -- (instancetype)initWithCapturer:(RTCFileVideoCapturer *)capturer; +- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer; /** * Starts the file capturer. diff --git a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m index 1f0276f189..a6a1127059 100644 --- a/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m +++ b/examples/objc/AppRTCMobile/ios/ARDFileCaptureController.m @@ -14,14 +14,14 @@ @interface ARDFileCaptureController () -@property(nonatomic, strong) RTCFileVideoCapturer *fileCapturer; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * fileCapturer; @end @implementation ARDFileCaptureController @synthesize fileCapturer = _fileCapturer; -- (instancetype)initWithCapturer:(RTCFileVideoCapturer *)capturer { +- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer { if (self = [super init]) { _fileCapturer = capturer; } diff --git a/examples/objc/AppRTCMobile/ios/ARDMainViewController.m b/examples/objc/AppRTCMobile/ios/ARDMainViewController.m index 63b3dd76ca..dea7742a1b 100644 --- a/examples/objc/AppRTCMobile/ios/ARDMainViewController.m +++ b/examples/objc/AppRTCMobile/ios/ARDMainViewController.m @@ -28,10 +28,9 @@ static NSString *const barButtonImageString = @"ic_settings_black_24dp.png"; // Launch argument to be passed to indicate that the app should start loopback immediatly static NSString *const loopbackLaunchProcessArgument = @"loopback"; -@interface ARDMainViewController () < - ARDMainViewDelegate, - ARDVideoCallViewControllerDelegate, - RTCAudioSessionDelegate> +@interface ARDMainViewController () @property(nonatomic, strong) ARDMainView *mainView; @property(nonatomic, strong) AVAudioPlayer *audioPlayer; @end @@ -57,13 +56,13 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback"; 
self.view = _mainView; [self addSettingsBarButton]; - RTCAudioSessionConfiguration *webRTCConfig = - [RTCAudioSessionConfiguration webRTCConfiguration]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig = + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration]; webRTCConfig.categoryOptions = webRTCConfig.categoryOptions | AVAudioSessionCategoryOptionDefaultToSpeaker; - [RTCAudioSessionConfiguration setWebRTCConfiguration:webRTCConfig]; + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:webRTCConfig]; - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session addDelegate:self]; [self configureAudioSession]; @@ -124,7 +123,7 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback"; ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init]; - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore]; session.isAudioEnabled = NO; @@ -158,32 +157,33 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback"; [self restartAudioPlayerIfNeeded]; }]; } - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; session.isAudioEnabled = NO; } -#pragma mark - RTCAudioSessionDelegate +#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate) -- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session { +- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session { // Stop playback on main queue and then configure WebRTC. 
- [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain - block:^{ - if (self.mainView.isAudioLoopPlaying) { - RTCLog(@"Stopping audio loop due to WebRTC start."); - [self.audioPlayer stop]; - } - RTCLog(@"Setting isAudioEnabled to YES."); - session.isAudioEnabled = YES; - }]; + [RTC_OBJC_TYPE(RTCDispatcher) + dispatchAsyncOnType:RTCDispatcherTypeMain + block:^{ + if (self.mainView.isAudioLoopPlaying) { + RTCLog(@"Stopping audio loop due to WebRTC start."); + [self.audioPlayer stop]; + } + RTCLog(@"Setting isAudioEnabled to YES."); + session.isAudioEnabled = YES; + }]; } -- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session { +- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session { // WebRTC is done with the audio session. Restart playback. - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain - block:^{ - RTCLog(@"audioSessionDidStopPlayOrRecord"); - [self restartAudioPlayerIfNeeded]; - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain + block:^{ + RTCLog(@"audioSessionDidStopPlayOrRecord"); + [self restartAudioPlayerIfNeeded]; + }]; } #pragma mark - Private @@ -202,13 +202,13 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback"; } - (void)configureAudioSession { - RTCAudioSessionConfiguration *configuration = - [[RTCAudioSessionConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *configuration = + [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init]; configuration.category = AVAudioSessionCategoryAmbient; configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers; configuration.mode = AVAudioSessionModeDefault; - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session lockForConfiguration]; BOOL hasSucceeded = NO; NSError *error = nil; diff --git a/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m 
b/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m index f2fea15c09..9bcbd3aa5c 100644 --- a/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m +++ b/examples/objc/AppRTCMobile/ios/ARDSettingsViewController.m @@ -62,7 +62,7 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) { return [_settingsModel availableVideoResolutions]; } -- (NSArray *)videoCodecArray { +- (NSArray *)videoCodecArray { return [_settingsModel availableVideoCodecs]; } @@ -214,7 +214,7 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:dequeueIdentifier]; } - RTCVideoCodecInfo *codec = self.videoCodecArray[indexPath.row]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = self.videoCodecArray[indexPath.row]; cell.textLabel.text = [codec humanReadableDescription]; if ([codec isEqualToCodecInfo:[_settingsModel currentVideoCodecSettingFromStore]]) { cell.accessoryType = UITableViewCellAccessoryCheckmark; @@ -231,7 +231,7 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath updateListSelectionAtIndexPath:indexPath inSection:ARDSettingsSectionVideoCodec]; - RTCVideoCodecInfo *videoCodec = self.videoCodecArray[indexPath.row]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec = self.videoCodecArray[indexPath.row]; [_settingsModel storeVideoCodecSetting:videoCodec]; } diff --git a/examples/objc/AppRTCMobile/ios/ARDStatsView.m b/examples/objc/AppRTCMobile/ios/ARDStatsView.m index 05d91b243b..7e006a82f2 100644 --- a/examples/objc/AppRTCMobile/ios/ARDStatsView.m +++ b/examples/objc/AppRTCMobile/ios/ARDStatsView.m @@ -35,7 +35,7 @@ } - (void)setStats:(NSArray *)stats { - for (RTCLegacyStatsReport *report in stats) { + for (RTC_OBJC_TYPE(RTCLegacyStatsReport) * report in stats) { [_statsBuilder parseStatsReport:report]; } _statsLabel.text = _statsBuilder.statsString; diff --git a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h index 
d76805f104..43f432df59 100644 --- a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h +++ b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.h @@ -37,8 +37,8 @@ @interface ARDVideoCallView : UIView @property(nonatomic, readonly) UILabel *statusLabel; -@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView; -@property(nonatomic, readonly) __kindof UIView *remoteVideoView; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView; +@property(nonatomic, readonly) __kindof UIView *remoteVideoView; @property(nonatomic, readonly) ARDStatsView *statsView; @property(nonatomic, weak) id delegate; diff --git a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m index 60b78c7340..45cfa9cb61 100644 --- a/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m +++ b/examples/objc/AppRTCMobile/ios/ARDVideoCallView.m @@ -25,7 +25,7 @@ static CGFloat const kLocalVideoViewSize = 120; static CGFloat const kLocalVideoViewPadding = 8; static CGFloat const kStatusBarHeight = 20; -@interface ARDVideoCallView () +@interface ARDVideoCallView () @end @implementation ARDVideoCallView { @@ -45,16 +45,17 @@ static CGFloat const kStatusBarHeight = 20; if (self = [super initWithFrame:frame]) { #if defined(RTC_SUPPORTS_METAL) - _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero]; + _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; #else - RTCEAGLVideoView *remoteView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero]; + RTC_OBJC_TYPE(RTCEAGLVideoView) *remoteView = + [[RTC_OBJC_TYPE(RTCEAGLVideoView) alloc] initWithFrame:CGRectZero]; remoteView.delegate = self; _remoteVideoView = remoteView; #endif [self addSubview:_remoteVideoView]; - _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero]; + _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero]; [self addSubview:_localVideoView]; _statsView = 
[[ARDStatsView alloc] initWithFrame:CGRectZero]; @@ -175,9 +176,9 @@ static CGFloat const kStatusBarHeight = 20; CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds)); } -#pragma mark - RTCVideoViewDelegate +#pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate) -- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size { +- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size { if (videoView == _remoteVideoView) { _remoteVideoSize = size; } diff --git a/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m b/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m index f7f4a877ef..4eb38878e0 100644 --- a/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m +++ b/examples/objc/AppRTCMobile/ios/ARDVideoCallViewController.m @@ -24,15 +24,15 @@ @interface ARDVideoCallViewController () -@property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack; + RTC_OBJC_TYPE (RTCAudioSessionDelegate)> +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack; @property(nonatomic, readonly) ARDVideoCallView *videoCallView; @property(nonatomic, assign) AVAudioSessionPortOverride portOverride; @end @implementation ARDVideoCallViewController { ARDAppClient *_client; - RTCVideoTrack *_remoteVideoTrack; + RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack; ARDCaptureController *_captureController; ARDFileCaptureController *_fileCaptureController NS_AVAILABLE_IOS(10); } @@ -62,7 +62,7 @@ [self statusTextForState:RTCIceConnectionStateNew]; self.view = _videoCallView; - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session addDelegate:self]; } @@ -100,7 +100,7 @@ } - (void)appClient:(ARDAppClient *)client - didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer { + didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { _videoCallView.localVideoView.captureSession = localCapturer.captureSession; 
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init]; _captureController = @@ -109,7 +109,7 @@ } - (void)appClient:(ARDAppClient *)client - didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer { + didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer { #if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0) if (@available(iOS 10, *)) { _fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer]; @@ -119,11 +119,11 @@ } - (void)appClient:(ARDAppClient *)client - didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack { + didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack { } - (void)appClient:(ARDAppClient *)client - didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack { + didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { self.remoteVideoTrack = remoteVideoTrack; __weak ARDVideoCallViewController *weakSelf = self; dispatch_async(dispatch_get_main_queue(), ^{ @@ -163,19 +163,21 @@ if (_portOverride == AVAudioSessionPortOverrideNone) { override = AVAudioSessionPortOverrideSpeaker; } - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeAudioSession - block:^{ - RTCAudioSession *session = [RTCAudioSession sharedInstance]; - [session lockForConfiguration]; - NSError *error = nil; - if ([session overrideOutputAudioPort:override error:&error]) { - self.portOverride = override; - } else { - RTCLogError(@"Error overriding output port: %@", - error.localizedDescription); - } - [session unlockForConfiguration]; - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession + block:^{ + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + [session lockForConfiguration]; + NSError *error = nil; + if ([session overrideOutputAudioPort:override + error:&error]) { + self.portOverride = override; + } else { + RTCLogError(@"Error overriding output port: 
%@", + error.localizedDescription); + } + [session unlockForConfiguration]; + }]; } - (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view { @@ -183,16 +185,16 @@ _videoCallView.statsView.hidden = NO; } -#pragma mark - RTCAudioSessionDelegate +#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate) -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches { RTCLog(@"Audio session detected glitch, total: %lld", totalNumberOfGlitches); } #pragma mark - Private -- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack { +- (void)setRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { if (_remoteVideoTrack == remoteVideoTrack) { return; } diff --git a/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h b/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h index c0372851a2..f6bdae5010 100644 --- a/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h +++ b/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.h @@ -10,8 +10,9 @@ #import -@interface RTCVideoCodecInfo (HumanReadable) +@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) +(HumanReadable) -- (NSString *)humanReadableDescription; + - (NSString *)humanReadableDescription; @end diff --git a/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m b/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m index d0bf1b5c5b..7a3ad4bd25 100644 --- a/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m +++ b/examples/objc/AppRTCMobile/ios/RTCVideoCodecInfo+HumanReadable.m @@ -12,13 +12,14 @@ #import -@implementation RTCVideoCodecInfo (HumanReadable) +@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) +(HumanReadable) -- (NSString *)humanReadableDescription { + - (NSString *)humanReadableDescription { if ([self.name isEqualToString:@"H264"]) { NSString *profileId = self.parameters[@"profile-level-id"]; - 
RTCH264ProfileLevelId *profileLevelId = - [[RTCH264ProfileLevelId alloc] initWithHexString:profileId]; + RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId = + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId]; if (profileLevelId.profile == RTCH264ProfileConstrainedHigh || profileLevelId.profile == RTCH264ProfileHigh) { return @"H264 (High)"; diff --git a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m index da1003afed..1fab5b5eec 100644 --- a/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m +++ b/examples/objc/AppRTCMobile/ios/broadcast_extension/ARDBroadcastSampleHandler.m @@ -20,14 +20,14 @@ @implementation ARDBroadcastSampleHandler { ARDAppClient *_client; - RTCCallbackLogger *_callbackLogger; + RTC_OBJC_TYPE(RTCCallbackLogger) * _callbackLogger; } @synthesize capturer = _capturer; - (instancetype)init { if (self = [super init]) { - _callbackLogger = [[RTCCallbackLogger alloc] init]; + _callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init]; os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog"); [_callbackLogger start:^(NSString *logMessage) { os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]); @@ -104,7 +104,7 @@ } - (void)appClient:(ARDAppClient *)client - didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer { + didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { } - (void)appClient:(ARDAppClient *)client @@ -113,11 +113,11 @@ } - (void)appClient:(ARDAppClient *)client - didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack { + didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack { } - (void)appClient:(ARDAppClient *)client - didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack { + didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack 
{ } - (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats { diff --git a/examples/objc/AppRTCMobile/mac/APPRTCViewController.m b/examples/objc/AppRTCMobile/mac/APPRTCViewController.m index a972a20e4e..1d0619d573 100644 --- a/examples/objc/AppRTCMobile/mac/APPRTCViewController.m +++ b/examples/objc/AppRTCMobile/mac/APPRTCViewController.m @@ -37,15 +37,15 @@ static NSUInteger const kBottomViewHeight = 200; @interface APPRTCMainView : NSView @property(nonatomic, weak) id delegate; -@property(nonatomic, readonly) NSView* localVideoView; -@property(nonatomic, readonly) NSView* remoteVideoView; +@property(nonatomic, readonly) NSView* localVideoView; +@property(nonatomic, readonly) NSView* remoteVideoView; @property(nonatomic, readonly) NSTextView* logView; - (void)displayLogMessage:(NSString*)message; @end -@interface APPRTCMainView () +@interface APPRTCMainView () @end @implementation APPRTCMainView { NSScrollView* _scrollView; @@ -178,10 +178,9 @@ static NSUInteger const kBottomViewHeight = 200; [self setNeedsUpdateConstraints:YES]; } -#pragma mark - RTCNSGLVideoViewDelegate +#pragma mark - RTC_OBJC_TYPE(RTCNSGLVideoViewDelegate) -- (void)videoView:(RTCNSGLVideoView*)videoView - didChangeVideoSize:(NSSize)size { +- (void)videoView:(RTC_OBJC_TYPE(RTCNSGLVideoView) *)videoView didChangeVideoSize:(NSSize)size { if (videoView == _remoteVideoView) { _remoteVideoSize = size; } else if (videoView == _localVideoView) { @@ -222,9 +221,10 @@ static NSUInteger const kBottomViewHeight = 200; // If not we're providing sensible default. 
#pragma clang diagnostic push #pragma clang diagnostic ignored "-Wpartial-availability" - if ([RTCMTLNSVideoView class] && [RTCMTLNSVideoView isMetalAvailable]) { - _remoteVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect]; - _localVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect]; + if ([RTC_OBJC_TYPE(RTCMTLNSVideoView) class] && + [RTC_OBJC_TYPE(RTCMTLNSVideoView) isMetalAvailable]) { + _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; + _localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect]; } #pragma clang diagnostic pop if (_remoteVideoView == nil) { @@ -238,13 +238,13 @@ static NSUInteger const kBottomViewHeight = 200; NSOpenGLPixelFormat* pixelFormat = [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes]; - RTCNSGLVideoView* remote = - [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat]; + RTC_OBJC_TYPE(RTCNSGLVideoView)* remote = + [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat]; remote.delegate = self; _remoteVideoView = remote; - RTCNSGLVideoView* local = - [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat]; + RTC_OBJC_TYPE(RTCNSGLVideoView)* local = + [[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat]; local.delegate = self; _localVideoView = local; } @@ -299,8 +299,8 @@ static NSUInteger const kBottomViewHeight = 200; @implementation APPRTCViewController { ARDAppClient* _client; - RTCVideoTrack* _localVideoTrack; - RTCVideoTrack* _remoteVideoTrack; + RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack; + RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack; ARDCaptureController* _captureController; } @@ -357,21 +357,21 @@ static NSUInteger const kBottomViewHeight = 200; } - (void)appClient:(ARDAppClient*)client - didCreateLocalCapturer:(RTCCameraVideoCapturer*)localCapturer { + 
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer { _captureController = [[ARDCaptureController alloc] initWithCapturer:localCapturer settings:[[ARDSettingsModel alloc] init]]; [_captureController startCapture]; } -- (void)appClient:(ARDAppClient *)client - didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack { +- (void)appClient:(ARDAppClient*)client + didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack { _localVideoTrack = localVideoTrack; [_localVideoTrack addRenderer:self.mainView.localVideoView]; } -- (void)appClient:(ARDAppClient *)client - didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack { +- (void)appClient:(ARDAppClient*)client + didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { _remoteVideoTrack = remoteVideoTrack; [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView]; } diff --git a/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm b/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm index c58a731ff2..3592ed0e15 100644 --- a/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm +++ b/examples/objc/AppRTCMobile/tests/ARDAppClient_xctest.mm @@ -196,8 +196,8 @@ // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion // crash in Debug. caller.defaultPeerConnectionConstraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil + optionalConstraints:nil]; weakCaller = caller; answerer = [self createAppClientForRoomId:roomId @@ -214,8 +214,8 @@ // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion // crash in Debug. 
answerer.defaultPeerConnectionConstraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil + optionalConstraints:nil]; weakAnswerer = answerer; // Kick off connection. @@ -248,8 +248,8 @@ connectedHandler:^{} localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }]; caller.defaultPeerConnectionConstraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil + optionalConstraints:nil]; // Kick off connection. [caller connectToRoomWithId:roomId diff --git a/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm b/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm index e3942c79fe..e3d6cf46fe 100644 --- a/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm +++ b/examples/objc/AppRTCMobile/tests/ARDFileCaptureController_xctest.mm @@ -31,7 +31,7 @@ NS_CLASS_AVAILABLE_IOS(10) - (void)setUp { [super setUp]; - self.fileCapturerMock = OCMClassMock([RTCFileVideoCapturer class]); + self.fileCapturerMock = OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]); self.fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:self.fileCapturerMock]; } diff --git a/examples/objcnativeapi/objc/NADViewController.mm b/examples/objcnativeapi/objc/NADViewController.mm index d96f6498e7..7f6ffbb7e5 100644 --- a/examples/objcnativeapi/objc/NADViewController.mm +++ b/examples/objcnativeapi/objc/NADViewController.mm @@ -24,9 +24,9 @@ @interface NADViewController () -@property(nonatomic) RTCCameraVideoCapturer *capturer; -@property(nonatomic) RTCCameraPreviewView *localVideoView; -@property(nonatomic) __kindof UIView *remoteVideoView; +@property(nonatomic) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer; +@property(nonatomic) RTC_OBJC_TYPE(RTCCameraPreviewView) * 
localVideoView; +@property(nonatomic) __kindof UIView *remoteVideoView; @property(nonatomic) UIButton *callButton; @property(nonatomic) UIButton *hangUpButton; @@ -50,14 +50,14 @@ _view = [[UIView alloc] initWithFrame:CGRectZero]; #if defined(RTC_SUPPORTS_METAL) - _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero]; + _remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; #else - _remoteVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero]; + _remoteVideoView = [[RTC_OBJC_TYPE(RTCEAGLVideoView) alloc] initWithFrame:CGRectZero]; #endif _remoteVideoView.translatesAutoresizingMaskIntoConstraints = NO; [_view addSubview:_remoteVideoView]; - _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero]; + _localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero]; _localVideoView.translatesAutoresizingMaskIntoConstraints = NO; [_view addSubview:_localVideoView]; @@ -106,14 +106,15 @@ - (void)viewDidLoad { [super viewDidLoad]; - self.capturer = [[RTCCameraVideoCapturer alloc] init]; + self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] init]; self.localVideoView.captureSession = self.capturer.captureSession; _call_client.reset(new webrtc_examples::ObjCCallClient()); // Start capturer. 
AVCaptureDevice *selectedDevice = nil; - NSArray *captureDevices = [RTCCameraVideoCapturer captureDevices]; + NSArray *captureDevices = + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]; for (AVCaptureDevice *device in captureDevices) { if (device.position == AVCaptureDevicePositionFront) { selectedDevice = device; @@ -126,7 +127,7 @@ int targetHeight = 480; int currentDiff = INT_MAX; NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:selectedDevice]; + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:selectedDevice]; for (AVCaptureDeviceFormat *format in formats) { CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); diff --git a/examples/objcnativeapi/objc/objc_call_client.h b/examples/objcnativeapi/objc/objc_call_client.h index de7e5fd449..90ac20ac01 100644 --- a/examples/objcnativeapi/objc/objc_call_client.h +++ b/examples/objcnativeapi/objc/objc_call_client.h @@ -14,13 +14,16 @@ #include #include +#import "sdk/objc/base/RTCMacros.h" + #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" #include "rtc_base/critical_section.h" #include "rtc_base/thread_checker.h" -@class RTCVideoCapturer; -@protocol RTCVideoRenderer; +@class RTC_OBJC_TYPE(RTCVideoCapturer); +@protocol RTC_OBJC_TYPE +(RTCVideoRenderer); namespace webrtc_examples { @@ -28,7 +31,8 @@ class ObjCCallClient { public: ObjCCallClient(); - void Call(RTCVideoCapturer* capturer, id remote_renderer); + void Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer, + id remote_renderer); void Hangup(); private: diff --git a/examples/objcnativeapi/objc/objc_call_client.mm b/examples/objcnativeapi/objc/objc_call_client.mm index 4be30bbb01..52ee2b5f95 100644 --- a/examples/objcnativeapi/objc/objc_call_client.mm +++ b/examples/objcnativeapi/objc/objc_call_client.mm @@ -64,7 +64,8 @@ ObjCCallClient::ObjCCallClient() 
CreatePeerConnectionFactory(); } -void ObjCCallClient::Call(RTCVideoCapturer* capturer, id remote_renderer) { +void ObjCCallClient::Call(RTC_OBJC_TYPE(RTCVideoCapturer) * capturer, + id remote_renderer) { RTC_DCHECK_RUN_ON(&thread_checker_); rtc::CritScope lock(&pc_mutex_); @@ -122,10 +123,10 @@ void ObjCCallClient::CreatePeerConnectionFactory() { media_deps.task_queue_factory = dependencies.task_queue_factory.get(); media_deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); media_deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); - media_deps.video_encoder_factory = - webrtc::ObjCToNativeVideoEncoderFactory([[RTCDefaultVideoEncoderFactory alloc] init]); - media_deps.video_decoder_factory = - webrtc::ObjCToNativeVideoDecoderFactory([[RTCDefaultVideoDecoderFactory alloc] init]); + media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory( + [[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init]); + media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory( + [[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init]); media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create(); dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps)); RTC_LOG(LS_INFO) << "Media engine created: " << dependencies.media_engine.get(); diff --git a/modules/audio_coding/codecs/ilbc/complexityMeasures.m b/modules/audio_coding/codecs/ilbc/complexityMeasures.m index 5c39e3615c..4bda83622f 100644 --- a/modules/audio_coding/codecs/ilbc/complexityMeasures.m +++ b/modules/audio_coding/codecs/ilbc/complexityMeasures.m @@ -1,14 +1,12 @@ -% -% Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. -% -% Use of this source code is governed by a BSD-style license -% that can be found in the LICENSE file in the root of the source -% tree. An additional intellectual property rights grant can be found -% in the file PATENTS. 
All contributing project authors may -% be found in the AUTHORS file in the root of the source tree. -% +% % Copyright(c) 2011 The WebRTC project authors.All Rights Reserved.% + % Use of this source code is governed by a BSD + - + style license % that can be found in the LICENSE file in the root of the source + % tree.An additional intellectual property rights grant can be found + % in the file PATENTS.All contributing project authors may + % be found in the AUTHORS file in the root of the source tree.% -clear; + clear; pack; % % Enter the path to YOUR executable and remember to define the perprocessor @@ -56,4 +54,4 @@ result % Compute maximum complexity for a single frame (enc/dec separately and together) maxEncComplexityInAFrame = (max(sum(new(1:size(new,1)/2,:),2))/0.03)/1000000 maxDecComplexityInAFrame = (max(sum(new(size(new,1)/2+1:end,:),2))/0.03)/1000000 -totalComplexity = maxEncComplexityInAFrame + maxDecComplexityInAFrame \ No newline at end of file +totalComplexity = maxEncComplexityInAFrame + maxDecComplexityInAFrame diff --git a/modules/video_coding/codecs/test/objc_codec_factory_helper.mm b/modules/video_coding/codecs/test/objc_codec_factory_helper.mm index df12fd9689..ed82376251 100644 --- a/modules/video_coding/codecs/test/objc_codec_factory_helper.mm +++ b/modules/video_coding/codecs/test/objc_codec_factory_helper.mm @@ -19,11 +19,11 @@ namespace webrtc { namespace test { std::unique_ptr CreateObjCEncoderFactory() { - return ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]); + return ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]); } std::unique_ptr CreateObjCDecoderFactory() { - return ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]); + return ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]); } } // namespace test diff --git a/rtc_base/system/gcd_helpers.m b/rtc_base/system/gcd_helpers.m index ff113266a1..fd9a361fa1 
100644 --- a/rtc_base/system/gcd_helpers.m +++ b/rtc_base/system/gcd_helpers.m @@ -19,4 +19,4 @@ dispatch_queue_t RTCDispatchQueueCreateWithTarget(const char *label, dispatch_queue_t queue = dispatch_queue_create(label, attr); dispatch_set_target_queue(queue, target); return queue; -} \ No newline at end of file +} diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h index d983ae6c33..9b123d2d05 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h +++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h @@ -23,7 +23,7 @@ NS_ASSUME_NONNULL_BEGIN * Calls made to the webrtc::VideoRenderInterface will be adapted and passed to * this video renderer. */ -@property(nonatomic, readonly) id videoRenderer; +@property(nonatomic, readonly) id videoRenderer; /** * The native VideoSinkInterface surface exposed by this adapter. Calls made @@ -33,7 +33,7 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, readonly) rtc::VideoSinkInterface *nativeVideoRenderer; /** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */ -- (instancetype)initWithNativeRenderer:(id)videoRenderer +- (instancetype)initWithNativeRenderer:(id)videoRenderer NS_DESIGNATED_INITIALIZER; @end diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm index 27dd6c2c52..ef02f72f60 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.mm +++ b/sdk/objc/api/RTCVideoRendererAdapter.mm @@ -26,7 +26,7 @@ class VideoRendererAdapter } void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { - RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame); + RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame); CGSize current_size = (videoFrame.rotation % 180 == 0) ? 
CGSizeMake(videoFrame.width, videoFrame.height) @@ -51,7 +51,7 @@ class VideoRendererAdapter @synthesize videoRenderer = _videoRenderer; -- (instancetype)initWithNativeRenderer:(id)videoRenderer { +- (instancetype)initWithNativeRenderer:(id)videoRenderer { NSParameterAssert(videoRenderer); if (self = [super init]) { _videoRenderer = videoRenderer; diff --git a/sdk/objc/api/logging/RTCCallbackLogger.h b/sdk/objc/api/logging/RTCCallbackLogger.h index 2bce03fe0f..c1aeb825cb 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.h +++ b/sdk/objc/api/logging/RTCCallbackLogger.h @@ -22,7 +22,7 @@ typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)(NSString *message, // This class intercepts WebRTC logs and forwards them to a registered block. // This class is not threadsafe. RTC_OBJC_EXPORT -@interface RTCCallbackLogger : NSObject +@interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject // The severity level to capture. The default is kRTCLoggingSeverityInfo. @property(nonatomic, assign) RTCLoggingSeverity severity; diff --git a/sdk/objc/api/logging/RTCCallbackLogger.mm b/sdk/objc/api/logging/RTCCallbackLogger.mm index e58b03b549..443fee1a65 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.mm +++ b/sdk/objc/api/logging/RTCCallbackLogger.mm @@ -64,7 +64,7 @@ class CallbackWithSeverityLogSink : public rtc::LogSink { RTCCallbackLoggerMessageAndSeverityHandler callback_handler_; }; -@implementation RTCCallbackLogger { +@implementation RTC_OBJC_TYPE (RTCCallbackLogger) { BOOL _hasStarted; std::unique_ptr _logSink; } diff --git a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h index bf1ea62044..2c333f9d73 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h @@ -12,20 +12,22 @@ #import "RTCMediaSource+Private.h" -@interface RTCAudioSource () +@interface RTC_OBJC_TYPE (RTCAudioSource) +() -/** - * The AudioSourceInterface object passed to 
this RTCAudioSource during - * construction. - */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioSource; + /** + * The AudioSourceInterface object passed to this RTCAudioSource during + * construction. + */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeAudioSource; /** Initialize an RTCAudioSource from a native AudioSourceInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeAudioSource:(rtc::scoped_refptr)nativeAudioSource NS_DESIGNATED_INITIALIZER; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.h b/sdk/objc/api/peerconnection/RTCAudioSource.h index d1030e3fef..9f78dcd992 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource.h +++ b/sdk/objc/api/peerconnection/RTCAudioSource.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCAudioSource : RTCMediaSource +@interface RTC_OBJC_TYPE (RTCAudioSource) : RTC_OBJC_TYPE(RTCMediaSource) - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.mm b/sdk/objc/api/peerconnection/RTCAudioSource.mm index a6822f6702..b56c6e9648 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource.mm +++ b/sdk/objc/api/peerconnection/RTCAudioSource.mm @@ -12,13 +12,13 @@ #include "rtc_base/checks.h" -@implementation RTCAudioSource { +@implementation RTC_OBJC_TYPE (RTCAudioSource) { } @synthesize volume = _volume; @synthesize nativeAudioSource = _nativeAudioSource; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeAudioSource: (rtc::scoped_refptr)nativeAudioSource { 
RTC_DCHECK(factory); @@ -32,7 +32,7 @@ return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type { RTC_NOTREACHED(); @@ -41,7 +41,7 @@ - (NSString *)description { NSString *stateString = [[self class] stringForState:self.state]; - return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString]; + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCAudioSource)( %p ): %@", self, stateString]; } - (void)setVolume:(double)volume { diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h index 88dd971b67..6495500484 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h @@ -14,15 +14,16 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; -@interface RTCAudioTrack () +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@interface RTC_OBJC_TYPE (RTCAudioTrack) +() -/** AudioTrackInterface created or passed in at construction. */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; + /** AudioTrackInterface created or passed in at construction. */ + @property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; /** Initialize an RTCAudioTrack with an id. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCAudioSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId; @end diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h index 501ef92ec4..95eb5d3d48 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h @@ -13,15 +13,15 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCAudioSource; +@class RTC_OBJC_TYPE(RTCAudioSource); RTC_OBJC_EXPORT -@interface RTCAudioTrack : RTCMediaStreamTrack +@interface RTC_OBJC_TYPE (RTCAudioTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack) - (instancetype)init NS_UNAVAILABLE; /** The audio source for this audio track. */ -@property(nonatomic, readonly) RTCAudioSource *source; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source; @end diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 3389b7612c..6a97f46eaa 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -17,12 +17,12 @@ #include "rtc_base/checks.h" -@implementation RTCAudioTrack +@implementation RTC_OBJC_TYPE (RTCAudioTrack) @synthesize source = _source; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCAudioSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId { RTC_DCHECK(factory); RTC_DCHECK(source); @@ -37,7 +37,7 @@ return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack type:(RTCMediaStreamTrackType)type { NSParameterAssert(factory); @@ -46,14 +46,13 @@ 
return [super initWithFactory:factory nativeTrack:nativeTrack type:type]; } - -- (RTCAudioSource *)source { +- (RTC_OBJC_TYPE(RTCAudioSource) *)source { if (!_source) { rtc::scoped_refptr source = self.nativeAudioTrack->GetSource(); if (source) { - _source = - [[RTCAudioSource alloc] initWithFactory:self.factory nativeAudioSource:source.get()]; + _source = [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self.factory + nativeAudioSource:source.get()]; } } return _source; diff --git a/sdk/objc/api/peerconnection/RTCCertificate.h b/sdk/objc/api/peerconnection/RTCCertificate.h index 50c1ca55a3..5ac8984d4a 100644 --- a/sdk/objc/api/peerconnection/RTCCertificate.h +++ b/sdk/objc/api/peerconnection/RTCCertificate.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCCertificate : NSObject +@interface RTC_OBJC_TYPE (RTCCertificate) : NSObject /** Private key in PEM. */ @property(nonatomic, readonly, copy) NSString *private_key; @@ -37,7 +37,7 @@ RTC_OBJC_EXPORT * provided. 
* - name: "ECDSA" or "RSASSA-PKCS1-v1_5" */ -+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params; ++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params; @end diff --git a/sdk/objc/api/peerconnection/RTCCertificate.mm b/sdk/objc/api/peerconnection/RTCCertificate.mm index 250cfc4920..e5c33e407c 100644 --- a/sdk/objc/api/peerconnection/RTCCertificate.mm +++ b/sdk/objc/api/peerconnection/RTCCertificate.mm @@ -16,7 +16,7 @@ #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_identity.h" -@implementation RTCCertificate +@implementation RTC_OBJC_TYPE (RTCCertificate) @synthesize private_key = _private_key; @synthesize certificate = _certificate; @@ -35,7 +35,7 @@ return self; } -+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params { ++ (nullable RTC_OBJC_TYPE(RTCCertificate) *)generateCertificateWithParams:(NSDictionary *)params { rtc::KeyType keyType = rtc::KT_ECDSA; NSString *keyTypeString = [params valueForKey:@"name"]; if (keyTypeString && [keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) { @@ -63,8 +63,9 @@ RTC_LOG(LS_INFO) << "CERT PEM "; RTC_LOG(LS_INFO) << pem_certificate; - RTCCertificate *cert = [[RTCCertificate alloc] initWithPrivateKey:@(pem_private_key.c_str()) - certificate:@(pem_certificate.c_str())]; + RTC_OBJC_TYPE(RTCCertificate) *cert = + [[RTC_OBJC_TYPE(RTCCertificate) alloc] initWithPrivateKey:@(pem_private_key.c_str()) + certificate:@(pem_certificate.c_str())]; return cert; } diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Native.h b/sdk/objc/api/peerconnection/RTCConfiguration+Native.h index 54783f049a..07c0da6041 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration+Native.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration+Native.h @@ -14,14 +14,15 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCConfiguration () +@interface RTC_OBJC_TYPE (RTCConfiguration) +() -/** Optional TurnCustomizer. 
- * With this class one can modify outgoing TURN messages. - * The object passed in must remain valid until PeerConnection::Close() is - * called. - */ -@property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer; + /** Optional TurnCustomizer. + * With this class one can modify outgoing TURN messages. + * The object passed in must remain valid until PeerConnection::Close() is + * called. + */ + @property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer; @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h index 845f779272..70a6532dbc 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCConfiguration () +@interface RTC_OBJC_TYPE (RTCConfiguration) +() -+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy: - (RTCIceTransportPolicy)policy; + + (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy + : (RTCIceTransportPolicy)policy; + (RTCIceTransportPolicy)transportPolicyForTransportsType: (webrtc::PeerConnectionInterface::IceTransportsType)nativeType; @@ -65,8 +66,8 @@ NS_ASSUME_NONNULL_BEGIN + (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics; /** - * RTCConfiguration struct representation of this RTCConfiguration. This is - * needed to pass to the underlying C++ APIs. + * RTCConfiguration struct representation of this RTCConfiguration. + * This is needed to pass to the underlying C++ APIs. 
*/ - (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration; diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index 44d09228a4..4e9c674ef8 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -14,7 +14,7 @@ #import "RTCCryptoOptions.h" #import "RTCMacros.h" -@class RTCIceServer; +@class RTC_OBJC_TYPE(RTCIceServer); /** * Represents the ice transport policy. This exposes the same states in C++, @@ -70,7 +70,7 @@ typedef NS_ENUM(NSInteger, RTCSdpSemantics) { NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCConfiguration : NSObject +@interface RTC_OBJC_TYPE (RTCConfiguration) : NSObject /** If true, allows DSCP codes to be set on outgoing packets, configured using * networkPriority field of RTCRtpEncodingParameters. Defaults to false. @@ -78,10 +78,10 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) BOOL enableDscp; /** An array of Ice Servers available to be used by ICE. */ -@property(nonatomic, copy) NSArray *iceServers; +@property(nonatomic, copy) NSArray *iceServers; /** An RTCCertificate for 're' use. */ -@property(nonatomic, nullable) RTCCertificate *certificate; +@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCertificate) * certificate; /** Which candidates the ICE agent is allowed to use. The W3C calls it * |iceTransportPolicy|, while in C++ it is called |type|. */ @@ -173,9 +173,9 @@ RTC_OBJC_EXPORT * * UnifiedPlan will cause RTCPeerConnection to create offers and answers with * multiple m= sections where each m= section maps to one RTCRtpSender and one - * RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both video. This - * will also cause RTCPeerConnection to ignore all but the first a=ssrc lines - * that form a Plan B stream. + * RTCRtpReceiver (an RTCRtpTransceiver), either both audio or both + * video. 
This will also cause RTCPeerConnection) to ignore all but the first a=ssrc + * lines that form a Plan B stream. * * For users who wish to send multiple audio/video streams and need to stay * interoperable with legacy WebRTC implementations or use legacy APIs, @@ -214,7 +214,7 @@ RTC_OBJC_EXPORT * frame encryption for native WebRTC. Setting this will overwrite any * options set through the PeerConnectionFactory (which is deprecated). */ -@property(nonatomic, nullable) RTCCryptoOptions *cryptoOptions; +@property(nonatomic, nullable) RTC_OBJC_TYPE(RTCCryptoOptions) * cryptoOptions; /** * Time interval between audio RTCP reports. diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index eeb94939f3..52c1450505 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -20,7 +20,7 @@ #include "rtc_base/rtc_certificate_generator.h" #include "rtc_base/ssl_identity.h" -@implementation RTCConfiguration +@implementation RTC_OBJC_TYPE (RTCConfiguration) @synthesize enableDscp = _enableDscp; @synthesize iceServers = _iceServers; @@ -70,7 +70,8 @@ _enableDscp = config.dscp(); NSMutableArray *iceServers = [NSMutableArray array]; for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) { - RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server]; + RTC_OBJC_TYPE(RTCIceServer) *iceServer = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:server]; [iceServers addObject:iceServer]; } _iceServers = iceServers; @@ -78,9 +79,9 @@ rtc::scoped_refptr native_cert; native_cert = config.certificates[0]; rtc::RTCCertificatePEM native_pem = native_cert->ToPEM(); - _certificate = - [[RTCCertificate alloc] initWithPrivateKey:@(native_pem.private_key().c_str()) - certificate:@(native_pem.certificate().c_str())]; + _certificate = [[RTC_OBJC_TYPE(RTCCertificate) alloc] + initWithPrivateKey:@(native_pem.private_key().c_str()) + 
certificate:@(native_pem.certificate().c_str())]; } _iceTransportPolicy = [[self class] transportPolicyForTransportsType:config.type]; @@ -122,7 +123,7 @@ _turnCustomizer = config.turn_customizer; _activeResetSrtpParams = config.active_reset_srtp_params; if (config.crypto_options) { - _cryptoOptions = [[RTCCryptoOptions alloc] + _cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:config.crypto_options->srtp .enable_gcm_crypto_suites srtpEnableAes128Sha1_32CryptoCipher:config.crypto_options->srtp @@ -140,7 +141,7 @@ } - (NSString *)description { - static NSString *formatString = @"RTCConfiguration: " + static NSString *formatString = @"RTC_OBJC_TYPE(RTCConfiguration): " @"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n" @"%d\n%@\n%d\n%d\n%d\n%d\n%d\n%@\n%d\n}\n"; @@ -181,7 +182,7 @@ webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive)); nativeConfig->set_dscp(_enableDscp); - for (RTCIceServer *iceServer in _iceServers) { + for (RTC_OBJC_TYPE(RTCIceServer) * iceServer in _iceServers) { nativeConfig->servers.push_back(iceServer.nativeServer); } nativeConfig->type = diff --git a/sdk/objc/api/peerconnection/RTCCryptoOptions.h b/sdk/objc/api/peerconnection/RTCCryptoOptions.h index b465bb5a73..759a45e037 100644 --- a/sdk/objc/api/peerconnection/RTCCryptoOptions.h +++ b/sdk/objc/api/peerconnection/RTCCryptoOptions.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN * as Objective-C doesn't support nested structures. */ RTC_OBJC_EXPORT -@interface RTCCryptoOptions : NSObject +@interface RTC_OBJC_TYPE (RTCCryptoOptions) : NSObject /** * Enable GCM crypto suites from RFC 7714 for SRTP. 
GCM will only be used diff --git a/sdk/objc/api/peerconnection/RTCCryptoOptions.mm b/sdk/objc/api/peerconnection/RTCCryptoOptions.mm index a059f75599..fbaa1de58d 100644 --- a/sdk/objc/api/peerconnection/RTCCryptoOptions.mm +++ b/sdk/objc/api/peerconnection/RTCCryptoOptions.mm @@ -10,7 +10,7 @@ #import "RTCCryptoOptions.h" -@implementation RTCCryptoOptions +@implementation RTC_OBJC_TYPE (RTCCryptoOptions) @synthesize srtpEnableGcmCryptoSuites = _srtpEnableGcmCryptoSuites; @synthesize srtpEnableAes128Sha1_32CryptoCipher = _srtpEnableAes128Sha1_32CryptoCipher; diff --git a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h index e327fb4f3e..2cdbdabec6 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h @@ -15,27 +15,29 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTCDataBuffer () +@interface RTC_OBJC_TYPE (RTCDataBuffer) +() -/** - * The native DataBuffer representation of this RTCDatabuffer object. This is - * needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer; + /** + * The native DataBuffer representation of this RTCDatabuffer object. This is + * needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer; /** Initialize an RTCDataBuffer from a native DataBuffer. */ - (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer; @end -@interface RTCDataChannel () +@interface RTC_OBJC_TYPE (RTCDataChannel) +() -/** Initialize an RTCDataChannel from a native DataChannelInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - nativeDataChannel:(rtc::scoped_refptr)nativeDataChannel - NS_DESIGNATED_INITIALIZER; + /** Initialize an RTCDataChannel from a native DataChannelInterface. 
*/ + - (instancetype)initWithFactory + : (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel + : (rtc::scoped_refptr)nativeDataChannel NS_DESIGNATED_INITIALIZER; + (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState: (RTCDataChannelState)state; diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.h b/sdk/objc/api/peerconnection/RTCDataChannel.h index 0cc2de87f2..2d0661f136 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCDataBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCDataBuffer) : NSObject /** NSData representation of the underlying buffer. */ @property(nonatomic, readonly) NSData *data; @@ -34,20 +34,22 @@ RTC_OBJC_EXPORT @end -@class RTCDataChannel; +@class RTC_OBJC_TYPE(RTCDataChannel); RTC_OBJC_EXPORT -@protocol RTCDataChannelDelegate +@protocol RTC_OBJC_TYPE +(RTCDataChannelDelegate) -/** The data channel state changed. */ -- (void)dataChannelDidChangeState:(RTCDataChannel *)dataChannel; + /** The data channel state changed. */ + - (void)dataChannelDidChangeState : (RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel; /** The data channel successfully received a data buffer. */ -- (void)dataChannel:(RTCDataChannel *)dataChannel - didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer; +- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel + didReceiveMessageWithBuffer:(RTC_OBJC_TYPE(RTCDataBuffer) *)buffer; @optional /** The data channel's |bufferedAmount| changed. 
*/ -- (void)dataChannel:(RTCDataChannel *)dataChannel didChangeBufferedAmount:(uint64_t)amount; +- (void)dataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel + didChangeBufferedAmount:(uint64_t)amount; @end @@ -60,7 +62,7 @@ typedef NS_ENUM(NSInteger, RTCDataChannelState) { }; RTC_OBJC_EXPORT -@interface RTCDataChannel : NSObject +@interface RTC_OBJC_TYPE (RTCDataChannel) : NSObject /** * A label that can be used to distinguish this data channel from other data @@ -115,7 +117,7 @@ RTC_OBJC_EXPORT @property(nonatomic, readonly) uint64_t bufferedAmount; /** The delegate for this data channel. */ -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; - (instancetype)init NS_UNAVAILABLE; @@ -123,7 +125,7 @@ RTC_OBJC_EXPORT - (void)close; /** Attempt to send |data| on this data channel's underlying data transport. */ -- (BOOL)sendData:(RTCDataBuffer *)data; +- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data; @end diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.mm b/sdk/objc/api/peerconnection/RTCDataChannel.mm index 35c009eb96..4a79cefdb4 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.mm +++ b/sdk/objc/api/peerconnection/RTCDataChannel.mm @@ -18,21 +18,21 @@ namespace webrtc { class DataChannelDelegateAdapter : public DataChannelObserver { public: - DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; } + DataChannelDelegateAdapter(RTC_OBJC_TYPE(RTCDataChannel) * channel) { channel_ = channel; } void OnStateChange() override { [channel_.delegate dataChannelDidChangeState:channel_]; } void OnMessage(const DataBuffer& buffer) override { - RTCDataBuffer *data_buffer = - [[RTCDataBuffer alloc] initWithNativeBuffer:buffer]; + RTC_OBJC_TYPE(RTCDataBuffer) *data_buffer = + [[RTC_OBJC_TYPE(RTCDataBuffer) alloc] initWithNativeBuffer:buffer]; [channel_.delegate dataChannel:channel_ didReceiveMessageWithBuffer:data_buffer]; } void OnBufferedAmountChange(uint64_t previousAmount) override { - id delegate = 
channel_.delegate; + id delegate = channel_.delegate; SEL sel = @selector(dataChannel:didChangeBufferedAmount:); if ([delegate respondsToSelector:sel]) { [delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount]; @@ -40,12 +40,11 @@ class DataChannelDelegateAdapter : public DataChannelObserver { } private: - __weak RTCDataChannel *channel_; + __weak RTC_OBJC_TYPE(RTCDataChannel) * channel_; }; } - -@implementation RTCDataBuffer { +@implementation RTC_OBJC_TYPE (RTCDataBuffer) { std::unique_ptr _dataBuffer; } @@ -83,9 +82,8 @@ class DataChannelDelegateAdapter : public DataChannelObserver { @end - -@implementation RTCDataChannel { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCDataChannel) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeDataChannel; std::unique_ptr _observer; BOOL _isObserverRegistered; @@ -152,21 +150,20 @@ class DataChannelDelegateAdapter : public DataChannelObserver { _nativeDataChannel->Close(); } -- (BOOL)sendData:(RTCDataBuffer *)data { +- (BOOL)sendData:(RTC_OBJC_TYPE(RTCDataBuffer) *)data { return _nativeDataChannel->Send(*data.nativeDataBuffer); } - (NSString *)description { - return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDataChannel):\n%ld\n%@\n%@", (long)self.channelId, self.label, - [[self class] - stringForState:self.readyState]]; + [[self class] stringForState:self.readyState]]; } #pragma mark - Private -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeDataChannel: (rtc::scoped_refptr)nativeDataChannel { NSParameterAssert(nativeDataChannel); diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h index 244f742ccc..5aef10fcef 100644 --- 
a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h +++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration+Private.h @@ -14,9 +14,10 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCDataChannelConfiguration () +@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) +() -@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit; + @property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit; @end diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h index 96d33f4d72..9459ae0a13 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCDataChannelConfiguration : NSObject +@interface RTC_OBJC_TYPE (RTCDataChannelConfiguration) : NSObject /** Set to YES if ordered delivery is required. */ @property(nonatomic, assign) BOOL isOrdered; diff --git a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm index 198bfbbaed..bf775b1afd 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCDataChannelConfiguration.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCDataChannelConfiguration +@implementation RTC_OBJC_TYPE (RTCDataChannelConfiguration) @synthesize nativeDataChannelInit = _nativeDataChannelInit; diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h b/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h index ec054818ef..49a62164cd 100644 --- a/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h +++ b/sdk/objc/api/peerconnection/RTCDtmfSender+Private.h @@ -14,7 +14,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCDtmfSender : NSObject +@interface RTC_OBJC_TYPE (RTCDtmfSender) : NSObject @property(nonatomic, readonly) rtc::scoped_refptr 
nativeDtmfSender; diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender.h b/sdk/objc/api/peerconnection/RTCDtmfSender.h index 5d86d01892..0f1b6ba4da 100644 --- a/sdk/objc/api/peerconnection/RTCDtmfSender.h +++ b/sdk/objc/api/peerconnection/RTCDtmfSender.h @@ -15,14 +15,15 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@protocol RTCDtmfSender +@protocol RTC_OBJC_TYPE +(RTCDtmfSender) -/** - * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise - * returns false. To be able to send DTMF, the associated RTCRtpSender must be - * able to send packets, and a "telephone-event" codec must be negotiated. - */ -@property(nonatomic, readonly) BOOL canInsertDtmf; + /** + * Returns true if this RTCDtmfSender is capable of sending DTMF. Otherwise + * returns false. To be able to send DTMF, the associated RTCRtpSender must be + * able to send packets, and a "telephone-event" codec must be negotiated. + */ + @property(nonatomic, readonly) BOOL canInsertDtmf; /** * Queues a task that sends the DTMF tones. 
The tones parameter is treated diff --git a/sdk/objc/api/peerconnection/RTCDtmfSender.mm b/sdk/objc/api/peerconnection/RTCDtmfSender.mm index 77d0678275..ee3b79cd37 100644 --- a/sdk/objc/api/peerconnection/RTCDtmfSender.mm +++ b/sdk/objc/api/peerconnection/RTCDtmfSender.mm @@ -15,7 +15,7 @@ #include "rtc_base/time_utils.h" -@implementation RTCDtmfSender { +@implementation RTC_OBJC_TYPE (RTCDtmfSender) { rtc::scoped_refptr _nativeDtmfSender; } @@ -48,12 +48,11 @@ } - (NSString *)description { - return [NSString - stringWithFormat: - @"RTCDtmfSender {\n remainingTones: %@\n duration: %f sec\n interToneGap: %f sec\n}", - [self remainingTones], - [self duration], - [self interToneGap]]; + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCDtmfSender) {\n remainingTones: %@\n " + @"duration: %f sec\n interToneGap: %f sec\n}", + [self remainingTones], + [self duration], + [self interToneGap]]; } #pragma mark - Private @@ -67,7 +66,8 @@ NSParameterAssert(nativeDtmfSender); if (self = [super init]) { _nativeDtmfSender = nativeDtmfSender; - RTCLogInfo(@"RTCDtmfSender(%p): created DTMF sender: %@", self, self.description); + RTCLogInfo( + @"RTC_OBJC_TYPE(RTCDtmfSender)(%p): created DTMF sender: %@", self, self.description); } return self; } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h index e96ce7bc8e..a078b0aded 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. 
*/ -@interface RTCEncodedImage (Private) +@interface RTC_OBJC_TYPE (RTCEncodedImage) +(Private) -- (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encodedImage; + - (instancetype)initWithNativeEncodedImage : (const webrtc::EncodedImage &)encodedImage; - (webrtc::EncodedImage)nativeEncodedImage; @end diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index 36d4d5a54c..f9e4346350 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -56,9 +56,10 @@ class ObjCEncodedImageBuffer : public webrtc::EncodedImageBufferInterface { } @end -@implementation RTCEncodedImage (Private) +@implementation RTC_OBJC_TYPE (RTCEncodedImage) +(Private) -- (rtc::scoped_refptr)encodedData { + - (rtc::scoped_refptr)encodedData { RTCWrappedEncodedImageBuffer *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData)); return wrappedBuffer.buffer; diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.h b/sdk/objc/api/peerconnection/RTCFileLogger.h index cd5c1c466b..853e673a05 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.h +++ b/sdk/objc/api/peerconnection/RTCFileLogger.h @@ -34,7 +34,7 @@ NS_ASSUME_NONNULL_BEGIN // For kRTCFileLoggerTypeApp, the oldest log is overwritten. // This class is not threadsafe. RTC_OBJC_EXPORT -@interface RTCFileLogger : NSObject +@interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject // The severity level to capture. The default is kRTCFileLoggerSeverityInfo. 
@property(nonatomic, assign) RTCFileLoggerSeverity severity; diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.mm b/sdk/objc/api/peerconnection/RTCFileLogger.mm index 2532fcf36f..9562245611 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.mm +++ b/sdk/objc/api/peerconnection/RTCFileLogger.mm @@ -21,7 +21,7 @@ NSString *const kDefaultLogDirName = @"webrtc_logs"; NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB. const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log"; -@implementation RTCFileLogger { +@implementation RTC_OBJC_TYPE (RTCFileLogger) { BOOL _hasStarted; NSString *_dirPath; NSUInteger _maxFileSize; diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h b/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h index 8c9156c402..409e16b608 100644 --- a/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h +++ b/sdk/objc/api/peerconnection/RTCIceCandidate+Private.h @@ -16,13 +16,14 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCIceCandidate () +@interface RTC_OBJC_TYPE (RTCIceCandidate) +() -/** - * The native IceCandidateInterface representation of this RTCIceCandidate - * object. This is needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) std::unique_ptr nativeCandidate; + /** + * The native IceCandidateInterface representation of this RTCIceCandidate + * object. This is needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, readonly) std::unique_ptr nativeCandidate; /** * Initialize an RTCIceCandidate from a native IceCandidateInterface. 
No diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate.h b/sdk/objc/api/peerconnection/RTCIceCandidate.h index 3e305cc418..f84843af6c 100644 --- a/sdk/objc/api/peerconnection/RTCIceCandidate.h +++ b/sdk/objc/api/peerconnection/RTCIceCandidate.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCIceCandidate : NSObject +@interface RTC_OBJC_TYPE (RTCIceCandidate) : NSObject /** * If present, the identifier of the "media stream identification" for the media diff --git a/sdk/objc/api/peerconnection/RTCIceCandidate.mm b/sdk/objc/api/peerconnection/RTCIceCandidate.mm index cbae3f3ddf..48385ef5b4 100644 --- a/sdk/objc/api/peerconnection/RTCIceCandidate.mm +++ b/sdk/objc/api/peerconnection/RTCIceCandidate.mm @@ -15,7 +15,7 @@ #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" -@implementation RTCIceCandidate +@implementation RTC_OBJC_TYPE (RTCIceCandidate) @synthesize sdpMid = _sdpMid; @synthesize sdpMLineIndex = _sdpMLineIndex; @@ -35,7 +35,7 @@ } - (NSString *)description { - return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceCandidate):\n%@\n%d\n%@\n%@", _sdpMid, _sdpMLineIndex, _sdp, @@ -50,7 +50,7 @@ std::string sdp; candidate->ToString(&sdp); - RTCIceCandidate *rtcCandidate = + RTC_OBJC_TYPE(RTCIceCandidate) *rtcCandidate = [self initWithSdp:[NSString stringForStdString:sdp] sdpMLineIndex:candidate->sdp_mline_index() sdpMid:[NSString stringForStdString:candidate->sdp_mid()]]; diff --git a/sdk/objc/api/peerconnection/RTCIceServer+Private.h b/sdk/objc/api/peerconnection/RTCIceServer+Private.h index 53fbb45dc2..3eee819965 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer+Private.h +++ b/sdk/objc/api/peerconnection/RTCIceServer+Private.h @@ -14,13 +14,14 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCIceServer () +@interface RTC_OBJC_TYPE (RTCIceServer) +() -/** - * IceServer struct representation of this RTCIceServer object's data. 
- * This is needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer; + /** + * IceServer struct representation of this RTCIceServer object's data. + * This is needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer; /** Initialize an RTCIceServer from a native IceServer. */ - (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer; diff --git a/sdk/objc/api/peerconnection/RTCIceServer.h b/sdk/objc/api/peerconnection/RTCIceServer.h index ab5fc4a9ed..dd66c61a0b 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer.h +++ b/sdk/objc/api/peerconnection/RTCIceServer.h @@ -20,7 +20,7 @@ typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) { NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCIceServer : NSObject +@interface RTC_OBJC_TYPE (RTCIceServer) : NSObject /** URI(s) for this server represented as NSStrings. 
*/ @property(nonatomic, readonly) NSArray *urlStrings; diff --git a/sdk/objc/api/peerconnection/RTCIceServer.mm b/sdk/objc/api/peerconnection/RTCIceServer.mm index 2138e4c16a..19a0a7e9e8 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer.mm +++ b/sdk/objc/api/peerconnection/RTCIceServer.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCIceServer +@implementation RTC_OBJC_TYPE (RTCIceServer) @synthesize urlStrings = _urlStrings; @synthesize username = _username; @@ -97,7 +97,7 @@ } - (NSString *)description { - return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@\n%@\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCIceServer):\n%@\n%@\n%@\n%@\n%@\n%@\n%@", _urlStrings, _username, _credential, diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h index d87659d4d6..faa7962821 100644 --- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h +++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCLegacyStatsReport () +@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) +() -/** Initialize an RTCLegacyStatsReport object from a native StatsReport. */ -- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport; + /** Initialize an RTCLegacyStatsReport object from a native StatsReport. */ + - (instancetype)initWithNativeReport : (const webrtc::StatsReport &)nativeReport; @end diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h index 85f2b8fb3d..b3bd12c5d7 100644 --- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h +++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** This does not currently conform to the spec. 
*/ RTC_OBJC_EXPORT -@interface RTCLegacyStatsReport : NSObject +@interface RTC_OBJC_TYPE (RTCLegacyStatsReport) : NSObject /** Time since 1970-01-01T00:00:00Z in milliseconds. */ @property(nonatomic, readonly) CFTimeInterval timestamp; diff --git a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm index 89e1b85a69..bd7a1ad9c9 100644 --- a/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm +++ b/sdk/objc/api/peerconnection/RTCLegacyStatsReport.mm @@ -15,7 +15,7 @@ #include "rtc_base/checks.h" -@implementation RTCLegacyStatsReport +@implementation RTC_OBJC_TYPE (RTCLegacyStatsReport) @synthesize timestamp = _timestamp; @synthesize type = _type; @@ -23,7 +23,7 @@ @synthesize values = _values; - (NSString *)description { - return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCLegacyStatsReport):\n%@\n%@\n%f\n%@", _reportId, _type, _timestamp, diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h b/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h index b3e1b10a88..97eee8307d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints+Private.h @@ -16,13 +16,14 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMediaConstraints () +@interface RTC_OBJC_TYPE (RTCMediaConstraints) +() -/** - * A MediaConstraints representation of this RTCMediaConstraints object. This is - * needed to pass to the underlying C++ APIs. - */ -- (std::unique_ptr)nativeConstraints; + /** + * A MediaConstraints representation of this RTCMediaConstraints object. This is + * needed to pass to the underlying C++ APIs. 
+ */ + - (std::unique_ptr)nativeConstraints; /** Return a native Constraints object representing these constraints */ + (webrtc::MediaConstraints::Constraints)nativeConstraintsForConstraints: diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.h b/sdk/objc/api/peerconnection/RTCMediaConstraints.h index 5c1a12e33a..c5baf20c1d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints.h +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.h @@ -31,7 +31,7 @@ RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue; RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse; RTC_OBJC_EXPORT -@interface RTCMediaConstraints : NSObject +@interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm index bfdbdde6c5..0f46e4b8fe 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm @@ -28,7 +28,7 @@ NSString *const kRTCMediaConstraintsVoiceActivityDetection = NSString *const kRTCMediaConstraintsValueTrue = @(webrtc::MediaConstraints::kValueTrue); NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kValueFalse); -@implementation RTCMediaConstraints { +@implementation RTC_OBJC_TYPE (RTCMediaConstraints) { NSDictionary *_mandatory; NSDictionary *_optional; } @@ -47,9 +47,8 @@ NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kVa } - (NSString *)description { - return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@", - _mandatory, - _optional]; + return [NSString + stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaConstraints):\n%@\n%@", _mandatory, _optional]; } #pragma mark - Private diff --git a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h index 7d69aaae7c..edda892e50 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h +++ 
b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h @@ -14,18 +14,20 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); typedef NS_ENUM(NSInteger, RTCMediaSourceType) { RTCMediaSourceTypeAudio, RTCMediaSourceTypeVideo, }; -@interface RTCMediaSource () +@interface RTC_OBJC_TYPE (RTCMediaSource) +() -@property(nonatomic, readonly) rtc::scoped_refptr nativeMediaSource; + @property(nonatomic, + readonly) rtc::scoped_refptr nativeMediaSource; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.h b/sdk/objc/api/peerconnection/RTCMediaSource.h index 838c783208..ba19c2a352 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource.h +++ b/sdk/objc/api/peerconnection/RTCMediaSource.h @@ -22,7 +22,7 @@ typedef NS_ENUM(NSInteger, RTCSourceState) { NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCMediaSource : NSObject +@interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject /** The current state of the RTCMediaSource. 
*/ @property(nonatomic, readonly) RTCSourceState state; diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.mm b/sdk/objc/api/peerconnection/RTCMediaSource.mm index 6ec41c3b50..61472a782a 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource.mm +++ b/sdk/objc/api/peerconnection/RTCMediaSource.mm @@ -12,14 +12,14 @@ #include "rtc_base/checks.h" -@implementation RTCMediaSource { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCMediaSource) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; RTCMediaSourceType _type; } @synthesize nativeMediaSource = _nativeMediaSource; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type { RTC_DCHECK(factory); diff --git a/sdk/objc/api/peerconnection/RTCMediaStream+Private.h b/sdk/objc/api/peerconnection/RTCMediaStream+Private.h index 23149ce56e..6c8a602766 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaStream+Private.h @@ -14,19 +14,22 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMediaStream () +@interface RTC_OBJC_TYPE (RTCMediaStream) +() -/** - * MediaStreamInterface representation of this RTCMediaStream object. This is - * needed to pass to the underlying C++ APIs. - */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeMediaStream; + /** + * MediaStreamInterface representation of this RTCMediaStream object. This is + * needed to pass to the underlying C++ APIs. + */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeMediaStream; /** Initialize an RTCMediaStream with an id. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory streamId:(NSString *)streamId; +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + streamId:(NSString *)streamId; /** Initialize an RTCMediaStream from a native MediaStreamInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaStream:(rtc::scoped_refptr)nativeMediaStream; @end diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.h b/sdk/objc/api/peerconnection/RTCMediaStream.h index bb9bec690a..2d56f15c7d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream.h +++ b/sdk/objc/api/peerconnection/RTCMediaStream.h @@ -14,18 +14,18 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCAudioTrack; -@class RTCPeerConnectionFactory; -@class RTCVideoTrack; +@class RTC_OBJC_TYPE(RTCAudioTrack); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@class RTC_OBJC_TYPE(RTCVideoTrack); RTC_OBJC_EXPORT -@interface RTCMediaStream : NSObject +@interface RTC_OBJC_TYPE (RTCMediaStream) : NSObject /** The audio tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *audioTracks; +@property(nonatomic, strong, readonly) NSArray *audioTracks; /** The video tracks in this stream. */ -@property(nonatomic, strong, readonly) NSArray *videoTracks; +@property(nonatomic, strong, readonly) NSArray *videoTracks; /** An identifier for this media stream. */ @property(nonatomic, readonly) NSString *streamId; @@ -33,16 +33,16 @@ RTC_OBJC_EXPORT - (instancetype)init NS_UNAVAILABLE; /** Adds the given audio track to this media stream. */ -- (void)addAudioTrack:(RTCAudioTrack *)audioTrack; +- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack; /** Adds the given video track to this media stream. 
*/ -- (void)addVideoTrack:(RTCVideoTrack *)videoTrack; +- (void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack; /** Removes the given audio track to this media stream. */ -- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack; +- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack; /** Removes the given video track to this media stream. */ -- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack; +- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack; @end diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.mm b/sdk/objc/api/peerconnection/RTCMediaStream.mm index c1a402a648..a6292b547c 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStream.mm @@ -18,14 +18,14 @@ #import "RTCVideoTrack+Private.h" #import "helpers/NSString+StdString.h" -@implementation RTCMediaStream { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCMediaStream) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; NSMutableArray *_audioTracks; NSMutableArray *_videoTracks; rtc::scoped_refptr _nativeMediaStream; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory streamId:(NSString *)streamId { NSParameterAssert(factory); NSParameterAssert(streamId.length); @@ -35,11 +35,11 @@ return [self initWithFactory:factory nativeMediaStream:stream]; } -- (NSArray *)audioTracks { +- (NSArray *)audioTracks { return [_audioTracks copy]; } -- (NSArray *)videoTracks { +- (NSArray *)videoTracks { return [_videoTracks copy]; } @@ -47,32 +47,32 @@ return [NSString stringForStdString:_nativeMediaStream->id()]; } -- (void)addAudioTrack:(RTCAudioTrack *)audioTrack { +- (void)addAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack { if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) { [_audioTracks addObject:audioTrack]; } } -- (void)addVideoTrack:(RTCVideoTrack *)videoTrack { +- 
(void)addVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack { if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) { [_videoTracks addObject:videoTrack]; } } -- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack { +- (void)removeAudioTrack:(RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrack { NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack]; NSAssert(index != NSNotFound, - @"|removeAudioTrack| called on unexpected RTCAudioTrack"); + @"|removeAudioTrack| called on unexpected RTC_OBJC_TYPE(RTCAudioTrack)"); if (index != NSNotFound && _nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) { [_audioTracks removeObjectAtIndex:index]; } } -- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack { +- (void)removeVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrack { NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack]; NSAssert(index != NSNotFound, - @"|removeVideoTrack| called on unexpected RTCVideoTrack"); + @"|removeVideoTrack| called on unexpected RTC_OBJC_TYPE(RTCVideoTrack)"); if (index != NSNotFound && _nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) { [_videoTracks removeObjectAtIndex:index]; @@ -80,7 +80,7 @@ } - (NSString *)description { - return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStream):\n%@\nA=%lu\nV=%lu", self.streamId, (unsigned long)self.audioTracks.count, (unsigned long)self.videoTracks.count]; @@ -92,7 +92,7 @@ return _nativeMediaStream; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaStream: (rtc::scoped_refptr)nativeMediaStream { NSParameterAssert(nativeMediaStream); @@ -108,15 +108,19 @@ for (auto &track : audioTracks) { RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio; - RTCAudioTrack *audioTrack = - [[RTCAudioTrack alloc] initWithFactory:_factory nativeTrack:track type:type]; 
+ RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack = + [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory + nativeTrack:track + type:type]; [_audioTracks addObject:audioTrack]; } for (auto &track : videoTracks) { RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo; - RTCVideoTrack *videoTrack = - [[RTCVideoTrack alloc] initWithFactory:_factory nativeTrack:track type:type]; + RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack = + [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory + nativeTrack:track + type:type]; [_videoTracks addObject:videoTrack]; } } diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h index 176bb73d85..ee51e27b2d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h @@ -19,11 +19,13 @@ typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) { NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTCMediaStreamTrack () +@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) +() -@property(nonatomic, readonly) RTCPeerConnectionFactory *factory; + @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * + factory; /** * The native MediaStreamTrackInterface passed in or created during @@ -34,14 +36,14 @@ NS_ASSUME_NONNULL_BEGIN /** * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack; -- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track; +- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track; + (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState: (RTCMediaStreamTrackState)state; @@ -51,9 +53,9 @@ NS_ASSUME_NONNULL_BEGIN + (NSString *)stringForState:(RTCMediaStreamTrackState)state; -+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack: - (rtc::scoped_refptr)nativeTrack - factory:(RTCPeerConnectionFactory *)factory; ++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *) + mediaTrackForNativeTrack:(rtc::scoped_refptr)nativeTrack + factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory; @end diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h index d1ea0f28f3..2200122ccd 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h @@ -26,7 +26,7 @@ RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio; RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo; RTC_OBJC_EXPORT -@interface RTCMediaStreamTrack : NSObject +@interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject /** * The kind of track. 
For example, "audio" if this track represents an audio diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm index 07992a0202..f1e128ca60 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm @@ -19,8 +19,8 @@ NSString * const kRTCMediaStreamTrackKindAudio = NSString * const kRTCMediaStreamTrackKindVideo = @(webrtc::MediaStreamTrackInterface::kVideoKind); -@implementation RTCMediaStreamTrack { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeTrack; RTCMediaStreamTrackType _type; } @@ -47,7 +47,7 @@ NSString * const kRTCMediaStreamTrackKindVideo = - (NSString *)description { NSString *readyState = [[self class] stringForState:self.readyState]; - return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCMediaStreamTrack):\n%@\n%@\n%@\n%@", self.kind, self.trackId, self.isEnabled ? 
@"enabled" : @"disabled", @@ -61,7 +61,7 @@ NSString * const kRTCMediaStreamTrackKindVideo = if (![object isMemberOfClass:[self class]]) { return NO; } - return [self isEqualToTrack:(RTCMediaStreamTrack *)object]; + return [self isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)object]; } - (NSUInteger)hash { @@ -76,7 +76,7 @@ NSString * const kRTCMediaStreamTrackKindVideo = @synthesize factory = _factory; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack type:(RTCMediaStreamTrackType)type { NSParameterAssert(nativeTrack); @@ -89,7 +89,7 @@ NSString * const kRTCMediaStreamTrackKindVideo = return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack { NSParameterAssert(nativeTrack); if (nativeTrack->kind() == @@ -103,7 +103,7 @@ NSString * const kRTCMediaStreamTrackKindVideo = return nil; } -- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track { +- (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { if (!track) { return NO; } @@ -139,21 +139,22 @@ NSString * const kRTCMediaStreamTrackKindVideo = } } -+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack: - (rtc::scoped_refptr)nativeTrack - factory:(RTCPeerConnectionFactory *)factory { ++ (RTC_OBJC_TYPE(RTCMediaStreamTrack) *) + mediaTrackForNativeTrack:(rtc::scoped_refptr)nativeTrack + factory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory { NSParameterAssert(nativeTrack); NSParameterAssert(factory); if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) { - return [[RTCAudioTrack alloc] initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeAudio]; + return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack + 
type:RTCMediaStreamTrackTypeAudio]; } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) { - return [[RTCVideoTrack alloc] initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeVideo]; + return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack + type:RTCMediaStreamTrackTypeVideo]; } else { - return [[RTCMediaStreamTrack alloc] initWithFactory:factory nativeTrack:nativeTrack]; + return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack]; } } diff --git a/sdk/objc/api/peerconnection/RTCMetrics.h b/sdk/objc/api/peerconnection/RTCMetrics.h index 6629fdacec..fddbb27c90 100644 --- a/sdk/objc/api/peerconnection/RTCMetrics.h +++ b/sdk/objc/api/peerconnection/RTCMetrics.h @@ -20,4 +20,4 @@ RTC_EXTERN void RTCEnableMetrics(void); /** Gets and clears native histograms. */ -RTC_EXTERN NSArray* RTCGetAndResetMetrics(void); +RTC_EXTERN NSArray* RTCGetAndResetMetrics(void); diff --git a/sdk/objc/api/peerconnection/RTCMetrics.mm b/sdk/objc/api/peerconnection/RTCMetrics.mm index 8ca9d965bd..b3ad352084 100644 --- a/sdk/objc/api/peerconnection/RTCMetrics.mm +++ b/sdk/objc/api/peerconnection/RTCMetrics.mm @@ -16,7 +16,7 @@ void RTCEnableMetrics(void) { webrtc::metrics::Enable(); } -NSArray *RTCGetAndResetMetrics(void) { +NSArray *RTCGetAndResetMetrics(void) { std::map> histograms; webrtc::metrics::GetAndReset(&histograms); @@ -24,8 +24,8 @@ NSArray *RTCGetAndResetMetrics(void) { NSMutableArray *metrics = [NSMutableArray arrayWithCapacity:histograms.size()]; for (auto const &histogram : histograms) { - RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc] - initWithNativeSampleInfo:*histogram.second]; + RTC_OBJC_TYPE(RTCMetricsSampleInfo) *metric = + [[RTC_OBJC_TYPE(RTCMetricsSampleInfo) alloc] initWithNativeSampleInfo:*histogram.second]; [metrics addObject:metric]; } return metrics; diff --git 
a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h index c465b1c756..e4aa41f6c7 100644 --- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h +++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMetricsSampleInfo () +@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) +() -/** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */ -- (instancetype)initWithNativeSampleInfo:(const webrtc::metrics::SampleInfo &)info; + /** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */ + - (instancetype)initWithNativeSampleInfo : (const webrtc::metrics::SampleInfo &)info; @end diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h index cd38ab9a91..47a877b6fb 100644 --- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h +++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCMetricsSampleInfo : NSObject +@interface RTC_OBJC_TYPE (RTCMetricsSampleInfo) : NSObject /** * Example of RTCMetricsSampleInfo: diff --git a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm index a4937fbeac..e4be94e90a 100644 --- a/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm +++ b/sdk/objc/api/peerconnection/RTCMetricsSampleInfo.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCMetricsSampleInfo +@implementation RTC_OBJC_TYPE (RTCMetricsSampleInfo) @synthesize name = _name; @synthesize min = _min; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm index 6c84fa3f61..1ded45d670 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+DataChannel.mm @@ 
-14,10 +14,12 @@ #import "RTCDataChannelConfiguration+Private.h" #import "helpers/NSString+StdString.h" -@implementation RTCPeerConnection (DataChannel) +@implementation RTC_OBJC_TYPE (RTCPeerConnection) +(DataChannel) -- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label - configuration:(RTCDataChannelConfiguration *)configuration { + - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel + : (NSString *)label configuration + : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration { std::string labelString = [NSString stdStringForString:label]; const webrtc::DataChannelInit nativeInit = configuration.nativeDataChannelInit; @@ -27,7 +29,8 @@ if (!dataChannel) { return nil; } - return [[RTCDataChannel alloc] initWithFactory:self.factory nativeDataChannel:dataChannel]; + return [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:self.factory + nativeDataChannel:dataChannel]; } @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h index 93b4ec7c3f..735881025a 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h @@ -22,7 +22,7 @@ namespace webrtc { */ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { public: - PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection); + PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection); ~PeerConnectionDelegateAdapter() override; void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override; @@ -58,15 +58,17 @@ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { void OnRemoveTrack(rtc::scoped_refptr receiver) override; private: - __weak RTCPeerConnection *peer_connection_; + __weak RTC_OBJC_TYPE(RTCPeerConnection) * peer_connection_; }; } // namespace webrtc -@interface RTCPeerConnection () +@interface RTC_OBJC_TYPE (RTCPeerConnection) +() -/** The factory 
used to create this RTCPeerConnection */ -@property(nonatomic, readonly) RTCPeerConnectionFactory *factory; + /** The factory used to create this RTCPeerConnection */ + @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * + factory; /** The native PeerConnectionInterface created during construction. */ @property(nonatomic, readonly) rtc::scoped_refptr @@ -75,20 +77,20 @@ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { /** Initialize an RTCPeerConnection with a configuration, constraints, and * delegate. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - configuration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints - delegate:(nullable id)delegate; +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate:(nullable id)delegate; /** Initialize an RTCPeerConnection with a configuration, constraints, * delegate and PeerConnectionDependencies. 
*/ -- (instancetype)initWithDependencies:(RTCPeerConnectionFactory *)factory - configuration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints +- (instancetype)initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints dependencies: (std::unique_ptr)dependencies - delegate:(nullable id)delegate + delegate:(nullable id)delegate NS_DESIGNATED_INITIALIZER; + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState: diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm index e2965ebab7..46a6e3c780 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm @@ -39,8 +39,8 @@ class StatsCollectorCallbackAdapter : public RTCStatsCollectorCallback { class StatsObserverAdapter : public StatsObserver { public: - StatsObserverAdapter(void (^completionHandler) - (NSArray *stats)) { + StatsObserverAdapter( + void (^completionHandler)(NSArray *stats)) { completion_handler_ = completionHandler; } @@ -50,8 +50,8 @@ class StatsObserverAdapter : public StatsObserver { RTC_DCHECK(completion_handler_); NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()]; for (const auto* report : reports) { - RTCLegacyStatsReport *statsReport = - [[RTCLegacyStatsReport alloc] initWithNativeReport:*report]; + RTC_OBJC_TYPE(RTCLegacyStatsReport) *statsReport = + [[RTC_OBJC_TYPE(RTCLegacyStatsReport) alloc] initWithNativeReport:*report]; [stats addObject:statsReport]; } completion_handler_(stats); @@ -59,20 +59,21 @@ class StatsObserverAdapter : public StatsObserver { } private: - void (^completion_handler_)(NSArray *stats); + void (^completion_handler_)(NSArray *stats); }; } // namespace webrtc -@implementation RTCPeerConnection (Stats) 
+@implementation RTC_OBJC_TYPE (RTCPeerConnection) +(Stats) -- (void)statisticsForSender:(RTCRtpSender *)sender - completionHandler:(RTCStatisticsCompletionHandler)completionHandler { + - (void)statisticsForSender : (RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler + : (RTCStatisticsCompletionHandler)completionHandler { rtc::scoped_refptr collector( new rtc::RefCountedObject(completionHandler)); self.nativePeerConnection->GetStats(sender.nativeRtpSender, collector); } -- (void)statisticsForReceiver:(RTCRtpReceiver *)receiver +- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver completionHandler:(RTCStatisticsCompletionHandler)completionHandler { rtc::scoped_refptr collector( new rtc::RefCountedObject(completionHandler)); @@ -85,10 +86,10 @@ class StatsObserverAdapter : public StatsObserver { self.nativePeerConnection->GetStats(collector); } -- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack +- (void)statsForTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel completionHandler: - (void (^)(NSArray *stats))completionHandler { + (void (^)(NSArray *stats))completionHandler { rtc::scoped_refptr observer( new rtc::RefCountedObject (completionHandler)); diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.h b/sdk/objc/api/peerconnection/RTCPeerConnection.h index 012295c241..cfc0a3d824 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.h @@ -12,21 +12,21 @@ #import "RTCMacros.h" -@class RTCConfiguration; -@class RTCDataChannel; -@class RTCDataChannelConfiguration; -@class RTCIceCandidate; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCMediaStreamTrack; -@class RTCPeerConnectionFactory; -@class RTCRtpReceiver; -@class RTCRtpSender; -@class RTCRtpTransceiver; -@class RTCRtpTransceiverInit; -@class RTCSessionDescription; +@class RTC_OBJC_TYPE(RTCConfiguration); +@class 
RTC_OBJC_TYPE(RTCDataChannel); +@class RTC_OBJC_TYPE(RTCDataChannelConfiguration); +@class RTC_OBJC_TYPE(RTCIceCandidate); +@class RTC_OBJC_TYPE(RTCMediaConstraints); +@class RTC_OBJC_TYPE(RTCMediaStream); +@class RTC_OBJC_TYPE(RTCMediaStreamTrack); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@class RTC_OBJC_TYPE(RTCRtpReceiver); +@class RTC_OBJC_TYPE(RTCRtpSender); +@class RTC_OBJC_TYPE(RTCRtpTransceiver); +@class RTC_OBJC_TYPE(RTCRtpTransceiverInit); +@class RTC_OBJC_TYPE(RTCSessionDescription); @class RTCStatisticsReport; -@class RTCLegacyStatsReport; +@class RTC_OBJC_TYPE(RTCLegacyStatsReport); typedef NS_ENUM(NSInteger, RTCRtpMediaType); @@ -81,45 +81,49 @@ typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) { RTCStatsOutputLevelDebug, }; -@class RTCPeerConnection; +@class RTC_OBJC_TYPE(RTCPeerConnection); RTC_OBJC_EXPORT -@protocol RTCPeerConnectionDelegate +@protocol RTC_OBJC_TYPE +(RTCPeerConnectionDelegate) -/** Called when the SignalingState changed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeSignalingState:(RTCSignalingState)stateChanged; + /** Called when the SignalingState changed. */ + - (void)peerConnection + : (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState + : (RTCSignalingState)stateChanged; /** Called when media is received on a new stream from remote peer. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Called when a remote peer closes a stream. * This is not called when RTCSdpSemanticsUnifiedPlan is specified. 
*/ -- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Called when negotiation is needed, for example ICE has restarted. */ -- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection; +- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection; /** Called any time the IceConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState; /** Called any time the IceGatheringState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState; /** New ice candidate has been found. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didGenerateIceCandidate:(RTCIceCandidate *)candidate; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate; /** Called when a group of local Ice candidates have been removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveIceCandidates:(NSArray *)candidates; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveIceCandidates:(NSArray *)candidates; /** New data channel has been opened. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didOpenDataChannel:(RTCDataChannel *)dataChannel; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel; /** Called when signaling indicates a transceiver will be receiving media from * the remote endpoint. 
@@ -128,72 +132,72 @@ RTC_OBJC_EXPORT @optional /** Called any time the IceConnectionState changes following standardized * transition. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeStandardizedIceConnectionState:(RTCIceConnectionState)newState; /** Called any time the PeerConnectionState changes. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeConnectionState:(RTCPeerConnectionState)newState; -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver; /** Called when a receiver and its track are created. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didAddReceiver:(RTCRtpReceiver *)rtpReceiver - streams:(NSArray *)mediaStreams; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didAddReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver + streams:(NSArray *)mediaStreams; /** Called when the receiver and its track are removed. */ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didRemoveReceiver:(RTCRtpReceiver *)rtpReceiver; +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didRemoveReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver; /** Called when the selected ICE candidate pair is changed. 
*/ -- (void)peerConnection:(RTCPeerConnection *)peerConnection - didChangeLocalCandidate:(RTCIceCandidate *)local - remoteCandidate:(RTCIceCandidate *)remote +- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection + didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local + remoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote lastReceivedMs:(int)lastDataReceivedMs changeReason:(NSString *)reason; @end RTC_OBJC_EXPORT -@interface RTCPeerConnection : NSObject +@interface RTC_OBJC_TYPE (RTCPeerConnection) : NSObject /** The object that will be notifed about events such as state changes and * streams being added or removed. */ -@property(nonatomic, weak, nullable) id delegate; +@property(nonatomic, weak, nullable) id delegate; /** This property is not available with RTCSdpSemanticsUnifiedPlan. Please use * |senders| instead. */ -@property(nonatomic, readonly) NSArray *localStreams; -@property(nonatomic, readonly, nullable) RTCSessionDescription *localDescription; -@property(nonatomic, readonly, nullable) RTCSessionDescription *remoteDescription; +@property(nonatomic, readonly) NSArray *localStreams; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * localDescription; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * remoteDescription; @property(nonatomic, readonly) RTCSignalingState signalingState; @property(nonatomic, readonly) RTCIceConnectionState iceConnectionState; @property(nonatomic, readonly) RTCPeerConnectionState connectionState; @property(nonatomic, readonly) RTCIceGatheringState iceGatheringState; -@property(nonatomic, readonly, copy) RTCConfiguration *configuration; +@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * configuration; /** Gets all RTCRtpSenders associated with this peer connection. * Note: reading this property returns different instances of RTCRtpSender. * Use isEqual: instead of == to compare RTCRtpSender instances. 
*/ -@property(nonatomic, readonly) NSArray *senders; +@property(nonatomic, readonly) NSArray *senders; /** Gets all RTCRtpReceivers associated with this peer connection. * Note: reading this property returns different instances of RTCRtpReceiver. * Use isEqual: instead of == to compare RTCRtpReceiver instances. */ -@property(nonatomic, readonly) NSArray *receivers; +@property(nonatomic, readonly) NSArray *receivers; /** Gets all RTCRtpTransceivers associated with this peer connection. * Note: reading this property returns different instances of - * RTCRtpTransceiver. Use isEqual: instead of == to compare RTCRtpTransceiver - * instances. - * This is only available with RTCSdpSemanticsUnifiedPlan specified. + * RTCRtpTransceiver. Use isEqual: instead of == to compare + * RTCRtpTransceiver instances. This is only available with + * RTCSdpSemanticsUnifiedPlan specified. */ -@property(nonatomic, readonly) NSArray *transceivers; +@property(nonatomic, readonly) NSArray *transceivers; - (instancetype)init NS_UNAVAILABLE; @@ -203,38 +207,39 @@ RTC_OBJC_EXPORT * new ICE credentials. Note that the BUNDLE and RTCP-multiplexing policies * cannot be changed with this method. */ -- (BOOL)setConfiguration:(RTCConfiguration *)configuration; +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration; /** Terminate all media and close the transport. */ - (void)close; /** Provide a remote candidate to the ICE Agent. */ -- (void)addIceCandidate:(RTCIceCandidate *)candidate; +- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate; /** Remove a group of remote candidates from the ICE Agent. */ -- (void)removeIceCandidates:(NSArray *)candidates; +- (void)removeIceCandidates:(NSArray *)candidates; /** Add a new media stream to be sent on this peer connection. * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use * addTrack instead. 
*/ -- (void)addStream:(RTCMediaStream *)stream; +- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Remove the given media stream from this peer connection. * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use * removeTrack instead. */ -- (void)removeStream:(RTCMediaStream *)stream; +- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream; /** Add a new media stream track to be sent on this peer connection, and return - * the newly created RTCRtpSender. The RTCRtpSender will be associated with - * the streams specified in the |streamIds| list. + * the newly created RTCRtpSender. The RTCRtpSender will be + * associated with the streams specified in the |streamIds| list. * * Errors: If an error occurs, returns nil. An error can occur if: * - A sender already exists for the track. * - The peer connection is closed. */ -- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray *)streamIds; +- (RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + streamIds:(NSArray *)streamIds; /** With PlanB semantics, removes an RTCRtpSender from this peer connection. * @@ -243,7 +248,7 @@ RTC_OBJC_EXPORT * * Returns YES on success. */ -- (BOOL)removeTrack:(RTCRtpSender *)sender; +- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender; /** addTransceiver creates a new RTCRtpTransceiver and adds it to the set of * transceivers. Adding a transceiver will cause future calls to CreateOffer @@ -266,33 +271,37 @@ RTC_OBJC_EXPORT * of the transceiver (and sender/receiver) will be derived from the kind of * the track. 
*/ -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track; -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track - init:(RTCRtpTransceiverInit *)init; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack: + (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *) + addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init; /** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio * or RTCRtpMediaTypeVideo. */ -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType; -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType - init:(RTCRtpTransceiverInit *)init; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType + init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *) + init; /** Generate an SDP offer. */ -- (void)offerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp, +- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler:(nullable void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * _Nullable sdp, NSError *_Nullable error))completionHandler; /** Generate an SDP answer. */ -- (void)answerForConstraints:(RTCMediaConstraints *)constraints - completionHandler:(nullable void (^)(RTCSessionDescription *_Nullable sdp, - NSError *_Nullable error))completionHandler; +- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler: + (nullable void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * _Nullable sdp, + NSError *_Nullable error))completionHandler; /** Apply the supplied RTCSessionDescription as the local description. 
*/ -- (void)setLocalDescription:(RTCSessionDescription *)sdp +- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler; /** Apply the supplied RTCSessionDescription as the remote description. */ -- (void)setRemoteDescription:(RTCSessionDescription *)sdp +- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp completionHandler:(nullable void (^)(NSError *_Nullable error))completionHandler; /** Limits the bandwidth allocated for all RTP streams sent by this @@ -310,35 +319,40 @@ RTC_OBJC_EXPORT @end -@interface RTCPeerConnection (Media) +@interface RTC_OBJC_TYPE (RTCPeerConnection) +(Media) -/** Create an RTCRtpSender with the specified kind and media stream ID. - * See RTCMediaStreamTrack.h for available kinds. - * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use - * addTransceiver instead. - */ -- (RTCRtpSender *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId; + /** Create an RTCRtpSender with the specified kind and media stream ID. + * See RTCMediaStreamTrack.h for available kinds. + * This method is not supported with RTCSdpSemanticsUnifiedPlan. Please use + * addTransceiver instead. + */ + - (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind : (NSString *)kind streamId + : (NSString *)streamId; @end -@interface RTCPeerConnection (DataChannel) +@interface RTC_OBJC_TYPE (RTCPeerConnection) +(DataChannel) -/** Create a new data channel with the given label and configuration. */ -- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label - configuration:(RTCDataChannelConfiguration *)configuration; + /** Create a new data channel with the given label and configuration. 
*/ + - (nullable RTC_OBJC_TYPE(RTCDataChannel) *)dataChannelForLabel + : (NSString *)label configuration : (RTC_OBJC_TYPE(RTCDataChannelConfiguration) *)configuration; @end typedef void (^RTCStatisticsCompletionHandler)(RTCStatisticsReport *); -@interface RTCPeerConnection (Stats) +@interface RTC_OBJC_TYPE (RTCPeerConnection) +(Stats) -/** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil - * statistics are gathered for all tracks. - */ -- (void)statsForTrack:(nullable RTCMediaStreamTrack *)mediaStreamTrack - statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel - completionHandler:(nullable void (^)(NSArray *stats))completionHandler; + /** Gather stats for the given RTCMediaStreamTrack. If |mediaStreamTrack| is nil + * statistics are gathered for all tracks. + */ + - (void)statsForTrack + : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel + : (RTCStatsOutputLevel)statsOutputLevel completionHandler + : (nullable void (^)(NSArray *stats))completionHandler; /** Gather statistic through the v2 statistics API. */ - (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completionHandler; @@ -346,13 +360,13 @@ typedef void (^RTCStatisticsCompletionHandler)(RTCStatisticsReport *); /** Spec-compliant getStats() performing the stats selection algorithm with the * sender. */ -- (void)statisticsForSender:(RTCRtpSender *)sender +- (void)statisticsForSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender completionHandler:(RTCStatisticsCompletionHandler)completionHandler; /** Spec-compliant getStats() performing the stats selection algorithm with the * receiver. 
*/ -- (void)statisticsForReceiver:(RTCRtpReceiver *)receiver +- (void)statisticsForReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver completionHandler:(RTCStatisticsCompletionHandler)completionHandler; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm index ebdd12033f..fa68d08e74 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm @@ -33,8 +33,7 @@ #include "rtc_base/checks.h" #include "rtc_base/numerics/safe_conversions.h" -NSString * const kRTCPeerConnectionErrorDomain = - @"org.webrtc.RTCPeerConnection"; +NSString *const kRTCPeerConnectionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)"; int const kRTCPeerConnnectionSessionDescriptionError = -1; namespace webrtc { @@ -42,9 +41,8 @@ namespace webrtc { class CreateSessionDescriptionObserverAdapter : public CreateSessionDescriptionObserver { public: - CreateSessionDescriptionObserverAdapter( - void (^completionHandler)(RTCSessionDescription *sessionDescription, - NSError *error)) { + CreateSessionDescriptionObserverAdapter(void (^completionHandler)( + RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, NSError *error)) { completion_handler_ = completionHandler; } @@ -54,9 +52,8 @@ class CreateSessionDescriptionObserverAdapter RTC_DCHECK(completion_handler_); std::unique_ptr description = std::unique_ptr(desc); - RTCSessionDescription* session = - [[RTCSessionDescription alloc] initWithNativeDescription: - description.get()]; + RTC_OBJC_TYPE(RTCSessionDescription) *session = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description.get()]; completion_handler_(session, nil); completion_handler_ = nil; } @@ -74,8 +71,8 @@ class CreateSessionDescriptionObserverAdapter } private: - void (^completion_handler_) - (RTCSessionDescription *sessionDescription, NSError *error); + void (^completion_handler_)(RTC_OBJC_TYPE(RTCSessionDescription) * 
sessionDescription, + NSError *error); }; class SetSessionDescriptionObserverAdapter : @@ -110,8 +107,8 @@ class SetSessionDescriptionObserverAdapter : void (^completion_handler_)(NSError *error); }; -PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter( - RTCPeerConnection *peerConnection) { +PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(RTC_OBJC_TYPE(RTCPeerConnection) * + peerConnection) { peer_connection_ = peerConnection; } @@ -122,26 +119,28 @@ PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() { void PeerConnectionDelegateAdapter::OnSignalingChange( PeerConnectionInterface::SignalingState new_state) { RTCSignalingState state = - [[RTCPeerConnection class] signalingStateForNativeState:new_state]; - RTCPeerConnection *peer_connection = peer_connection_; + [[RTC_OBJC_TYPE(RTCPeerConnection) class] signalingStateForNativeState:new_state]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection didChangeSignalingState:state]; } void PeerConnectionDelegateAdapter::OnAddStream( rtc::scoped_refptr stream) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCMediaStream *mediaStream = - [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCMediaStream) *mediaStream = + [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory + nativeMediaStream:stream]; [peer_connection.delegate peerConnection:peer_connection didAddStream:mediaStream]; } void PeerConnectionDelegateAdapter::OnRemoveStream( rtc::scoped_refptr stream) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCMediaStream *mediaStream = - [[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCMediaStream) 
*mediaStream = + [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory + nativeMediaStream:stream]; [peer_connection.delegate peerConnection:peer_connection didRemoveStream:mediaStream]; @@ -149,10 +148,10 @@ void PeerConnectionDelegateAdapter::OnRemoveStream( void PeerConnectionDelegateAdapter::OnTrack( rtc::scoped_refptr nativeTransceiver) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCRtpTransceiver *transceiver = - [[RTCRtpTransceiver alloc] initWithFactory:peer_connection.factory - nativeRtpTransceiver:nativeTransceiver]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = + [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:peer_connection.factory + nativeRtpTransceiver:nativeTransceiver]; if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didStartReceivingOnTransceiver:)]) { [peer_connection.delegate peerConnection:peer_connection @@ -162,21 +161,23 @@ void PeerConnectionDelegateAdapter::OnTrack( void PeerConnectionDelegateAdapter::OnDataChannel( rtc::scoped_refptr data_channel) { - RTCPeerConnection *peer_connection = peer_connection_; - RTCDataChannel *dataChannel = [[RTCDataChannel alloc] initWithFactory:peer_connection.factory - nativeDataChannel:data_channel]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCDataChannel) *dataChannel = + [[RTC_OBJC_TYPE(RTCDataChannel) alloc] initWithFactory:peer_connection.factory + nativeDataChannel:data_channel]; [peer_connection.delegate peerConnection:peer_connection didOpenDataChannel:dataChannel]; } void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() { - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnectionShouldNegotiate:peer_connection]; } void PeerConnectionDelegateAdapter::OnIceConnectionChange( 
PeerConnectionInterface::IceConnectionState new_state) { - RTCIceConnectionState state = [RTCPeerConnection iceConnectionStateForNativeState:new_state]; + RTCIceConnectionState state = + [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeIceConnectionState:state]; } @@ -184,7 +185,8 @@ void PeerConnectionDelegateAdapter::OnStandardizedIceConnectionChange( PeerConnectionInterface::IceConnectionState new_state) { if ([peer_connection_.delegate respondsToSelector:@selector(peerConnection:didChangeStandardizedIceConnectionState:)]) { - RTCIceConnectionState state = [RTCPeerConnection iceConnectionStateForNativeState:new_state]; + RTCIceConnectionState state = + [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeStandardizedIceConnectionState:state]; } @@ -194,7 +196,8 @@ void PeerConnectionDelegateAdapter::OnConnectionChange( PeerConnectionInterface::PeerConnectionState new_state) { if ([peer_connection_.delegate respondsToSelector:@selector(peerConnection:didChangeConnectionState:)]) { - RTCPeerConnectionState state = [RTCPeerConnection connectionStateForNativeState:new_state]; + RTCPeerConnectionState state = + [RTC_OBJC_TYPE(RTCPeerConnection) connectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeConnectionState:state]; } } @@ -202,17 +205,17 @@ void PeerConnectionDelegateAdapter::OnConnectionChange( void PeerConnectionDelegateAdapter::OnIceGatheringChange( PeerConnectionInterface::IceGatheringState new_state) { RTCIceGatheringState state = - [[RTCPeerConnection class] iceGatheringStateForNativeState:new_state]; - RTCPeerConnection *peer_connection = peer_connection_; + [[RTC_OBJC_TYPE(RTCPeerConnection) class] iceGatheringStateForNativeState:new_state]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = 
peer_connection_; [peer_connection.delegate peerConnection:peer_connection didChangeIceGatheringState:state]; } void PeerConnectionDelegateAdapter::OnIceCandidate( const IceCandidateInterface *candidate) { - RTCIceCandidate *iceCandidate = - [[RTCIceCandidate alloc] initWithNativeCandidate:candidate]; - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection didGenerateIceCandidate:iceCandidate]; } @@ -224,11 +227,11 @@ void PeerConnectionDelegateAdapter::OnIceCandidatesRemoved( for (const auto& candidate : candidates) { std::unique_ptr candidate_wrapper( new JsepIceCandidate(candidate.transport_name(), -1, candidate)); - RTCIceCandidate* ice_candidate = [[RTCIceCandidate alloc] - initWithNativeCandidate:candidate_wrapper.get()]; + RTC_OBJC_TYPE(RTCIceCandidate) *ice_candidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:candidate_wrapper.get()]; [ice_candidates addObject:ice_candidate]; } - RTCPeerConnection* peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection didRemoveIceCandidates:ice_candidates]; } @@ -238,13 +241,13 @@ void PeerConnectionDelegateAdapter::OnIceSelectedCandidatePairChanged( const auto &selected_pair = event.selected_candidate_pair; auto local_candidate_wrapper = std::make_unique( selected_pair.local_candidate().transport_name(), -1, selected_pair.local_candidate()); - RTCIceCandidate *local_candidate = - [[RTCIceCandidate alloc] initWithNativeCandidate:local_candidate_wrapper.release()]; + RTC_OBJC_TYPE(RTCIceCandidate) *local_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] + initWithNativeCandidate:local_candidate_wrapper.release()]; auto 
remote_candidate_wrapper = std::make_unique( selected_pair.remote_candidate().transport_name(), -1, selected_pair.remote_candidate()); - RTCIceCandidate *remote_candidate = - [[RTCIceCandidate alloc] initWithNativeCandidate:remote_candidate_wrapper.release()]; - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCIceCandidate) *remote_candidate = [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] + initWithNativeCandidate:remote_candidate_wrapper.release()]; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; NSString *nsstr_reason = [NSString stringForStdString:event.reason]; if ([peer_connection.delegate respondsToSelector:@selector @@ -260,17 +263,19 @@ void PeerConnectionDelegateAdapter::OnIceSelectedCandidatePairChanged( void PeerConnectionDelegateAdapter::OnAddTrack( rtc::scoped_refptr receiver, const std::vector> &streams) { - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; if ([peer_connection.delegate respondsToSelector:@selector(peerConnection: didAddReceiver:streams:)]) { NSMutableArray *mediaStreams = [NSMutableArray arrayWithCapacity:streams.size()]; for (const auto &nativeStream : streams) { - RTCMediaStream *mediaStream = [[RTCMediaStream alloc] initWithFactory:peer_connection.factory - nativeMediaStream:nativeStream]; + RTC_OBJC_TYPE(RTCMediaStream) *mediaStream = + [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:peer_connection.factory + nativeMediaStream:nativeStream]; [mediaStreams addObject:mediaStream]; } - RTCRtpReceiver *rtpReceiver = [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory - nativeRtpReceiver:receiver]; + RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory + nativeRtpReceiver:receiver]; [peer_connection.delegate peerConnection:peer_connection didAddReceiver:rtpReceiver @@ -280,19 +285,20 @@ void 
PeerConnectionDelegateAdapter::OnAddTrack( void PeerConnectionDelegateAdapter::OnRemoveTrack( rtc::scoped_refptr receiver) { - RTCPeerConnection *peer_connection = peer_connection_; + RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didRemoveReceiver:)]) { - RTCRtpReceiver *rtpReceiver = [[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory - nativeRtpReceiver:receiver]; + RTC_OBJC_TYPE(RTCRtpReceiver) *rtpReceiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:peer_connection.factory + nativeRtpReceiver:receiver]; [peer_connection.delegate peerConnection:peer_connection didRemoveReceiver:rtpReceiver]; } } } // namespace webrtc -@implementation RTCPeerConnection { - RTCPeerConnectionFactory *_factory; - NSMutableArray *_localStreams; +@implementation RTC_OBJC_TYPE (RTCPeerConnection) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; + NSMutableArray *_localStreams; std::unique_ptr _observer; rtc::scoped_refptr _peerConnection; std::unique_ptr _nativeConstraints; @@ -302,10 +308,10 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( @synthesize delegate = _delegate; @synthesize factory = _factory; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - configuration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints - delegate:(id)delegate { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate:(id)delegate { NSParameterAssert(factory); std::unique_ptr dependencies = std::make_unique(nullptr); @@ -316,12 +322,12 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( delegate:delegate]; } -- (instancetype)initWithDependencies:(RTCPeerConnectionFactory *)factory - configuration:(RTCConfiguration *)configuration - 
constraints:(RTCMediaConstraints *)constraints +- (instancetype)initWithDependencies:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + configuration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints dependencies: (std::unique_ptr)dependencies - delegate:(id)delegate { + delegate:(id)delegate { NSParameterAssert(factory); NSParameterAssert(dependencies.get()); std::unique_ptr config( @@ -348,24 +354,24 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( return self; } -- (NSArray *)localStreams { +- (NSArray *)localStreams { return [_localStreams copy]; } -- (RTCSessionDescription *)localDescription { +- (RTC_OBJC_TYPE(RTCSessionDescription) *)localDescription { const webrtc::SessionDescriptionInterface *description = _peerConnection->local_description(); return description ? - [[RTCSessionDescription alloc] initWithNativeDescription:description] - : nil; + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] : + nil; } -- (RTCSessionDescription *)remoteDescription { +- (RTC_OBJC_TYPE(RTCSessionDescription) *)remoteDescription { const webrtc::SessionDescriptionInterface *description = _peerConnection->remote_description(); return description ? 
- [[RTCSessionDescription alloc] initWithNativeDescription:description] - : nil; + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:description] : + nil; } - (RTCSignalingState)signalingState { @@ -387,7 +393,7 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( _peerConnection->ice_gathering_state()]; } -- (BOOL)setConfiguration:(RTCConfiguration *)configuration { +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration { std::unique_ptr config( [configuration createNativeConfiguration]); if (!config) { @@ -398,25 +404,25 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( return _peerConnection->SetConfiguration(*config).ok(); } -- (RTCConfiguration *)configuration { +- (RTC_OBJC_TYPE(RTCConfiguration) *)configuration { webrtc::PeerConnectionInterface::RTCConfiguration config = _peerConnection->GetConfiguration(); - return [[RTCConfiguration alloc] initWithNativeConfiguration:config]; + return [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:config]; } - (void)close { _peerConnection->Close(); } -- (void)addIceCandidate:(RTCIceCandidate *)candidate { +- (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate { std::unique_ptr iceCandidate( candidate.nativeCandidate); _peerConnection->AddIceCandidate(iceCandidate.get()); } -- (void)removeIceCandidates:(NSArray *)iceCandidates { +- (void)removeIceCandidates:(NSArray *)iceCandidates { std::vector candidates; - for (RTCIceCandidate *iceCandidate in iceCandidates) { + for (RTC_OBJC_TYPE(RTCIceCandidate) * iceCandidate in iceCandidates) { std::unique_ptr candidate( iceCandidate.nativeCandidate); if (candidate) { @@ -430,7 +436,7 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( } } -- (void)addStream:(RTCMediaStream *)stream { +- (void)addStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream { if (!_peerConnection->AddStream(stream.nativeMediaStream)) { RTCLogError(@"Failed to add stream: %@", stream); return; @@ -438,12 +444,13 @@ void 
PeerConnectionDelegateAdapter::OnRemoveTrack( [_localStreams addObject:stream]; } -- (void)removeStream:(RTCMediaStream *)stream { +- (void)removeStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream { _peerConnection->RemoveStream(stream.nativeMediaStream); [_localStreams removeObject:stream]; } -- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray *)streamIds { +- (RTC_OBJC_TYPE(RTCRtpSender) *)addTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + streamIds:(NSArray *)streamIds { std::vector nativeStreamIds; for (NSString *streamId in streamIds) { nativeStreamIds.push_back([streamId UTF8String]); @@ -454,11 +461,11 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( RTCLogError(@"Failed to add track %@: %s", track, nativeSenderOrError.error().message()); return nil; } - return [[RTCRtpSender alloc] initWithFactory:self.factory - nativeRtpSender:nativeSenderOrError.MoveValue()]; + return [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory + nativeRtpSender:nativeSenderOrError.MoveValue()]; } -- (BOOL)removeTrack:(RTCRtpSender *)sender { +- (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender { bool result = _peerConnection->RemoveTrack(sender.nativeRtpSender); if (!result) { RTCLogError(@"Failed to remote track %@", sender); @@ -466,12 +473,15 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( return result; } -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track { - return [self addTransceiverWithTrack:track init:[[RTCRtpTransceiverInit alloc] init]]; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverWithTrack: + (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { + return [self addTransceiverWithTrack:track + init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; } -- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track - init:(RTCRtpTransceiverInit *)init { +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *) + addTransceiverWithTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track + 
init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init { webrtc::RTCErrorOr> nativeTransceiverOrError = _peerConnection->AddTransceiver(track.nativeTrack, init.nativeInit); if (!nativeTransceiverOrError.ok()) { @@ -479,33 +489,36 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( @"Failed to add transceiver %@: %s", track, nativeTransceiverOrError.error().message()); return nil; } - return [[RTCRtpTransceiver alloc] initWithFactory:self.factory - nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; + return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] + initWithFactory:self.factory + nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; } -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType { - return [self addTransceiverOfType:mediaType init:[[RTCRtpTransceiverInit alloc] init]]; +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType { + return [self addTransceiverOfType:mediaType + init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; } -- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType - init:(RTCRtpTransceiverInit *)init { +- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType + init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *) + init { webrtc::RTCErrorOr> nativeTransceiverOrError = - _peerConnection->AddTransceiver([RTCRtpReceiver nativeMediaTypeForMediaType:mediaType], - init.nativeInit); + _peerConnection->AddTransceiver( + [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:mediaType], init.nativeInit); if (!nativeTransceiverOrError.ok()) { RTCLogError(@"Failed to add transceiver %@: %s", - [RTCRtpReceiver stringForMediaType:mediaType], + [RTC_OBJC_TYPE(RTCRtpReceiver) stringForMediaType:mediaType], nativeTransceiverOrError.error().message()); return nil; } - return [[RTCRtpTransceiver alloc] initWithFactory:self.factory - nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; + return [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] + 
initWithFactory:self.factory + nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; } -- (void)offerForConstraints:(RTCMediaConstraints *)constraints - completionHandler: - (void (^)(RTCSessionDescription *sessionDescription, - NSError *error))completionHandler { +- (void)offerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler:(void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, + NSError *error))completionHandler { rtc::scoped_refptr observer(new rtc::RefCountedObject (completionHandler)); @@ -515,10 +528,9 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( _peerConnection->CreateOffer(observer, options); } -- (void)answerForConstraints:(RTCMediaConstraints *)constraints - completionHandler: - (void (^)(RTCSessionDescription *sessionDescription, - NSError *error))completionHandler { +- (void)answerForConstraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + completionHandler:(void (^)(RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription, + NSError *error))completionHandler { rtc::scoped_refptr observer(new rtc::RefCountedObject (completionHandler)); @@ -528,7 +540,7 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( _peerConnection->CreateAnswer(observer, options); } -- (void)setLocalDescription:(RTCSessionDescription *)sdp +- (void)setLocalDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp completionHandler:(void (^)(NSError *error))completionHandler { rtc::scoped_refptr observer( new rtc::RefCountedObject( @@ -536,7 +548,7 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( _peerConnection->SetLocalDescription(observer, sdp.nativeDescription); } -- (void)setRemoteDescription:(RTCSessionDescription *)sdp +- (void)setRemoteDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp completionHandler:(void (^)(NSError *error))completionHandler { rtc::scoped_refptr observer( new rtc::RefCountedObject( @@ -588,48 +600,50 @@ void PeerConnectionDelegateAdapter::OnRemoveTrack( 
_hasStartedRtcEventLog = NO; } -- (RTCRtpSender *)senderWithKind:(NSString *)kind - streamId:(NSString *)streamId { +- (RTC_OBJC_TYPE(RTCRtpSender) *)senderWithKind:(NSString *)kind streamId:(NSString *)streamId { std::string nativeKind = [NSString stdStringForString:kind]; std::string nativeStreamId = [NSString stdStringForString:streamId]; rtc::scoped_refptr nativeSender( _peerConnection->CreateSender(nativeKind, nativeStreamId)); - return nativeSender ? - [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender] : - nil; + return nativeSender ? [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory + nativeRtpSender:nativeSender] : + nil; } -- (NSArray *)senders { +- (NSArray *)senders { std::vector> nativeSenders( _peerConnection->GetSenders()); NSMutableArray *senders = [[NSMutableArray alloc] init]; for (const auto &nativeSender : nativeSenders) { - RTCRtpSender *sender = - [[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender]; + RTC_OBJC_TYPE(RTCRtpSender) *sender = + [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:self.factory + nativeRtpSender:nativeSender]; [senders addObject:sender]; } return senders; } -- (NSArray *)receivers { +- (NSArray *)receivers { std::vector> nativeReceivers( _peerConnection->GetReceivers()); NSMutableArray *receivers = [[NSMutableArray alloc] init]; for (const auto &nativeReceiver : nativeReceivers) { - RTCRtpReceiver *receiver = - [[RTCRtpReceiver alloc] initWithFactory:self.factory nativeRtpReceiver:nativeReceiver]; + RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:self.factory + nativeRtpReceiver:nativeReceiver]; [receivers addObject:receiver]; } return receivers; } -- (NSArray *)transceivers { +- (NSArray *)transceivers { std::vector> nativeTransceivers( _peerConnection->GetTransceivers()); NSMutableArray *transceivers = [[NSMutableArray alloc] init]; for (const auto &nativeTransceiver : nativeTransceivers) { 
- RTCRtpTransceiver *transceiver = [[RTCRtpTransceiver alloc] initWithFactory:self.factory - nativeRtpTransceiver:nativeTransceiver]; + RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = + [[RTC_OBJC_TYPE(RTCRtpTransceiver) alloc] initWithFactory:self.factory + nativeRtpTransceiver:nativeTransceiver]; [transceivers addObject:transceiver]; } return transceivers; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h index 7922c91b4b..c2aab0be56 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h @@ -31,9 +31,10 @@ NS_ASSUME_NONNULL_BEGIN /** * This class extension exposes methods that work directly with injectable C++ components. */ -@interface RTCPeerConnectionFactory () +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) +() -- (instancetype)initNative NS_DESIGNATED_INITIALIZER; + - (instancetype)initNative NS_DESIGNATED_INITIALIZER; /* Initializer used when WebRTC is compiled with no media support */ - (instancetype)initWithNoMedia; @@ -84,19 +85,19 @@ NS_ASSUME_NONNULL_BEGIN mediaTransportFactory: (std::unique_ptr)mediaTransportFactory; -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory - mediaTransportFactory: - (std::unique_ptr)mediaTransportFactory; +- (instancetype) + initWithEncoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory + mediaTransportFactory:(std::unique_ptr)mediaTransportFactory; /** Initialize an RTCPeerConnection with a configuration, constraints, and * dependencies. 
*/ -- (RTCPeerConnection *) - peerConnectionWithDependencies:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints dependencies:(std::unique_ptr)dependencies - delegate:(nullable id)delegate; + delegate:(nullable id)delegate; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h index db7829c977..ef61c2ed01 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Private.h @@ -15,16 +15,16 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactory () +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) +() -/** - * PeerConnectionFactoryInterface created and held by this - * RTCPeerConnectionFactory object. This is needed to pass to the underlying - * C++ APIs. - */ -@property(nonatomic, readonly) - rtc::scoped_refptr - nativeFactory; + /** + * PeerConnectionFactoryInterface created and held by this + * RTCPeerConnectionFactory object. This is needed to pass to the underlying + * C++ APIs. 
+ */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeFactory; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index c808218b54..3dcd3b6495 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -14,61 +14,69 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCAudioSource; -@class RTCAudioTrack; -@class RTCConfiguration; -@class RTCMediaConstraints; -@class RTCMediaStream; -@class RTCPeerConnection; -@class RTCVideoSource; -@class RTCVideoTrack; -@class RTCPeerConnectionFactoryOptions; -@protocol RTCPeerConnectionDelegate; -@protocol RTCVideoDecoderFactory; -@protocol RTCVideoEncoderFactory; +@class RTC_OBJC_TYPE(RTCAudioSource); +@class RTC_OBJC_TYPE(RTCAudioTrack); +@class RTC_OBJC_TYPE(RTCConfiguration); +@class RTC_OBJC_TYPE(RTCMediaConstraints); +@class RTC_OBJC_TYPE(RTCMediaStream); +@class RTC_OBJC_TYPE(RTCPeerConnection); +@class RTC_OBJC_TYPE(RTCVideoSource); +@class RTC_OBJC_TYPE(RTCVideoTrack); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); +@protocol RTC_OBJC_TYPE +(RTCPeerConnectionDelegate); +@protocol RTC_OBJC_TYPE +(RTCVideoDecoderFactory); +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderFactory); RTC_OBJC_EXPORT -@interface RTCPeerConnectionFactory : NSObject +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject /* Initialize object with default H264 video encoder/decoder factories */ - (instancetype)init; /* Initialize object with injectable video encoder/decoder factories */ -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory; +- (instancetype) + initWithEncoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory; /** Initialize an RTCAudioSource with constraints. 
*/ -- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints; +- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: + (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints; -/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source with no - * constraints. +/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source + * with no constraints. */ -- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId; /** Initialize an RTCAudioTrack with a source and an id. */ -- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source trackId:(NSString *)trackId; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source + trackId:(NSString *)trackId; -/** Initialize a generic RTCVideoSource. The RTCVideoSource should be passed to a RTCVideoCapturer - * implementation, e.g. RTCCameraVideoCapturer, in order to produce frames. +/** Initialize a generic RTCVideoSource. The RTCVideoSource should be + * passed to a RTCVideoCapturer implementation, e.g. + * RTCCameraVideoCapturer, in order to produce frames. */ -- (RTCVideoSource *)videoSource; +- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource; /** Initialize an RTCVideoTrack with a source and an id. */ -- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source trackId:(NSString *)trackId; +- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source + trackId:(NSString *)trackId; /** Initialize an RTCMediaStream with an id. */ -- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId; +- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId; /** Initialize an RTCPeerConnection with a configuration, constraints, and * delegate. 
*/ -- (RTCPeerConnection *)peerConnectionWithConfiguration:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints - delegate: - (nullable id)delegate; +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate:(nullable id)delegate; /** Set the options to be used for subsequently created RTCPeerConnections */ -- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options; +- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options; /** Start an AecDump recording. This API call will likely change in the future. */ - (BOOL)startAecDumpWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index b2e12d33e2..2e34b05fed 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -55,7 +55,7 @@ #include "api/transport/media/media_transport_interface.h" #include "media/engine/webrtc_media_engine.h" // nogncheck -@implementation RTCPeerConnectionFactory { +@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _networkThread; std::unique_ptr _workerThread; std::unique_ptr _signalingThread; @@ -76,22 +76,23 @@ #ifdef HAVE_NO_MEDIA return [self initWithNoMedia]; #else - return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() - nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() - nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory( - [[RTCVideoEncoderFactoryH264 alloc] init]) - nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory( - [[RTCVideoDecoderFactoryH264 alloc] init]) - audioDeviceModule:[self audioDeviceModule] - audioProcessingModule:nullptr - 
mediaTransportFactory:nullptr]; + return [self + initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE( + RTCVideoEncoderFactoryH264) alloc] init]) + nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE( + RTCVideoDecoderFactoryH264) alloc] init]) + audioDeviceModule:[self audioDeviceModule] + audioProcessingModule:nullptr + mediaTransportFactory:nullptr]; #endif } -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory - mediaTransportFactory: - (std::unique_ptr)mediaTransportFactory { +- (instancetype) + initWithEncoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory + mediaTransportFactory:(std::unique_ptr)mediaTransportFactory { #ifdef HAVE_NO_MEDIA return [self initWithNoMedia]; #else @@ -112,8 +113,9 @@ mediaTransportFactory:std::move(mediaTransportFactory)]; #endif } -- (instancetype)initWithEncoderFactory:(nullable id)encoderFactory - decoderFactory:(nullable id)decoderFactory { +- (instancetype) + initWithEncoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory { return [self initWithEncoderFactory:encoderFactory decoderFactory:decoderFactory mediaTransportFactory:nullptr]; @@ -241,7 +243,8 @@ return self; } -- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints { +- (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: + (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints { std::unique_ptr nativeConstraints; if (constraints) { nativeConstraints = constraints.nativeConstraints; @@ -251,64 +254,58 @@ rtc::scoped_refptr source = _nativeFactory->CreateAudioSource(options); - return [[RTCAudioSource alloc] initWithFactory:self nativeAudioSource:source]; + return 
[[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self nativeAudioSource:source]; } -- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId { - RTCAudioSource *audioSource = [self audioSourceWithConstraints:nil]; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId { + RTC_OBJC_TYPE(RTCAudioSource) *audioSource = [self audioSourceWithConstraints:nil]; return [self audioTrackWithSource:audioSource trackId:trackId]; } -- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source - trackId:(NSString *)trackId { - return [[RTCAudioTrack alloc] initWithFactory:self - source:source - trackId:trackId]; +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source + trackId:(NSString *)trackId { + return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:self source:source trackId:trackId]; } -- (RTCVideoSource *)videoSource { - return [[RTCVideoSource alloc] initWithFactory:self - signalingThread:_signalingThread.get() - workerThread:_workerThread.get()]; +- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource { + return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self + signalingThread:_signalingThread.get() + workerThread:_workerThread.get()]; } -- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source - trackId:(NSString *)trackId { - return [[RTCVideoTrack alloc] initWithFactory:self - source:source - trackId:trackId]; +- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source + trackId:(NSString *)trackId { + return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:self source:source trackId:trackId]; } -- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId { - return [[RTCMediaStream alloc] initWithFactory:self - streamId:streamId]; +- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId { + return [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:self streamId:streamId]; } -- (RTCPeerConnection 
*)peerConnectionWithConfiguration: - (RTCConfiguration *)configuration - constraints: - (RTCMediaConstraints *)constraints - delegate: - (nullable id)delegate { - return [[RTCPeerConnection alloc] initWithFactory:self - configuration:configuration - constraints:constraints - delegate:delegate]; +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate: + (nullable id)delegate { + return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self + configuration:configuration + constraints:constraints + delegate:delegate]; } -- (RTCPeerConnection *) - peerConnectionWithDependencies:(RTCConfiguration *)configuration - constraints:(RTCMediaConstraints *)constraints +- (RTC_OBJC_TYPE(RTCPeerConnection) *) + peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints dependencies:(std::unique_ptr)dependencies - delegate:(id)delegate { - return [[RTCPeerConnection alloc] initWithDependencies:self - configuration:configuration - constraints:constraints - dependencies:std::move(dependencies) - delegate:delegate]; + delegate:(id)delegate { + return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithDependencies:self + configuration:configuration + constraints:constraints + dependencies:std::move(dependencies) + delegate:delegate]; } -- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options { +- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options { RTC_DCHECK(options != nil); _nativeFactory->SetOptions(options.nativeOptions); } diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm index 3bb75eec68..522e520e12 100644 --- 
a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm @@ -32,12 +32,12 @@ auto audioDecoderFactory = webrtc::CreateBuiltinAudioDecoderFactory(); [builder setAudioDecoderFactory:audioDecoderFactory]; - auto videoEncoderFactory = - webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]); + auto videoEncoderFactory = webrtc::ObjCToNativeVideoEncoderFactory( + [[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]); [builder setVideoEncoderFactory:std::move(videoEncoderFactory)]; - auto videoDecoderFactory = - webrtc::ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]); + auto videoDecoderFactory = webrtc::ObjCToNativeVideoDecoderFactory( + [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]); [builder setVideoDecoderFactory:std::move(videoDecoderFactory)]; #if defined(WEBRTC_IOS) diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h index 189eb736b2..f0b0de156a 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h @@ -29,7 +29,7 @@ NS_ASSUME_NONNULL_BEGIN + (RTCPeerConnectionFactoryBuilder *)builder; -- (RTCPeerConnectionFactory *)createPeerConnectionFactory; +- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory; - (void)setVideoEncoderFactory:(std::unique_ptr)videoEncoderFactory; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index af3d259e68..8f52bea8e3 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -33,8 +33,9 @@ return [[RTCPeerConnectionFactoryBuilder alloc] init]; } -- (RTCPeerConnectionFactory 
*)createPeerConnectionFactory { - RTCPeerConnectionFactory *factory = [RTCPeerConnectionFactory alloc]; +- (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]; return [factory initWithNativeAudioEncoderFactory:_audioEncoderFactory nativeAudioDecoderFactory:_audioDecoderFactory nativeVideoEncoderFactory:std::move(_videoEncoderFactory) diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h index 986b0e698d..8832b23695 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h @@ -14,12 +14,12 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryOptions () +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) +() -/** Returns the equivalent native PeerConnectionFactoryInterface::Options - * structure. */ -@property(nonatomic, readonly) - webrtc::PeerConnectionFactoryInterface::Options nativeOptions; + /** Returns the equivalent native PeerConnectionFactoryInterface::Options + * structure. 
*/ + @property(nonatomic, readonly) webrtc::PeerConnectionFactoryInterface::Options nativeOptions; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h index 4bec8695bd..bfc54a5d7b 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCPeerConnectionFactoryOptions : NSObject +@interface RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) : NSObject @property(nonatomic, assign) BOOL disableEncryption; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm index f0cc6a6c81..5467bd5fc9 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm @@ -25,7 +25,7 @@ void setNetworkBit(webrtc::PeerConnectionFactoryInterface::Options* options, } } // namespace -@implementation RTCPeerConnectionFactoryOptions +@implementation RTC_OBJC_TYPE (RTCPeerConnectionFactoryOptions) @synthesize disableEncryption = _disableEncryption; @synthesize disableNetworkMonitor = _disableNetworkMonitor; diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h index 5471bf4d62..94c1f92956 100644 --- a/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtcpParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtcpParameters () +@interface RTC_OBJC_TYPE (RTCRtcpParameters) +() -/** Returns the equivalent native RtcpParameters structure. */ -@property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters; + /** Returns the equivalent native RtcpParameters structure. 
*/ + @property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters; /** Initialize the object with a native RtcpParameters structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters.h b/sdk/objc/api/peerconnection/RTCRtcpParameters.h index 5c265806b1..1bbaedcf7e 100644 --- a/sdk/objc/api/peerconnection/RTCRtcpParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtcpParameters.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCRtcpParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtcpParameters) : NSObject /** The Canonical Name used by RTCP. */ @property(nonatomic, readonly, copy) NSString *cname; diff --git a/sdk/objc/api/peerconnection/RTCRtcpParameters.mm b/sdk/objc/api/peerconnection/RTCRtcpParameters.mm index 0c33dda961..4d6084b90d 100644 --- a/sdk/objc/api/peerconnection/RTCRtcpParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtcpParameters.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCRtcpParameters +@implementation RTC_OBJC_TYPE (RTCRtcpParameters) @synthesize cname = _cname; @synthesize isReducedSize = _isReducedSize; diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h index 1b297edeba..7833068837 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtpCodecParameters () +@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) +() -/** Returns the equivalent native RtpCodecParameters structure. */ -@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters; + /** Returns the equivalent native RtpCodecParameters structure. 
*/ + @property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters; /** Initialize the object with a native RtpCodecParameters structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtpCodecParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h index 5d3cac5c96..a68d9eb873 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h @@ -31,9 +31,9 @@ RTC_EXTERN const NSString *const kRTCVp8CodecName; RTC_EXTERN const NSString *const kRTCVp9CodecName; RTC_EXTERN const NSString *const kRTCH264CodecName; -/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTCRtpCodecParameters */ +/** Defined in http://w3c.github.io/webrtc-pc/#idl-def-RTC_OBJC_TYPE(RTCRtpCodecParameters) */ RTC_OBJC_EXPORT -@interface RTCRtpCodecParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtpCodecParameters) : NSObject /** The RTP payload type. 
*/ @property(nonatomic, assign) int payloadType; diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index f25679e329..f61b93cce2 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -34,7 +34,7 @@ const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName); const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName); const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName); -@implementation RTCRtpCodecParameters +@implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) @synthesize payloadType = _payloadType; @synthesize name = _name; diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h index e3684d3ca5..074c9b175b 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtpEncodingParameters () +@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters) +() -/** Returns the equivalent native RtpEncodingParameters structure. */ -@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters; + /** Returns the equivalent native RtpEncodingParameters structure. */ + @property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters; /** Initialize the object with a native RtpEncodingParameters structure. 
*/ - (instancetype)initWithNativeParameters:(const webrtc::RtpEncodingParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index 1bbb88dba3..facd7e5129 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -23,7 +23,7 @@ typedef NS_ENUM(NSInteger, RTCPriority) { }; RTC_OBJC_EXPORT -@interface RTCRtpEncodingParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtpEncodingParameters) : NSObject /** The idenfifier for the encoding layer. This is used in simulcast. */ @property(nonatomic, copy, nullable) NSString *rid; diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index 4468fb37d6..eec6ce4015 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCRtpEncodingParameters +@implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters) @synthesize rid = _rid; @synthesize isActive = _isActive; @@ -58,8 +58,8 @@ _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc]; } _bitratePriority = nativeParameters.bitrate_priority; - _networkPriority = - [RTCRtpEncodingParameters priorityFromNativePriority:nativeParameters.network_priority]; + _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) + priorityFromNativePriority:nativeParameters.network_priority]; } return self; } @@ -91,7 +91,7 @@ } parameters.bitrate_priority = _bitratePriority; parameters.network_priority = - [RTCRtpEncodingParameters nativePriorityFromPriority:_networkPriority]; + [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority]; return parameters; } diff --git a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h 
b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h index cfb7fb1145..0b0bce556f 100644 --- a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.h @@ -15,10 +15,11 @@ NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. */ -@interface RTCRtpFragmentationHeader (Private) +@interface RTC_OBJC_TYPE (RTCRtpFragmentationHeader) +(Private) -- (instancetype)initWithNativeFragmentationHeader: - (const webrtc::RTPFragmentationHeader *__nullable)fragmentationHeader; + - (instancetype)initWithNativeFragmentationHeader + : (const webrtc::RTPFragmentationHeader *__nullable)fragmentationHeader; - (std::unique_ptr)createNativeFragmentationHeader; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm index 3a4415a342..e514cf69c6 100644 --- a/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm +++ b/sdk/objc/api/peerconnection/RTCRtpFragmentationHeader+Private.mm @@ -12,10 +12,11 @@ #include "modules/include/module_common_types.h" -@implementation RTCRtpFragmentationHeader (Private) +@implementation RTC_OBJC_TYPE (RTCRtpFragmentationHeader) +(Private) -- (instancetype)initWithNativeFragmentationHeader: - (const webrtc::RTPFragmentationHeader *)fragmentationHeader { + - (instancetype)initWithNativeFragmentationHeader + : (const webrtc::RTPFragmentationHeader *)fragmentationHeader { if (self = [super init]) { if (fragmentationHeader) { int count = fragmentationHeader->fragmentationVectorSize; diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h index 8a2a2311e4..6255847fb9 100644 --- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface 
RTCRtpHeaderExtension () +@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension) +() -/** Returns the equivalent native RtpExtension structure. */ -@property(nonatomic, readonly) webrtc::RtpExtension nativeParameters; + /** Returns the equivalent native RtpExtension structure. */ + @property(nonatomic, readonly) webrtc::RtpExtension nativeParameters; /** Initialize the object with a native RtpExtension structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h index 32114499ce..15be5af56c 100644 --- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h +++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.h @@ -15,7 +15,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCRtpHeaderExtension : NSObject +@interface RTC_OBJC_TYPE (RTCRtpHeaderExtension) : NSObject /** The URI of the RTP header extension, as defined in RFC5285. */ @property(nonatomic, readonly, copy) NSString *uri; diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm index afc47868fe..a19228e629 100644 --- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm +++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtension.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTCRtpHeaderExtension +@implementation RTC_OBJC_TYPE (RTCRtpHeaderExtension) @synthesize uri = _uri; @synthesize id = _id; diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h b/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h index a88ccfa75e..369475a81d 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpParameters+Private.h @@ -14,10 +14,11 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCRtpParameters () +@interface RTC_OBJC_TYPE (RTCRtpParameters) +() -/** Returns the equivalent native RtpParameters structure. 
*/ -@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters; + /** Returns the equivalent native RtpParameters structure. */ + @property(nonatomic, readonly) webrtc::RtpParameters nativeParameters; /** Initialize the object with a native RtpParameters structure. */ - (instancetype)initWithNativeParameters:(const webrtc::RtpParameters &)nativeParameters; diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.h b/sdk/objc/api/peerconnection/RTCRtpParameters.h index 8ee8d712e0..fff6a85886 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.h @@ -27,22 +27,23 @@ typedef NS_ENUM(NSInteger, RTCDegradationPreference) { }; RTC_OBJC_EXPORT -@interface RTCRtpParameters : NSObject +@interface RTC_OBJC_TYPE (RTCRtpParameters) : NSObject /** A unique identifier for the last set of parameters applied. */ @property(nonatomic, copy) NSString *transactionId; /** Parameters used for RTCP. */ -@property(nonatomic, readonly, copy) RTCRtcpParameters *rtcp; +@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCRtcpParameters) * rtcp; /** An array containing parameters for RTP header extensions. */ -@property(nonatomic, readonly, copy) NSArray *headerExtensions; +@property(nonatomic, readonly, copy) + NSArray *headerExtensions; /** The currently active encodings in the order of preference. */ -@property(nonatomic, copy) NSArray *encodings; +@property(nonatomic, copy) NSArray *encodings; /** The negotiated set of send codecs in order of preference. */ -@property(nonatomic, copy) NSArray *codecs; +@property(nonatomic, copy) NSArray *codecs; /** * Degradation preference in case of CPU adaptation or constrained bandwidth. 
diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.mm b/sdk/objc/api/peerconnection/RTCRtpParameters.mm index cbb4576ec7..2236b9aa36 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.mm @@ -16,7 +16,7 @@ #import "RTCRtpHeaderExtension+Private.h" #import "helpers/NSString+StdString.h" -@implementation RTCRtpParameters +@implementation RTC_OBJC_TYPE (RTCRtpParameters) @synthesize transactionId = _transactionId; @synthesize rtcp = _rtcp; @@ -33,30 +33,31 @@ (const webrtc::RtpParameters &)nativeParameters { if (self = [self init]) { _transactionId = [NSString stringForStdString:nativeParameters.transaction_id]; - _rtcp = [[RTCRtcpParameters alloc] initWithNativeParameters:nativeParameters.rtcp]; + _rtcp = + [[RTC_OBJC_TYPE(RTCRtcpParameters) alloc] initWithNativeParameters:nativeParameters.rtcp]; NSMutableArray *headerExtensions = [[NSMutableArray alloc] init]; for (const auto &headerExtension : nativeParameters.header_extensions) { - [headerExtensions - addObject:[[RTCRtpHeaderExtension alloc] initWithNativeParameters:headerExtension]]; + [headerExtensions addObject:[[RTC_OBJC_TYPE(RTCRtpHeaderExtension) alloc] + initWithNativeParameters:headerExtension]]; } _headerExtensions = headerExtensions; NSMutableArray *encodings = [[NSMutableArray alloc] init]; for (const auto &encoding : nativeParameters.encodings) { - [encodings addObject:[[RTCRtpEncodingParameters alloc] + [encodings addObject:[[RTC_OBJC_TYPE(RTCRtpEncodingParameters) alloc] initWithNativeParameters:encoding]]; } _encodings = encodings; NSMutableArray *codecs = [[NSMutableArray alloc] init]; for (const auto &codec : nativeParameters.codecs) { - [codecs addObject:[[RTCRtpCodecParameters alloc] - initWithNativeParameters:codec]]; + [codecs + addObject:[[RTC_OBJC_TYPE(RTCRtpCodecParameters) alloc] initWithNativeParameters:codec]]; } _codecs = codecs; - _degradationPreference = [RTCRtpParameters + _degradationPreference = 
[RTC_OBJC_TYPE(RTCRtpParameters) degradationPreferenceFromNativeDegradationPreference:nativeParameters .degradation_preference]; } @@ -67,17 +68,17 @@ webrtc::RtpParameters parameters; parameters.transaction_id = [NSString stdStringForString:_transactionId]; parameters.rtcp = [_rtcp nativeParameters]; - for (RTCRtpHeaderExtension *headerExtension in _headerExtensions) { + for (RTC_OBJC_TYPE(RTCRtpHeaderExtension) * headerExtension in _headerExtensions) { parameters.header_extensions.push_back(headerExtension.nativeParameters); } - for (RTCRtpEncodingParameters *encoding in _encodings) { + for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in _encodings) { parameters.encodings.push_back(encoding.nativeParameters); } - for (RTCRtpCodecParameters *codec in _codecs) { + for (RTC_OBJC_TYPE(RTCRtpCodecParameters) * codec in _codecs) { parameters.codecs.push_back(codec.nativeParameters); } if (_degradationPreference) { - parameters.degradation_preference = [RTCRtpParameters + parameters.degradation_preference = [RTC_OBJC_TYPE(RTCRtpParameters) nativeDegradationPreferenceFromDegradationPreference:(RTCDegradationPreference) _degradationPreference.intValue]; } diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h index e085529527..c15ce70079 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Native.h @@ -18,13 +18,14 @@ NS_ASSUME_NONNULL_BEGIN /** * This class extension exposes methods that work directly with injectable C++ components. */ -@interface RTCRtpReceiver () +@interface RTC_OBJC_TYPE (RTCRtpReceiver) +() -/** Sets a user defined frame decryptor that will decrypt the entire frame. - * This will decrypt the entire frame using the user provided decryption - * mechanism regardless of whether SRTP is enabled or not. 
- */ -- (void)setFrameDecryptor:(rtc::scoped_refptr)frameDecryptor; + /** Sets a user defined frame decryptor that will decrypt the entire frame. + * This will decrypt the entire frame using the user provided decryption + * mechanism regardless of whether SRTP is enabled or not. + */ + - (void)setFrameDecryptor : (rtc::scoped_refptr)frameDecryptor; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h index 6f56739f0b..6aed0b4bc5 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h @@ -14,28 +14,30 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); namespace webrtc { class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface { public: - RtpReceiverDelegateAdapter(RTCRtpReceiver* receiver); + RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver); void OnFirstPacketReceived(cricket::MediaType media_type) override; private: - __weak RTCRtpReceiver* receiver_; + __weak RTC_OBJC_TYPE(RTCRtpReceiver) * receiver_; }; } // namespace webrtc -@interface RTCRtpReceiver () +@interface RTC_OBJC_TYPE (RTCRtpReceiver) +() -@property(nonatomic, readonly) rtc::scoped_refptr nativeRtpReceiver; + @property(nonatomic, + readonly) rtc::scoped_refptr nativeRtpReceiver; /** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpReceiver:(rtc::scoped_refptr)nativeRtpReceiver NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.h b/sdk/objc/api/peerconnection/RTCRtpReceiver.h index 7a7dacea2b..7ab2cfae72 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.h @@ -23,33 +23,36 @@ typedef NS_ENUM(NSInteger, RTCRtpMediaType) { RTCRtpMediaTypeData, }; -@class RTCRtpReceiver; +@class RTC_OBJC_TYPE(RTCRtpReceiver); RTC_OBJC_EXPORT -@protocol RTCRtpReceiverDelegate +@protocol RTC_OBJC_TYPE +(RTCRtpReceiverDelegate) -/** Called when the first RTP packet is received. - * - * Note: Currently if there are multiple RtpReceivers of the same media type, - * they will all call OnFirstPacketReceived at once. - * - * For example, if we create three audio receivers, A/B/C, they will listen to - * the same signal from the underneath network layer. Whenever the first audio packet - * is received, the underneath signal will be fired. All the receivers A/B/C will be - * notified and the callback of the receiver's delegate will be called. - * - * The process is the same for video receivers. - */ -- (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver - didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType; + /** Called when the first RTP packet is received. + * + * Note: Currently if there are multiple RtpReceivers of the same media type, + * they will all call OnFirstPacketReceived at once. + * + * For example, if we create three audio receivers, A/B/C, they will listen to + * the same signal from the underneath network layer. Whenever the first audio packet + * is received, the underneath signal will be fired. All the receivers A/B/C will be + * notified and the callback of the receiver's delegate will be called. + * + * The process is the same for video receivers. 
+ */ + - (void)rtpReceiver + : (RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver didReceiveFirstPacketForMediaType + : (RTCRtpMediaType)mediaType; @end RTC_OBJC_EXPORT -@protocol RTCRtpReceiver +@protocol RTC_OBJC_TYPE +(RTCRtpReceiver) -/** A unique identifier for this receiver. */ -@property(nonatomic, readonly) NSString *receiverId; + /** A unique identifier for this receiver. */ + @property(nonatomic, readonly) NSString *receiverId; /** The currently active RTCRtpParameters, as defined in * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. @@ -58,22 +61,22 @@ RTC_OBJC_EXPORT * but this API also applies them to receivers, similar to ORTC: * http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. */ -@property(nonatomic, readonly) RTCRtpParameters *parameters; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpParameters) * parameters; /** The RTCMediaStreamTrack associated with the receiver. * Note: reading this property returns a new instance of * RTCMediaStreamTrack. Use isEqual: instead of == to compare * RTCMediaStreamTrack instances. */ -@property(nonatomic, readonly, nullable) RTCMediaStreamTrack *track; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track; /** The delegate for this RtpReceiver. 
*/ -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; @end RTC_OBJC_EXPORT -@interface RTCRtpReceiver : NSObject +@interface RTC_OBJC_TYPE (RTCRtpReceiver) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm index deeb4cb21b..3e00935694 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm @@ -20,8 +20,7 @@ namespace webrtc { -RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter( - RTCRtpReceiver *receiver) { +RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(RTC_OBJC_TYPE(RTCRtpReceiver) * receiver) { RTC_CHECK(receiver); receiver_ = receiver; } @@ -29,15 +28,15 @@ RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter( void RtpReceiverDelegateAdapter::OnFirstPacketReceived( cricket::MediaType media_type) { RTCRtpMediaType packet_media_type = - [RTCRtpReceiver mediaTypeForNativeMediaType:media_type]; - RTCRtpReceiver *receiver = receiver_; + [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:media_type]; + RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = receiver_; [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type]; } } // namespace webrtc -@implementation RTCRtpReceiver { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCRtpReceiver) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeRtpReceiver; std::unique_ptr _observer; } @@ -48,23 +47,24 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived( return [NSString stringForStdString:_nativeRtpReceiver->id()]; } -- (RTCRtpParameters *)parameters { - return [[RTCRtpParameters alloc] +- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters { + return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc] initWithNativeParameters:_nativeRtpReceiver->GetParameters()]; } -- (nullable RTCMediaStreamTrack *)track { +- (nullable 
RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { rtc::scoped_refptr nativeTrack( _nativeRtpReceiver->track()); if (nativeTrack) { - return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory]; + return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack + factory:_factory]; } return nil; } - (NSString *)description { - return [NSString stringWithFormat:@"RTCRtpReceiver {\n receiverId: %@\n}", - self.receiverId]; + return [NSString + stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpReceiver) {\n receiverId: %@\n}", self.receiverId]; } - (void)dealloc { @@ -83,7 +83,7 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived( if (![object isMemberOfClass:[self class]]) { return NO; } - RTCRtpReceiver *receiver = (RTCRtpReceiver *)object; + RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = (RTC_OBJC_TYPE(RTCRtpReceiver) *)object; return _nativeRtpReceiver == receiver.nativeRtpReceiver; } @@ -103,14 +103,13 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived( return _nativeRtpReceiver; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpReceiver: (rtc::scoped_refptr)nativeRtpReceiver { if (self = [super init]) { _factory = factory; _nativeRtpReceiver = nativeRtpReceiver; - RTCLogInfo( - @"RTCRtpReceiver(%p): created receiver: %@", self, self.description); + RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpReceiver)(%p): created receiver: %@", self, self.description); _observer.reset(new webrtc::RtpReceiverDelegateAdapter(self)); _nativeRtpReceiver->SetObserver(_observer.get()); } diff --git a/sdk/objc/api/peerconnection/RTCRtpSender+Native.h b/sdk/objc/api/peerconnection/RTCRtpSender+Native.h index 89a691cd54..249d5c5e09 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender+Native.h +++ b/sdk/objc/api/peerconnection/RTCRtpSender+Native.h @@ -18,14 +18,15 @@ NS_ASSUME_NONNULL_BEGIN /** * This class extension exposes methods that work directly with 
injectable C++ components. */ -@interface RTCRtpSender () +@interface RTC_OBJC_TYPE (RTCRtpSender) +() -/** Sets a defined frame encryptor that will encrypt the entire frame - * before it is sent across the network. This will encrypt the entire frame - * using the user provided encryption mechanism regardless of whether SRTP is - * enabled or not. - */ -- (void)setFrameEncryptor:(rtc::scoped_refptr)frameEncryptor; + /** Sets a defined frame encryptor that will encrypt the entire frame + * before it is sent across the network. This will encrypt the entire frame + * using the user provided encryption mechanism regardless of whether SRTP is + * enabled or not. + */ + - (void)setFrameEncryptor : (rtc::scoped_refptr)frameEncryptor; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpSender+Private.h b/sdk/objc/api/peerconnection/RTCRtpSender+Private.h index 389b833ffa..6fdb42bb22 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpSender+Private.h @@ -14,14 +14,15 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTCRtpSender () +@interface RTC_OBJC_TYPE (RTCRtpSender) +() -@property(nonatomic, readonly) rtc::scoped_refptr nativeRtpSender; + @property(nonatomic, readonly) rtc::scoped_refptr nativeRtpSender; /** Initialize an RTCRtpSender with a native RtpSenderInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpSender:(rtc::scoped_refptr)nativeRtpSender NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCRtpSender.h b/sdk/objc/api/peerconnection/RTCRtpSender.h index c03b4cc88c..41bb083d2e 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender.h +++ b/sdk/objc/api/peerconnection/RTCRtpSender.h @@ -18,33 +18,34 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@protocol RTCRtpSender +@protocol RTC_OBJC_TYPE +(RTCRtpSender) -/** A unique identifier for this sender. */ -@property(nonatomic, readonly) NSString *senderId; + /** A unique identifier for this sender. */ + @property(nonatomic, readonly) NSString *senderId; /** The currently active RTCRtpParameters, as defined in * https://www.w3.org/TR/webrtc/#idl-def-RTCRtpParameters. */ -@property(nonatomic, copy) RTCRtpParameters *parameters; +@property(nonatomic, copy) RTC_OBJC_TYPE(RTCRtpParameters) * parameters; /** The RTCMediaStreamTrack associated with the sender. * Note: reading this property returns a new instance of * RTCMediaStreamTrack. Use isEqual: instead of == to compare * RTCMediaStreamTrack instances. */ -@property(nonatomic, copy, nullable) RTCMediaStreamTrack *track; +@property(nonatomic, copy, nullable) RTC_OBJC_TYPE(RTCMediaStreamTrack) * track; /** IDs of streams associated with the RTP sender */ @property(nonatomic, copy) NSArray *streamIds; /** The RTCDtmfSender accociated with the RTP sender. 
*/ -@property(nonatomic, readonly, nullable) id dtmfSender; +@property(nonatomic, readonly, nullable) id dtmfSender; @end RTC_OBJC_EXPORT -@interface RTCRtpSender : NSObject +@interface RTC_OBJC_TYPE (RTCRtpSender) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCRtpSender.mm b/sdk/objc/api/peerconnection/RTCRtpSender.mm index d29265102b..1ca9360ab8 100644 --- a/sdk/objc/api/peerconnection/RTCRtpSender.mm +++ b/sdk/objc/api/peerconnection/RTCRtpSender.mm @@ -19,8 +19,8 @@ #include "api/media_stream_interface.h" -@implementation RTCRtpSender { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCRtpSender) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeRtpSender; } @@ -30,30 +30,30 @@ return [NSString stringForStdString:_nativeRtpSender->id()]; } -- (RTCRtpParameters *)parameters { - return [[RTCRtpParameters alloc] +- (RTC_OBJC_TYPE(RTCRtpParameters) *)parameters { + return [[RTC_OBJC_TYPE(RTCRtpParameters) alloc] initWithNativeParameters:_nativeRtpSender->GetParameters()]; } -- (void)setParameters:(RTCRtpParameters *)parameters { +- (void)setParameters:(RTC_OBJC_TYPE(RTCRtpParameters) *)parameters { if (!_nativeRtpSender->SetParameters(parameters.nativeParameters).ok()) { - RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self, - parameters); + RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set parameters: %@", self, parameters); } } -- (RTCMediaStreamTrack *)track { +- (RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { rtc::scoped_refptr nativeTrack( _nativeRtpSender->track()); if (nativeTrack) { - return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory]; + return [RTC_OBJC_TYPE(RTCMediaStreamTrack) mediaTrackForNativeTrack:nativeTrack + factory:_factory]; } return nil; } -- (void)setTrack:(RTCMediaStreamTrack *)track { +- (void)setTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { if 
(!_nativeRtpSender->SetTrack(track.nativeTrack)) { - RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track); + RTCLogError(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): Failed to set track %@", self, track); } } @@ -75,8 +75,8 @@ } - (NSString *)description { - return [NSString stringWithFormat:@"RTCRtpSender {\n senderId: %@\n}", - self.senderId]; + return [NSString + stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpSender) {\n senderId: %@\n}", self.senderId]; } - (BOOL)isEqual:(id)object { @@ -89,7 +89,7 @@ if (![object isMemberOfClass:[self class]]) { return NO; } - RTCRtpSender *sender = (RTCRtpSender *)object; + RTC_OBJC_TYPE(RTCRtpSender) *sender = (RTC_OBJC_TYPE(RTCRtpSender) *)object; return _nativeRtpSender == sender.nativeRtpSender; } @@ -109,7 +109,7 @@ return _nativeRtpSender; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpSender:(rtc::scoped_refptr)nativeRtpSender { NSParameterAssert(factory); NSParameterAssert(nativeRtpSender); @@ -119,9 +119,10 @@ rtc::scoped_refptr nativeDtmfSender( _nativeRtpSender->GetDtmfSender()); if (nativeDtmfSender) { - _dtmfSender = [[RTCDtmfSender alloc] initWithNativeDtmfSender:nativeDtmfSender]; + _dtmfSender = + [[RTC_OBJC_TYPE(RTCDtmfSender) alloc] initWithNativeDtmfSender:nativeDtmfSender]; } - RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description); + RTCLogInfo(@"RTC_OBJC_TYPE(RTCRtpSender)(%p): created sender: %@", self, self.description); } return self; } diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h index d7f6b585e4..65d45fb88e 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h @@ -14,21 +14,23 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCPeerConnectionFactory; +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface 
RTCRtpTransceiverInit () +@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) +() -@property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit; + @property(nonatomic, readonly) webrtc::RtpTransceiverInit nativeInit; @end -@interface RTCRtpTransceiver () +@interface RTC_OBJC_TYPE (RTCRtpTransceiver) +() -@property(nonatomic, readonly) rtc::scoped_refptr - nativeRtpTransceiver; + @property(nonatomic, + readonly) rtc::scoped_refptr nativeRtpTransceiver; /** Initialize an RTCRtpTransceiver with a native RtpTransceiverInterface. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpTransceiver: (rtc::scoped_refptr)nativeRtpTransceiver NS_DESIGNATED_INITIALIZER; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index 968dba395a..f8996ccafb 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -30,7 +30,7 @@ typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) { * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit */ RTC_OBJC_EXPORT -@interface RTCRtpTransceiverInit : NSObject +@interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) : NSObject /** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */ @property(nonatomic) RTCRtpTransceiverDirection direction; @@ -39,14 +39,14 @@ RTC_OBJC_EXPORT @property(nonatomic) NSArray *streamIds; /** TODO(bugs.webrtc.org/7600): Not implemented. */ -@property(nonatomic) NSArray *sendEncodings; +@property(nonatomic) NSArray *sendEncodings; @end -@class RTCRtpTransceiver; +@class RTC_OBJC_TYPE(RTCRtpTransceiver); -/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the WebRTC - * specification. A transceiver represents a combination of an RTCRtpSender +/** The RTCRtpTransceiver maps to the RTCRtpTransceiver defined by the + * WebRTC specification. 
A transceiver represents a combination of an RTCRtpSender * and an RTCRtpReceiver that share a common mid. As defined in JSEP, an * RTCRtpTransceiver is said to be associated with a media description if its * mid property is non-nil; otherwise, it is said to be disassociated. @@ -59,12 +59,13 @@ RTC_OBJC_EXPORT * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver */ RTC_OBJC_EXPORT -@protocol RTCRtpTransceiver +@protocol RTC_OBJC_TYPE +(RTCRtpTransceiver) -/** Media type of the transceiver. The sender and receiver will also have this - * type. - */ -@property(nonatomic, readonly) RTCRtpMediaType mediaType; + /** Media type of the transceiver. The sender and receiver will also have this + * type. + */ + @property(nonatomic, readonly) RTCRtpMediaType mediaType; /** The mid attribute is the mid negotiated and present in the local and * remote descriptions. Before negotiation is complete, the mid value may be @@ -78,14 +79,14 @@ RTC_OBJC_EXPORT * present, regardless of the direction of media. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender */ -@property(nonatomic, readonly) RTCRtpSender *sender; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpSender) * sender; /** The receiver attribute exposes the RTCRtpReceiver corresponding to the RTP * media that may be received with the transceiver's mid. The receiver is * always present, regardless of the direction of media. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver */ -@property(nonatomic, readonly) RTCRtpReceiver *receiver; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpReceiver) * receiver; /** The isStopped attribute indicates that the sender of this transceiver will * no longer send, and that the receiver will no longer receive. 
It is true if @@ -121,7 +122,7 @@ RTC_OBJC_EXPORT @end RTC_OBJC_EXPORT -@interface RTCRtpTransceiver : NSObject +@interface RTC_OBJC_TYPE (RTCRtpTransceiver) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index 74ea456fea..2995e5fceb 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -17,7 +17,7 @@ #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" -@implementation RTCRtpTransceiverInit +@implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit) @synthesize direction = _direction; @synthesize streamIds = _streamIds; @@ -32,11 +32,12 @@ - (webrtc::RtpTransceiverInit)nativeInit { webrtc::RtpTransceiverInit init; - init.direction = [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:_direction]; + init.direction = + [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:_direction]; for (NSString *streamId in _streamIds) { init.stream_ids.push_back([streamId UTF8String]); } - for (RTCRtpEncodingParameters *sendEncoding in _sendEncodings) { + for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * sendEncoding in _sendEncodings) { init.send_encodings.push_back(sendEncoding.nativeParameters); } return init; @@ -44,13 +45,14 @@ @end -@implementation RTCRtpTransceiver { - RTCPeerConnectionFactory *_factory; +@implementation RTC_OBJC_TYPE (RTCRtpTransceiver) { + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeRtpTransceiver; } - (RTCRtpMediaType)mediaType { - return [RTCRtpReceiver mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()]; + return [RTC_OBJC_TYPE(RTCRtpReceiver) + mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()]; } - (NSString *)mid { @@ -69,18 +71,18 @@ } - (RTCRtpTransceiverDirection)direction { - return [RTCRtpTransceiver + return [RTC_OBJC_TYPE(RTCRtpTransceiver) 
rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver->direction()]; } - (void)setDirection:(RTCRtpTransceiverDirection)direction { _nativeRtpTransceiver->SetDirection( - [RTCRtpTransceiver nativeRtpTransceiverDirectionFromDirection:direction]); + [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:direction]); } - (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut { if (_nativeRtpTransceiver->current_direction()) { - *currentDirectionOut = [RTCRtpTransceiver + *currentDirectionOut = [RTC_OBJC_TYPE(RTCRtpTransceiver) rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver->current_direction()]; return YES; } else { @@ -94,7 +96,9 @@ - (NSString *)description { return [NSString - stringWithFormat:@"RTCRtpTransceiver {\n sender: %@\n receiver: %@\n}", _sender, _receiver]; + stringWithFormat:@"RTC_OBJC_TYPE(RTCRtpTransceiver) {\n sender: %@\n receiver: %@\n}", + _sender, + _receiver]; } - (BOOL)isEqual:(id)object { @@ -107,7 +111,7 @@ if (![object isMemberOfClass:[self class]]) { return NO; } - RTCRtpTransceiver *transceiver = (RTCRtpTransceiver *)object; + RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver = (RTC_OBJC_TYPE(RTCRtpTransceiver) *)object; return _nativeRtpTransceiver == transceiver.nativeRtpTransceiver; } @@ -121,7 +125,7 @@ return _nativeRtpTransceiver; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeRtpTransceiver: (rtc::scoped_refptr)nativeRtpTransceiver { NSParameterAssert(factory); @@ -129,11 +133,13 @@ if (self = [super init]) { _factory = factory; _nativeRtpTransceiver = nativeRtpTransceiver; - _sender = [[RTCRtpSender alloc] initWithFactory:_factory - nativeRtpSender:nativeRtpTransceiver->sender()]; - _receiver = [[RTCRtpReceiver alloc] initWithFactory:_factory - nativeRtpReceiver:nativeRtpTransceiver->receiver()]; - RTCLogInfo(@"RTCRtpTransceiver(%p): created transceiver: 
%@", self, self.description); + _sender = [[RTC_OBJC_TYPE(RTCRtpSender) alloc] initWithFactory:_factory + nativeRtpSender:nativeRtpTransceiver->sender()]; + _receiver = + [[RTC_OBJC_TYPE(RTCRtpReceiver) alloc] initWithFactory:_factory + nativeRtpReceiver:nativeRtpTransceiver->receiver()]; + RTCLogInfo( + @"RTC_OBJC_TYPE(RTCRtpTransceiver)(%p): created transceiver: %@", self, self.description); } return self; } diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h index cc255cd582..0f0a06a887 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h @@ -14,14 +14,15 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCSessionDescription () +@interface RTC_OBJC_TYPE (RTCSessionDescription) +() -/** - * The native SessionDescriptionInterface representation of this - * RTCSessionDescription object. This is needed to pass to the underlying C++ - * APIs. - */ -@property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription; + /** + * The native SessionDescriptionInterface representation of this + * RTCSessionDescription object. This is needed to pass to the underlying C++ + * APIs. + */ + @property(nonatomic, readonly, nullable) webrtc::SessionDescriptionInterface *nativeDescription; /** * Initialize an RTCSessionDescription from a native diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.h b/sdk/objc/api/peerconnection/RTCSessionDescription.h index b9bcab1a46..6bd118db13 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.h @@ -25,7 +25,7 @@ typedef NS_ENUM(NSInteger, RTCSdpType) { NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCSessionDescription : NSObject +@interface RTC_OBJC_TYPE (RTCSessionDescription) : NSObject /** The type of session description. 
*/ @property(nonatomic, readonly) RTCSdpType type; diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/sdk/objc/api/peerconnection/RTCSessionDescription.mm index 21e5e42f66..a62870e088 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.mm +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.mm @@ -15,7 +15,7 @@ #include "rtc_base/checks.h" -@implementation RTCSessionDescription +@implementation RTC_OBJC_TYPE (RTCSessionDescription) @synthesize type = _type; @synthesize sdp = _sdp; @@ -40,7 +40,7 @@ } - (NSString *)description { - return [NSString stringWithFormat:@"RTCSessionDescription:\n%@\n%@", + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCSessionDescription):\n%@\n%@", [[self class] stringForType:_type], _sdp]; } diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h index 9c2178fb6b..5eff996c4f 100644 --- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interface for converting to/from internal C++ formats. 
*/ -@interface RTCVideoCodecInfo (Private) +@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) +(Private) -- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format; + - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format; - (webrtc::SdpVideoFormat)nativeSdpVideoFormat; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm index 21aacf6281..2eb8d366d2 100644 --- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm @@ -12,9 +12,10 @@ #import "helpers/NSString+StdString.h" -@implementation RTCVideoCodecInfo (Private) +@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) +(Private) -- (instancetype)initWithNativeSdpVideoFormat:(webrtc::SdpVideoFormat)format { + - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format { NSMutableDictionary *params = [NSMutableDictionary dictionary]; for (auto it = format.parameters.begin(); it != format.parameters.end(); ++it) { [params setObject:[NSString stringForStdString:it->second] diff --git a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h index 5b062455bc..8323b18dc1 100644 --- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. 
*/ -@interface RTCVideoEncoderSettings (Private) +@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings) +(Private) -- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *__nullable)videoCodec; + - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *__nullable)videoCodec; - (webrtc::VideoCodec)nativeVideoCodec; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm index fe7e690517..dec3a61090 100644 --- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm @@ -12,9 +12,10 @@ #import "helpers/NSString+StdString.h" -@implementation RTCVideoEncoderSettings (Private) +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings) +(Private) -- (instancetype)initWithNativeVideoCodec:(const webrtc::VideoCodec *)videoCodec { + - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *)videoCodec { if (self = [super init]) { if (videoCodec) { const char *codecName = CodecTypeToPayloadString(videoCodec->codecType); diff --git a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h index 1827e6b924..03908463db 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h @@ -17,26 +17,27 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCVideoSource () +@interface RTC_OBJC_TYPE (RTCVideoSource) +() -/** - * The VideoTrackSourceInterface object passed to this RTCVideoSource during - * construction. - */ -@property(nonatomic, readonly) rtc::scoped_refptr - nativeVideoSource; + /** + * The VideoTrackSourceInterface object passed to this RTCVideoSource during + * construction. + */ + @property(nonatomic, + readonly) rtc::scoped_refptr nativeVideoSource; /** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. 
*/ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeVideoSource: (rtc::scoped_refptr)nativeVideoSource NS_DESIGNATED_INITIALIZER; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type NS_UNAVAILABLE; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory signalingThread:(rtc::Thread *)signalingThread workerThread:(rtc::Thread *)workerThread; diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.h b/sdk/objc/api/peerconnection/RTCVideoSource.h index ec8a45c1c2..cdef8b89a1 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.h +++ b/sdk/objc/api/peerconnection/RTCVideoSource.h @@ -18,7 +18,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@interface RTCVideoSource : RTCMediaSource +@interface RTC_OBJC_TYPE (RTCVideoSource) : RTC_OBJC_TYPE(RTCMediaSource) - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm index 789c8436e5..15b0d6f1be 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.mm +++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm @@ -24,11 +24,11 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource( // TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once // RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more // info. 
-@implementation RTCVideoSource { +@implementation RTC_OBJC_TYPE (RTCVideoSource) { rtc::scoped_refptr _nativeVideoSource; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeVideoSource: (rtc::scoped_refptr)nativeVideoSource { RTC_DCHECK(factory); @@ -41,14 +41,14 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource( return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource type:(RTCMediaSourceType)type { RTC_NOTREACHED(); return nil; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory signalingThread:(rtc::Thread *)signalingThread workerThread:(rtc::Thread *)workerThread { rtc::scoped_refptr objCVideoTrackSource( @@ -61,10 +61,11 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource( - (NSString *)description { NSString *stateString = [[self class] stringForState:self.state]; - return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString]; + return [NSString stringWithFormat:@"RTC_OBJC_TYPE(RTCVideoSource)( %p ): %@", self, stateString]; } -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame); } diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h b/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h index dd3d172820..f1a8d7e4ed 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack+Private.h @@ -14,14 +14,15 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCVideoTrack () 
+@interface RTC_OBJC_TYPE (RTCVideoTrack) +() -/** VideoTrackInterface created or passed in at construction. */ -@property(nonatomic, readonly) rtc::scoped_refptr nativeVideoTrack; + /** VideoTrackInterface created or passed in at construction. */ + @property(nonatomic, readonly) rtc::scoped_refptr nativeVideoTrack; /** Initialize an RTCVideoTrack with its source and an id. */ -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCVideoSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCVideoSource) *)source trackId:(NSString *)trackId; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.h b/sdk/objc/api/peerconnection/RTCVideoTrack.h index b946889eb4..5382b7169f 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.h @@ -14,23 +14,24 @@ NS_ASSUME_NONNULL_BEGIN -@protocol RTCVideoRenderer; -@class RTCPeerConnectionFactory; -@class RTCVideoSource; +@protocol RTC_OBJC_TYPE +(RTCVideoRenderer); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); +@class RTC_OBJC_TYPE(RTCVideoSource); RTC_OBJC_EXPORT -@interface RTCVideoTrack : RTCMediaStreamTrack +@interface RTC_OBJC_TYPE (RTCVideoTrack) : RTC_OBJC_TYPE(RTCMediaStreamTrack) /** The video source for this video track. */ -@property(nonatomic, readonly) RTCVideoSource *source; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source; - (instancetype)init NS_UNAVAILABLE; /** Register a renderer that will render all frames received on this track. */ -- (void)addRenderer:(id)renderer; +- (void)addRenderer:(id)renderer; /** Deregister a renderer. 
*/ -- (void)removeRenderer:(id)renderer; +- (void)removeRenderer:(id)renderer; @end diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index 77936a640b..3f38dd51a9 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -16,14 +16,14 @@ #import "api/RTCVideoRendererAdapter+Private.h" #import "helpers/NSString+StdString.h" -@implementation RTCVideoTrack { +@implementation RTC_OBJC_TYPE (RTCVideoTrack) { NSMutableArray *_adapters; } @synthesize source = _source; -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory - source:(RTCVideoSource *)source +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + source:(RTC_OBJC_TYPE(RTCVideoSource) *)source trackId:(NSString *)trackId { NSParameterAssert(factory); NSParameterAssert(source); @@ -38,7 +38,7 @@ return self; } -- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack: (rtc::scoped_refptr)nativeMediaTrack type:(RTCMediaStreamTrackType)type { @@ -57,19 +57,19 @@ } } -- (RTCVideoSource *)source { +- (RTC_OBJC_TYPE(RTCVideoSource) *)source { if (!_source) { rtc::scoped_refptr source = self.nativeVideoTrack->GetSource(); if (source) { - _source = - [[RTCVideoSource alloc] initWithFactory:self.factory nativeVideoSource:source.get()]; + _source = [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self.factory + nativeVideoSource:source.get()]; } } return _source; } -- (void)addRenderer:(id)renderer { +- (void)addRenderer:(id)renderer { // Make sure we don't have this renderer yet. 
for (RTCVideoRendererAdapter *adapter in _adapters) { if (adapter.videoRenderer == renderer) { @@ -85,7 +85,7 @@ rtc::VideoSinkWants()); } -- (void)removeRenderer:(id)renderer { +- (void)removeRenderer:(id)renderer { __block NSUInteger indexToRemove = NSNotFound; [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter, NSUInteger idx, diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h index 00786dc514..a118b25ed7 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.h @@ -14,12 +14,12 @@ #import "RTCVideoDecoder.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderVP8 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderVP8) : NSObject /* This returns a VP8 decoder that can be returned from a RTCVideoDecoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp8Decoder; ++ (id)vp8Decoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm index 9750bd8bab..91ca3b7aec 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP8.mm @@ -16,9 +16,9 @@ #include "modules/video_coding/codecs/vp8/include/vp8.h" -@implementation RTCVideoDecoderVP8 +@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP8) -+ (id)vp8Decoder { ++ (id)vp8Decoder { return [[RTCWrappedNativeVideoDecoder alloc] initWithNativeDecoder:std::unique_ptr(webrtc::VP8Decoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h index b74c1ef999..b3a1743057 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.h @@ -14,12 +14,12 @@ #import "RTCVideoDecoder.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderVP9 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderVP9) : NSObject /* This returns a VP9 decoder that can be returned from a RTCVideoDecoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoDecoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp9Decoder; ++ (id)vp9Decoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm index 48582fedf9..56041a27eb 100644 --- a/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoDecoderVP9.mm @@ -16,9 +16,9 @@ #include "modules/video_coding/codecs/vp9/include/vp9.h" -@implementation RTCVideoDecoderVP9 +@implementation RTC_OBJC_TYPE (RTCVideoDecoderVP9) -+ (id)vp9Decoder { ++ (id)vp9Decoder { return [[RTCWrappedNativeVideoDecoder alloc] initWithNativeDecoder:std::unique_ptr(webrtc::VP9Decoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h index 8d87a89893..e136a5bda8 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.h @@ -14,12 +14,12 @@ #import "RTCVideoEncoder.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderVP8 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderVP8) : NSObject /* This returns a VP8 encoder that can be returned from a RTCVideoEncoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp8Encoder; ++ (id)vp8Encoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm index 677f6ddf5f..135512723e 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP8.mm @@ -16,9 +16,9 @@ #include "modules/video_coding/codecs/vp8/include/vp8.h" -@implementation RTCVideoEncoderVP8 +@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP8) -+ (id)vp8Encoder { ++ (id)vp8Encoder { return [[RTCWrappedNativeVideoEncoder alloc] initWithNativeEncoder:std::unique_ptr(webrtc::VP8Encoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h index 9efea4be2a..8f961ef337 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h @@ -14,12 +14,12 @@ #import "RTCVideoEncoder.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderVP9 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderVP9) : NSObject /* This returns a VP9 encoder that can be returned from a RTCVideoEncoderFactory injected into * RTCPeerConnectionFactory. Even though it implements the RTCVideoEncoder protocol, it can not be * used independently from the RTCPeerConnectionFactory. 
*/ -+ (id)vp9Encoder; ++ (id)vp9Encoder; @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm index a5d84084d0..ec9e75a5ed 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm @@ -16,9 +16,9 @@ #include "modules/video_coding/codecs/vp9/include/vp9.h" -@implementation RTCVideoEncoderVP9 +@implementation RTC_OBJC_TYPE (RTCVideoEncoderVP9) -+ (id)vp9Encoder { ++ (id)vp9Encoder { return [[RTCWrappedNativeVideoEncoder alloc] initWithNativeEncoder:std::unique_ptr(webrtc::VP9Encoder::Create())]; } diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h index b5694c7d94..2241c0c056 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.h @@ -15,7 +15,7 @@ #include "api/video_codecs/video_decoder.h" #include "media/base/codec.h" -@interface RTCWrappedNativeVideoDecoder : NSObject +@interface RTCWrappedNativeVideoDecoder : NSObject - (instancetype)initWithNativeDecoder:(std::unique_ptr)decoder; diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm index dce479c890..e4d8dc357d 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoDecoder.mm @@ -29,7 +29,7 @@ return std::move(_wrappedDecoder); } -#pragma mark - RTCVideoDecoder +#pragma mark - RTC_OBJC_TYPE(RTCVideoDecoder) - (void)setCallback:(RTCVideoDecoderCallback)callback { RTC_NOTREACHED(); @@ -45,9 +45,9 @@ return 0; } -- (NSInteger)decode:(RTCEncodedImage *)encodedImage +- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage missingFrames:(BOOL)missingFrames - codecSpecificInfo:(nullable id)info + codecSpecificInfo:(nullable id)info renderTimeMs:(int64_t)renderTimeMs { RTC_NOTREACHED(); return 0; diff --git 
a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h index b4ef88264b..ec16793f8c 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.h @@ -16,7 +16,7 @@ #include "api/video_codecs/video_encoder.h" #include "media/base/codec.h" -@interface RTCWrappedNativeVideoEncoder : NSObject +@interface RTCWrappedNativeVideoEncoder : NSObject - (instancetype)initWithNativeEncoder:(std::unique_ptr)encoder; diff --git a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm index 9afd54f55f..6feecabd07 100644 --- a/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm +++ b/sdk/objc/api/video_codec/RTCWrappedNativeVideoEncoder.mm @@ -29,13 +29,13 @@ return std::move(_wrappedEncoder); } -#pragma mark - RTCVideoEncoder +#pragma mark - RTC_OBJC_TYPE(RTCVideoEncoder) - (void)setCallback:(RTCVideoEncoderCallback)callback { RTC_NOTREACHED(); } -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings +- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores { RTC_NOTREACHED(); return 0; @@ -46,8 +46,8 @@ return 0; } -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(nullable id)info +- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame + codecSpecificInfo:(nullable id)info frameTypes:(NSArray *)frameTypes { RTC_NOTREACHED(); return 0; @@ -63,7 +63,7 @@ return nil; } -- (nullable RTCVideoEncoderQpThresholds *)scalingSettings { +- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { RTC_NOTREACHED(); return nil; } diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h index fad08c2453..20dc807991 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h +++ 
b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer+Private.h @@ -14,7 +14,8 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCI420Buffer () { +@interface RTC_OBJC_TYPE (RTCI420Buffer) +() { @protected rtc::scoped_refptr _i420Buffer; } diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h index 9a904f5396..3afe2090a2 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h +++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN /** RTCI420Buffer implements the RTCI420Buffer protocol */ RTC_OBJC_EXPORT -@interface RTCI420Buffer : NSObject +@interface RTC_OBJC_TYPE (RTCI420Buffer) : NSObject @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm index d9d5d15716..f82f206e91 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm +++ b/sdk/objc/api/video_frame_buffer/RTCNativeI420Buffer.mm @@ -17,7 +17,7 @@ #include "third_party/libyuv/include/libyuv.h" #endif -@implementation RTCI420Buffer +@implementation RTC_OBJC_TYPE (RTCI420Buffer) - (instancetype)initWithWidth:(int)width height:(int)height { if (self = [super init]) { @@ -99,7 +99,7 @@ return _i420Buffer->DataV(); } -- (id)toI420 { +- (id)toI420 { return self; } diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h index 6cd5110460..053a10a304 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h +++ b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h @@ -18,7 +18,7 @@ NS_ASSUME_NONNULL_BEGIN /** Mutable version of RTCI420Buffer */ RTC_OBJC_EXPORT -@interface RTCMutableI420Buffer : RTCI420Buffer +@interface RTC_OBJC_TYPE (RTCMutableI420Buffer) : RTC_OBJC_TYPE(RTCI420Buffer) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm 
b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm index 5c6c1ffb64..1e669bcb9c 100644 --- a/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm +++ b/sdk/objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.mm @@ -14,7 +14,7 @@ #include "api/video/i420_buffer.h" -@implementation RTCMutableI420Buffer +@implementation RTC_OBJC_TYPE (RTCMutableI420Buffer) - (uint8_t *)mutableDataY { return static_cast(_i420Buffer.get())->MutableDataY(); diff --git a/sdk/objc/base/RTCCodecSpecificInfo.h b/sdk/objc/base/RTCCodecSpecificInfo.h index e2ae4cafa1..5e7800e524 100644 --- a/sdk/objc/base/RTCCodecSpecificInfo.h +++ b/sdk/objc/base/RTCCodecSpecificInfo.h @@ -18,7 +18,7 @@ NS_ASSUME_NONNULL_BEGIN * Corresponds to webrtc::CodecSpecificInfo. */ RTC_OBJC_EXPORT -@protocol RTCCodecSpecificInfo -@end +@protocol RTC_OBJC_TYPE +(RTCCodecSpecificInfo) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCEncodedImage.h b/sdk/objc/base/RTCEncodedImage.h index 670c7276ff..5fec8a220a 100644 --- a/sdk/objc/base/RTCEncodedImage.h +++ b/sdk/objc/base/RTCEncodedImage.h @@ -31,7 +31,7 @@ typedef NS_ENUM(NSUInteger, RTCVideoContentType) { /** Represents an encoded frame. Corresponds to webrtc::EncodedImage. 
 */ RTC_OBJC_EXPORT -@interface RTCEncodedImage : NSObject +@interface RTC_OBJC_TYPE (RTCEncodedImage) : NSObject @property(nonatomic, strong) NSData *buffer; @property(nonatomic, assign) int32_t encodedWidth; diff --git a/sdk/objc/base/RTCEncodedImage.m b/sdk/objc/base/RTCEncodedImage.m index 024a57c541..dec9630539 100644 --- a/sdk/objc/base/RTCEncodedImage.m +++ b/sdk/objc/base/RTCEncodedImage.m @@ -10,7 +10,7 @@ #import "RTCEncodedImage.h" -@implementation RTCEncodedImage +@implementation RTC_OBJC_TYPE (RTCEncodedImage) @synthesize buffer = _buffer; @synthesize encodedWidth = _encodedWidth; diff --git a/sdk/objc/base/RTCI420Buffer.h b/sdk/objc/base/RTCI420Buffer.h index a6c7e41bcb..b97f05a5ba 100644 --- a/sdk/objc/base/RTCI420Buffer.h +++ b/sdk/objc/base/RTCI420Buffer.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** Protocol for RTCYUVPlanarBuffers containing I420 data */ RTC_OBJC_EXPORT -@protocol RTCI420Buffer -@end +@protocol RTC_OBJC_TYPE +(RTCI420Buffer) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCMacros.h b/sdk/objc/base/RTCMacros.h index 7f7e64cb76..e527ff6bc4 100644 --- a/sdk/objc/base/RTCMacros.h +++ b/sdk/objc/base/RTCMacros.h @@ -11,6 +11,30 @@ #ifndef SDK_OBJC_BASE_RTCMACROS_H_ #define SDK_OBJC_BASE_RTCMACROS_H_ +// Internal macros used to correctly concatenate symbols. +#define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b +#define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b) + +// RTC_OBJC_TYPE_PREFIX +// +// Macro used to prepend a prefix to the API types that are exported with +// RTC_OBJC_EXPORT. +// +// Clients can patch the definition of this macro locally and build +// WebRTC.framework with their own prefix in case symbol clashing is a +// problem. +// +// This macro must only be defined here and not via a compiler flag to +// ensure it has a unique value. +#define RTC_OBJC_TYPE_PREFIX + +// RTC_OBJC_TYPE +// +// Macro used internally to declare API types.
Declaring an API type without +// using this macro will not include the declared type in the set of types +// that will be affected by the configurable RTC_OBJC_TYPE_PREFIX. +#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name) + #define RTC_OBJC_EXPORT __attribute__((visibility("default"))) #if defined(__cplusplus) diff --git a/sdk/objc/base/RTCMutableI420Buffer.h b/sdk/objc/base/RTCMutableI420Buffer.h index 098fb9a66f..cde721980b 100644 --- a/sdk/objc/base/RTCMutableI420Buffer.h +++ b/sdk/objc/base/RTCMutableI420Buffer.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN /** Extension of the I420 buffer with mutable data access */ RTC_OBJC_EXPORT -@protocol RTCMutableI420Buffer -@end +@protocol RTC_OBJC_TYPE +(RTCMutableI420Buffer) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCMutableYUVPlanarBuffer.h b/sdk/objc/base/RTCMutableYUVPlanarBuffer.h index 00dfcd94ca..bd14e3bca3 100644 --- a/sdk/objc/base/RTCMutableYUVPlanarBuffer.h +++ b/sdk/objc/base/RTCMutableYUVPlanarBuffer.h @@ -16,9 +16,10 @@ NS_ASSUME_NONNULL_BEGIN /** Extension of the YUV planar data buffer with mutable data access */ RTC_OBJC_EXPORT -@protocol RTCMutableYUVPlanarBuffer +@protocol RTC_OBJC_TYPE +(RTCMutableYUVPlanarBuffer) -@property(nonatomic, readonly) uint8_t *mutableDataY; + @property(nonatomic, readonly) uint8_t *mutableDataY; @property(nonatomic, readonly) uint8_t *mutableDataU; @property(nonatomic, readonly) uint8_t *mutableDataV; diff --git a/sdk/objc/base/RTCRtpFragmentationHeader.h b/sdk/objc/base/RTCRtpFragmentationHeader.h index 2e26b08b8a..001b4e9deb 100644 --- a/sdk/objc/base/RTCRtpFragmentationHeader.h +++ b/sdk/objc/base/RTCRtpFragmentationHeader.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** Information for header. Corresponds to webrtc::RTPFragmentationHeader. 
*/ RTC_OBJC_EXPORT -@interface RTCRtpFragmentationHeader : NSObject +@interface RTC_OBJC_TYPE (RTCRtpFragmentationHeader) : NSObject @property(nonatomic, strong) NSArray *fragmentationOffset; @property(nonatomic, strong) NSArray *fragmentationLength; diff --git a/sdk/objc/base/RTCRtpFragmentationHeader.m b/sdk/objc/base/RTCRtpFragmentationHeader.m index 8049abc411..60e2f5d1e6 100644 --- a/sdk/objc/base/RTCRtpFragmentationHeader.m +++ b/sdk/objc/base/RTCRtpFragmentationHeader.m @@ -10,11 +10,11 @@ #import "RTCRtpFragmentationHeader.h" -@implementation RTCRtpFragmentationHeader +@implementation RTC_OBJC_TYPE (RTCRtpFragmentationHeader) @synthesize fragmentationOffset = _fragmentationOffset; @synthesize fragmentationLength = _fragmentationLength; @synthesize fragmentationTimeDiff = _fragmentationTimeDiff; @synthesize fragmentationPlType = _fragmentationPlType; -@end \ No newline at end of file +@end diff --git a/sdk/objc/base/RTCVideoCapturer.h b/sdk/objc/base/RTCVideoCapturer.h index 5212627692..a1ffdcf38e 100644 --- a/sdk/objc/base/RTCVideoCapturer.h +++ b/sdk/objc/base/RTCVideoCapturer.h @@ -14,19 +14,21 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCVideoCapturer; +@class RTC_OBJC_TYPE(RTCVideoCapturer); RTC_OBJC_EXPORT -@protocol RTCVideoCapturerDelegate -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame; +@protocol RTC_OBJC_TYPE +(RTCVideoCapturerDelegate) - + (void)capturer : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame + : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end RTC_OBJC_EXPORT -@interface RTCVideoCapturer : NSObject +@interface RTC_OBJC_TYPE (RTCVideoCapturer) : NSObject -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; -- (instancetype)initWithDelegate:(id)delegate; +- (instancetype)initWithDelegate:(id)delegate; @end diff --git a/sdk/objc/base/RTCVideoCapturer.m b/sdk/objc/base/RTCVideoCapturer.m index 39cc377746..ca31a731f0 100644 --- a/sdk/objc/base/RTCVideoCapturer.m 
+++ b/sdk/objc/base/RTCVideoCapturer.m @@ -10,11 +10,11 @@ #import "RTCVideoCapturer.h" -@implementation RTCVideoCapturer +@implementation RTC_OBJC_TYPE (RTCVideoCapturer) @synthesize delegate = _delegate; -- (instancetype)initWithDelegate:(id)delegate { +- (instancetype)initWithDelegate:(id)delegate { if (self = [super init]) { _delegate = delegate; } diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index 2162caaa21..fa28958f25 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. */ RTC_OBJC_EXPORT -@interface RTCVideoCodecInfo : NSObject +@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject - (instancetype)init NS_UNAVAILABLE; @@ -26,7 +26,7 @@ RTC_OBJC_EXPORT parameters:(nullable NSDictionary *)parameters NS_DESIGNATED_INITIALIZER; -- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info; +- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; @property(nonatomic, readonly) NSString *name; @property(nonatomic, readonly) NSDictionary *parameters; diff --git a/sdk/objc/base/RTCVideoCodecInfo.m b/sdk/objc/base/RTCVideoCodecInfo.m index 7fb17ca7d2..ce26ae1de3 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.m +++ b/sdk/objc/base/RTCVideoCodecInfo.m @@ -10,7 +10,7 @@ #import "RTCVideoCodecInfo.h" -@implementation RTCVideoCodecInfo +@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) @synthesize name = _name; @synthesize parameters = _parameters; @@ -29,7 +29,7 @@ return self; } -- (BOOL)isEqualToCodecInfo:(RTCVideoCodecInfo *)info { +- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if (!info || ![self.name isEqualToString:info.name] || ![self.parameters isEqualToDictionary:info.parameters]) { diff --git a/sdk/objc/base/RTCVideoDecoder.h b/sdk/objc/base/RTCVideoDecoder.h index 8077c698e4..ccddd42d42 100644 --- a/sdk/objc/base/RTCVideoDecoder.h +++ 
b/sdk/objc/base/RTCVideoDecoder.h @@ -19,18 +19,19 @@ NS_ASSUME_NONNULL_BEGIN /** Callback block for decoder. */ -typedef void (^RTCVideoDecoderCallback)(RTCVideoFrame *frame); +typedef void (^RTCVideoDecoderCallback)(RTC_OBJC_TYPE(RTCVideoFrame) * frame); /** Protocol for decoder implementations. */ RTC_OBJC_EXPORT -@protocol RTCVideoDecoder +@protocol RTC_OBJC_TYPE +(RTCVideoDecoder) -- (void)setCallback:(RTCVideoDecoderCallback)callback; + - (void)setCallback : (RTCVideoDecoderCallback)callback; - (NSInteger)startDecodeWithNumberOfCores:(int)numberOfCores; - (NSInteger)releaseDecoder; -- (NSInteger)decode:(RTCEncodedImage *)encodedImage +- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)encodedImage missingFrames:(BOOL)missingFrames - codecSpecificInfo:(nullable id)info + codecSpecificInfo:(nullable id)info renderTimeMs:(int64_t)renderTimeMs; - (NSString *)implementationName; diff --git a/sdk/objc/base/RTCVideoDecoderFactory.h b/sdk/objc/base/RTCVideoDecoderFactory.h index 3e24153b82..8d90138521 100644 --- a/sdk/objc/base/RTCVideoDecoderFactory.h +++ b/sdk/objc/base/RTCVideoDecoderFactory.h @@ -16,12 +16,16 @@ NS_ASSUME_NONNULL_BEGIN -/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. */ +/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. + */ RTC_OBJC_EXPORT -@protocol RTCVideoDecoderFactory +@protocol RTC_OBJC_TYPE +(RTCVideoDecoderFactory) -- (nullable id)createDecoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? + - (nullable id)createDecoder + : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; +- (NSArray *) + supportedCodecs; // TODO(andersc): "supportedFormats" instead? 
@end diff --git a/sdk/objc/base/RTCVideoEncoder.h b/sdk/objc/base/RTCVideoEncoder.h index c5257674d8..7d1a7afd7f 100644 --- a/sdk/objc/base/RTCVideoEncoder.h +++ b/sdk/objc/base/RTCVideoEncoder.h @@ -21,20 +21,21 @@ NS_ASSUME_NONNULL_BEGIN /** Callback block for encoder. */ -typedef BOOL (^RTCVideoEncoderCallback)(RTCEncodedImage *frame, - id info, - RTCRtpFragmentationHeader *header); +typedef BOOL (^RTCVideoEncoderCallback)(RTC_OBJC_TYPE(RTCEncodedImage) * frame, + id info, + RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * header); /** Protocol for encoder implementations. */ RTC_OBJC_EXPORT -@protocol RTCVideoEncoder +@protocol RTC_OBJC_TYPE +(RTCVideoEncoder) -- (void)setCallback:(RTCVideoEncoderCallback)callback; -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings + - (void)setCallback : (RTCVideoEncoderCallback)callback; +- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores; - (NSInteger)releaseEncoder; -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(nullable id)info +- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame + codecSpecificInfo:(nullable id)info frameTypes:(NSArray *)frameTypes; - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate; - (NSString *)implementationName; @@ -42,7 +43,7 @@ RTC_OBJC_EXPORT /** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to * keep the QP from the encoded images within the given range. Returning nil from this function * disables quality scaling. 
*/ -- (nullable RTCVideoEncoderQpThresholds *)scalingSettings; +- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings; @end diff --git a/sdk/objc/base/RTCVideoEncoderFactory.h b/sdk/objc/base/RTCVideoEncoderFactory.h index 6ea78a54a0..b115b2ad39 100644 --- a/sdk/objc/base/RTCVideoEncoderFactory.h +++ b/sdk/objc/base/RTCVideoEncoderFactory.h @@ -20,24 +20,29 @@ NS_ASSUME_NONNULL_BEGIN webrtc::VideoEncoderFactory::VideoEncoderSelector. */ RTC_OBJC_EXPORT -@protocol RTCVideoEncoderSelector +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderSelector) -- (void)registerCurrentEncoderInfo:(RTCVideoCodecInfo *)info; -- (nullable RTCVideoCodecInfo *)encoderForBitrate:(NSInteger)bitrate; -- (nullable RTCVideoCodecInfo *)encoderForBrokenEncoder; + - (void)registerCurrentEncoderInfo : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; +- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:(NSInteger)bitrate; +- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBrokenEncoder; @end -/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. */ +/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. + */ RTC_OBJC_EXPORT -@protocol RTCVideoEncoderFactory +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderFactory) -- (nullable id)createEncoder:(RTCVideoCodecInfo *)info; -- (NSArray *)supportedCodecs; // TODO(andersc): "supportedFormats" instead? + - (nullable id)createEncoder + : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; +- (NSArray *) + supportedCodecs; // TODO(andersc): "supportedFormats" instead? 
@optional -- (NSArray *)implementations; -- (nullable id)encoderSelector; +- (NSArray *)implementations; +- (nullable id)encoderSelector; @end diff --git a/sdk/objc/base/RTCVideoEncoderQpThresholds.h b/sdk/objc/base/RTCVideoEncoderQpThresholds.h index 2b48f45ce0..1a6e9e88ab 100644 --- a/sdk/objc/base/RTCVideoEncoderQpThresholds.h +++ b/sdk/objc/base/RTCVideoEncoderQpThresholds.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */ RTC_OBJC_EXPORT -@interface RTCVideoEncoderQpThresholds : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) : NSObject - (instancetype)initWithThresholdsLow:(NSInteger)low high:(NSInteger)high; diff --git a/sdk/objc/base/RTCVideoEncoderQpThresholds.m b/sdk/objc/base/RTCVideoEncoderQpThresholds.m index 5bd06ffd8d..fb7012f44f 100644 --- a/sdk/objc/base/RTCVideoEncoderQpThresholds.m +++ b/sdk/objc/base/RTCVideoEncoderQpThresholds.m @@ -10,7 +10,7 @@ #import "RTCVideoEncoderQpThresholds.h" -@implementation RTCVideoEncoderQpThresholds +@implementation RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) @synthesize low = _low; @synthesize high = _high; diff --git a/sdk/objc/base/RTCVideoEncoderSettings.h b/sdk/objc/base/RTCVideoEncoderSettings.h index a9403f8dec..ae792eab71 100644 --- a/sdk/objc/base/RTCVideoEncoderSettings.h +++ b/sdk/objc/base/RTCVideoEncoderSettings.h @@ -21,7 +21,7 @@ typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) { /** Settings for encoder. Corresponds to webrtc::VideoCodec. 
*/ RTC_OBJC_EXPORT -@interface RTCVideoEncoderSettings : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderSettings) : NSObject @property(nonatomic, strong) NSString *name; diff --git a/sdk/objc/base/RTCVideoEncoderSettings.m b/sdk/objc/base/RTCVideoEncoderSettings.m index f68bc8cb56..f66cd2cf77 100644 --- a/sdk/objc/base/RTCVideoEncoderSettings.m +++ b/sdk/objc/base/RTCVideoEncoderSettings.m @@ -10,7 +10,7 @@ #import "RTCVideoEncoderSettings.h" -@implementation RTCVideoEncoderSettings +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSettings) @synthesize name = _name; @synthesize width = _width; diff --git a/sdk/objc/base/RTCVideoFrame.h b/sdk/objc/base/RTCVideoFrame.h index 9aca7433f3..f5638d27cf 100644 --- a/sdk/objc/base/RTCVideoFrame.h +++ b/sdk/objc/base/RTCVideoFrame.h @@ -22,11 +22,12 @@ typedef NS_ENUM(NSInteger, RTCVideoRotation) { RTCVideoRotation_270 = 270, }; -@protocol RTCVideoFrameBuffer; +@protocol RTC_OBJC_TYPE +(RTCVideoFrameBuffer); // RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame. RTC_OBJC_EXPORT -@interface RTCVideoFrame : NSObject +@interface RTC_OBJC_TYPE (RTCVideoFrame) : NSObject /** Width without rotation applied. */ @property(nonatomic, readonly) int width; @@ -41,7 +42,7 @@ RTC_OBJC_EXPORT /** Timestamp 90 kHz. */ @property(nonatomic, assign) int32_t timeStamp; -@property(nonatomic, readonly) id buffer; +@property(nonatomic, readonly) id buffer; - (instancetype)init NS_UNAVAILABLE; - (instancetype) new NS_UNAVAILABLE; @@ -71,14 +72,14 @@ RTC_OBJC_EXPORT /** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp. */ -- (instancetype)initWithBuffer:(id)frameBuffer +- (instancetype)initWithBuffer:(id)frameBuffer rotation:(RTCVideoRotation)rotation timeStampNs:(int64_t)timeStampNs; /** Return a frame that is guaranteed to be I420, i.e. it is possible to access * the YUV data on it. 
*/ -- (RTCVideoFrame *)newI420VideoFrame; +- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame; @end diff --git a/sdk/objc/base/RTCVideoFrame.mm b/sdk/objc/base/RTCVideoFrame.mm index 0a44b04e6b..e162238d73 100644 --- a/sdk/objc/base/RTCVideoFrame.mm +++ b/sdk/objc/base/RTCVideoFrame.mm @@ -13,7 +13,7 @@ #import "RTCI420Buffer.h" #import "RTCVideoFrameBuffer.h" -@implementation RTCVideoFrame { +@implementation RTC_OBJC_TYPE (RTCVideoFrame) { RTCVideoRotation _rotation; int64_t _timeStampNs; } @@ -37,10 +37,10 @@ return _timeStampNs; } -- (RTCVideoFrame *)newI420VideoFrame { - return [[RTCVideoFrame alloc] initWithBuffer:[_buffer toI420] - rotation:_rotation - timeStampNs:_timeStampNs]; +- (RTC_OBJC_TYPE(RTCVideoFrame) *)newI420VideoFrame { + return [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:[_buffer toI420] + rotation:_rotation + timeStampNs:_timeStampNs]; } - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer @@ -63,7 +63,7 @@ return nil; } -- (instancetype)initWithBuffer:(id)buffer +- (instancetype)initWithBuffer:(id)buffer rotation:(RTCVideoRotation)rotation timeStampNs:(int64_t)timeStampNs { if (self = [super init]) { diff --git a/sdk/objc/base/RTCVideoFrameBuffer.h b/sdk/objc/base/RTCVideoFrameBuffer.h index bb9e6fba63..82d057eea0 100644 --- a/sdk/objc/base/RTCVideoFrameBuffer.h +++ b/sdk/objc/base/RTCVideoFrameBuffer.h @@ -14,16 +14,18 @@ NS_ASSUME_NONNULL_BEGIN -@protocol RTCI420Buffer; +@protocol RTC_OBJC_TYPE +(RTCI420Buffer); // RTCVideoFrameBuffer is an ObjectiveC version of webrtc::VideoFrameBuffer. 
RTC_OBJC_EXPORT -@protocol RTCVideoFrameBuffer +@protocol RTC_OBJC_TYPE +(RTCVideoFrameBuffer) -@property(nonatomic, readonly) int width; + @property(nonatomic, readonly) int width; @property(nonatomic, readonly) int height; -- (id)toI420; +- (id)toI420; @end diff --git a/sdk/objc/base/RTCVideoRenderer.h b/sdk/objc/base/RTCVideoRenderer.h index 7b359a35c2..0f763295ad 100644 --- a/sdk/objc/base/RTCVideoRenderer.h +++ b/sdk/objc/base/RTCVideoRenderer.h @@ -17,23 +17,26 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCVideoFrame; +@class RTC_OBJC_TYPE(RTCVideoFrame); RTC_OBJC_EXPORT -@protocol RTCVideoRenderer +@protocol RTC_OBJC_TYPE +(RTCVideoRenderer) -/** The size of the frame. */ -- (void)setSize:(CGSize)size; + /** The size of the frame. */ + - (void)setSize : (CGSize)size; /** The frame to be displayed. */ -- (void)renderFrame:(nullable RTCVideoFrame *)frame; +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end RTC_OBJC_EXPORT -@protocol RTCVideoViewDelegate +@protocol RTC_OBJC_TYPE +(RTCVideoViewDelegate) -- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size; + - (void)videoView : (id)videoView didChangeVideoSize + : (CGSize)size; @end diff --git a/sdk/objc/base/RTCYUVPlanarBuffer.h b/sdk/objc/base/RTCYUVPlanarBuffer.h index 8ceb66c99d..be01b915f5 100644 --- a/sdk/objc/base/RTCYUVPlanarBuffer.h +++ b/sdk/objc/base/RTCYUVPlanarBuffer.h @@ -17,9 +17,10 @@ NS_ASSUME_NONNULL_BEGIN /** Protocol for RTCVideoFrameBuffers containing YUV planar data. 
*/ RTC_OBJC_EXPORT -@protocol RTCYUVPlanarBuffer +@protocol RTC_OBJC_TYPE +(RTCYUVPlanarBuffer) -@property(nonatomic, readonly) int chromaWidth; + @property(nonatomic, readonly) int chromaWidth; @property(nonatomic, readonly) int chromaHeight; @property(nonatomic, readonly) const uint8_t *dataY; @property(nonatomic, readonly) const uint8_t *dataU; diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm index c81ce1b916..b2753f282e 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm +++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm @@ -13,17 +13,18 @@ #import "base/RTCLogging.h" -@implementation RTCAudioSession (Configuration) +@implementation RTC_OBJC_TYPE (RTCAudioSession) +(Configuration) -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration - error:(NSError **)outError { + - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error + : (NSError **)outError { return [self setConfiguration:configuration active:NO shouldSetActive:NO error:outError]; } -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration active:(BOOL)active error:(NSError **)outError { return [self setConfiguration:configuration @@ -34,7 +35,7 @@ #pragma mark - Private -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration active:(BOOL)active shouldSetActive:(BOOL)shouldSetActive error:(NSError **)outError { diff --git a/sdk/objc/components/audio/RTCAudioSession+Private.h b/sdk/objc/components/audio/RTCAudioSession+Private.h index 8cf9339377..4c1eb1c44a 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Private.h +++ b/sdk/objc/components/audio/RTCAudioSession+Private.h @@ -12,14 +12,15 @@ NS_ASSUME_NONNULL_BEGIN -@class 
RTCAudioSessionConfiguration; +@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration); -@interface RTCAudioSession () +@interface RTC_OBJC_TYPE (RTCAudioSession) +() -/** Number of times setActive:YES has succeeded without a balanced call to - * setActive:NO. - */ -@property(nonatomic, readonly) int activationCount; + /** Number of times setActive:YES has succeeded without a balanced call to + * setActive:NO. + */ + @property(nonatomic, readonly) int activationCount; /** The number of times |beginWebRTCSession| was called without a balanced call * to |endWebRTCSession|. @@ -40,7 +41,7 @@ NS_ASSUME_NONNULL_BEGIN * the list. This delegate will be notified before other delegates of * audio events. */ -- (void)pushDelegate:(id)delegate; +- (void)pushDelegate:(id)delegate; /** Signals RTCAudioSession that a WebRTC session is about to begin and * audio configuration is needed. Will configure the audio session for WebRTC diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h index b5bba2f21e..f917e327a4 100644 --- a/sdk/objc/components/audio/RTCAudioSession.h +++ b/sdk/objc/components/audio/RTCAudioSession.h @@ -21,78 +21,81 @@ extern NSInteger const kRTCAudioSessionErrorLockRequired; /** Unknown configuration error occurred. */ extern NSInteger const kRTCAudioSessionErrorConfiguration; -@class RTCAudioSession; -@class RTCAudioSessionConfiguration; +@class RTC_OBJC_TYPE(RTCAudioSession); +@class RTC_OBJC_TYPE(RTCAudioSessionConfiguration); // Surfaces AVAudioSession events. WebRTC will listen directly for notifications // from AVAudioSession and handle them before calling these delegate methods, // at which point applications can perform additional processing if required. RTC_OBJC_EXPORT -@protocol RTCAudioSessionDelegate +@protocol RTC_OBJC_TYPE +(RTCAudioSessionDelegate) -@optional + @optional /** Called on a system notification thread when AVAudioSession starts an * interruption event. 
*/ -- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session; +- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called on a system notification thread when AVAudioSession ends an * interruption event. */ -- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session +- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session shouldResumeSession:(BOOL)shouldResumeSession; /** Called on a system notification thread when AVAudioSession changes the * route. */ -- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session +- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session reason:(AVAudioSessionRouteChangeReason)reason previousRoute:(AVAudioSessionRouteDescription *)previousRoute; /** Called on a system notification thread when AVAudioSession media server * terminates. */ -- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session; +- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called on a system notification thread when AVAudioSession media server * restarts. */ -- (void)audioSessionMediaServerReset:(RTCAudioSession *)session; +- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session; // TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification. -- (void)audioSession:(RTCAudioSession *)session didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session + didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord; /** Called on a WebRTC thread when the audio device is notified to begin * playback or recording. */ -- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session; +- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called on a WebRTC thread when the audio device is notified to stop * playback or recording. 
*/ -- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session; +- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session; /** Called when the AVAudioSession output volume value changes. */ -- (void)audioSession:(RTCAudioSession *)audioSession didChangeOutputVolume:(float)outputVolume; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession + didChangeOutputVolume:(float)outputVolume; /** Called when the audio device detects a playout glitch. The argument is the * number of glitches detected so far in the current audio playout session. */ -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches; /** Called when the audio session is about to change the active state. */ -- (void)audioSession:(RTCAudioSession *)audioSession willSetActive:(BOOL)active; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession willSetActive:(BOOL)active; /** Called after the audio session sucessfully changed the active state. */ -- (void)audioSession:(RTCAudioSession *)audioSession didSetActive:(BOOL)active; +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didSetActive:(BOOL)active; /** Called after the audio session failed to change the active state. */ -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession failedToSetActive:(BOOL)active error:(NSError *)error; @@ -103,10 +106,11 @@ RTC_OBJC_EXPORT * case of this is when CallKit activates the audio session for the application */ RTC_OBJC_EXPORT -@protocol RTCAudioSessionActivationDelegate +@protocol RTC_OBJC_TYPE +(RTCAudioSessionActivationDelegate) -/** Called when the audio session is activated outside of the app by iOS. */ -- (void)audioSessionDidActivate:(AVAudioSession *)session; + /** Called when the audio session is activated outside of the app by iOS. 
*/ + - (void)audioSessionDidActivate : (AVAudioSession *)session; /** Called when the audio session is deactivated outside of the app by iOS. */ - (void)audioSessionDidDeactivate:(AVAudioSession *)session; @@ -121,7 +125,7 @@ RTC_OBJC_EXPORT * activated only once. See |setActive:error:|. */ RTC_OBJC_EXPORT -@interface RTCAudioSession : NSObject +@interface RTC_OBJC_TYPE (RTCAudioSession) : NSObject /** Convenience property to access the AVAudioSession singleton. Callers should * not call setters on AVAudioSession directly, but other method invocations @@ -196,9 +200,9 @@ RTC_OBJC_EXPORT - (instancetype)init NS_UNAVAILABLE; /** Adds a delegate, which is held weakly. */ -- (void)addDelegate:(id)delegate; +- (void)addDelegate:(id)delegate; /** Removes an added delegate. */ -- (void)removeDelegate:(id)delegate; +- (void)removeDelegate:(id)delegate; /** Request exclusive access to the audio session for configuration. This call * will block if the lock is held by another object. @@ -237,19 +241,21 @@ RTC_OBJC_EXPORT error:(NSError **)outError; @end -@interface RTCAudioSession (Configuration) +@interface RTC_OBJC_TYPE (RTCAudioSession) +(Configuration) -/** Applies the configuration to the current session. Attempts to set all - * properties even if previous ones fail. Only the last error will be - * returned. - * |lockForConfiguration| must be called first. - */ -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration error:(NSError **)outError; + /** Applies the configuration to the current session. Attempts to set all + * properties even if previous ones fail. Only the last error will be + * returned. + * |lockForConfiguration| must be called first. + */ + - (BOOL)setConfiguration : (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration error + : (NSError **)outError; /** Convenience method that calls both setConfiguration and setActive. * |lockForConfiguration| must be called first. 
*/ -- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration +- (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration active:(BOOL)active error:(NSError **)outError; diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 260529db7e..74b57acd61 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -21,20 +21,20 @@ #import "RTCAudioSessionConfiguration.h" #import "base/RTCLogging.h" - -NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession"; +NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)"; NSInteger const kRTCAudioSessionErrorLockRequired = -1; NSInteger const kRTCAudioSessionErrorConfiguration = -2; NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; -@interface RTCAudioSession () -@property(nonatomic, readonly) std::vector<__weak id > delegates; +@interface RTC_OBJC_TYPE (RTCAudioSession) +() @property(nonatomic, + readonly) std::vector<__weak id > delegates; @end // This class needs to be thread-safe because it is accessed from many threads. // TODO(tkchin): Consider more granular locking. We're not expecting a lot of // lock contention so coarse locks should be fine for now. 
-@implementation RTCAudioSession { +@implementation RTC_OBJC_TYPE (RTCAudioSession) { rtc::CriticalSection _crit; AVAudioSession *_session; volatile int _activationCount; @@ -54,7 +54,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; + (instancetype)sharedInstance { static dispatch_once_t onceToken; - static RTCAudioSession *sharedInstance = nil; + static RTC_OBJC_TYPE(RTCAudioSession) *sharedInstance = nil; dispatch_once(&onceToken, ^{ sharedInstance = [[self alloc] init]; }); @@ -102,9 +102,9 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; [_session addObserver:self forKeyPath:kRTCAudioSessionOutputVolumeSelector options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld - context:(__bridge void*)RTCAudioSession.class]; + context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; - RTCLog(@"RTCAudioSession (%p): init.", self); + RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self); } return self; } @@ -113,25 +113,24 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; [[NSNotificationCenter defaultCenter] removeObserver:self]; [_session removeObserver:self forKeyPath:kRTCAudioSessionOutputVolumeSelector - context:(__bridge void*)RTCAudioSession.class]; - RTCLog(@"RTCAudioSession (%p): dealloc.", self); + context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; + RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self); } - (NSString *)description { - NSString *format = - @"RTCAudioSession: {\n" - " category: %@\n" - " categoryOptions: %ld\n" - " mode: %@\n" - " isActive: %d\n" - " sampleRate: %.2f\n" - " IOBufferDuration: %f\n" - " outputNumberOfChannels: %ld\n" - " inputNumberOfChannels: %ld\n" - " outputLatency: %f\n" - " inputLatency: %f\n" - " outputVolume: %f\n" - "}"; + NSString *format = @"RTC_OBJC_TYPE(RTCAudioSession): {\n" + " category: %@\n" + " categoryOptions: %ld\n" + " mode: %@\n" + " isActive: %d\n" + " sampleRate: %.2f\n" + " 
IOBufferDuration: %f\n" + " outputNumberOfChannels: %ld\n" + " inputNumberOfChannels: %ld\n" + " outputLatency: %f\n" + " inputLatency: %f\n" + " outputVolume: %f\n" + "}"; NSString *description = [NSString stringWithFormat:format, self.category, (long)self.categoryOptions, self.mode, self.isActive, self.sampleRate, self.IOBufferDuration, @@ -206,7 +205,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; } // TODO(tkchin): Check for duplicates. -- (void)addDelegate:(id)delegate { +- (void)addDelegate:(id)delegate { RTCLog(@"Adding delegate: (%p)", delegate); if (!delegate) { return; @@ -217,7 +216,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; } } -- (void)removeDelegate:(id)delegate { +- (void)removeDelegate:(id)delegate { RTCLog(@"Removing delegate: (%p)", delegate); if (!delegate) { return; @@ -621,7 +620,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; return error; } -- (std::vector<__weak id >)delegates { +- (std::vector<__weak id >)delegates { @synchronized(self) { // Note: this returns a copy. return _delegates; @@ -629,7 +628,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; } // TODO(tkchin): check for duplicates. -- (void)pushDelegate:(id)delegate { +- (void)pushDelegate:(id)delegate { @synchronized(self) { _delegates.insert(_delegates.begin(), delegate); } @@ -687,7 +686,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; // acquire lock if it hasn't already been called. if (!self.isLocked) { if (outError) { - *outError = [RTCAudioSession lockError]; + *outError = [RTC_OBJC_TYPE(RTCAudioSession) lockError]; } return NO; } @@ -730,8 +729,8 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; // Configure the AVAudioSession and activate it. // Provide an error even if there isn't one so we can log it. 
NSError *error = nil; - RTCAudioSessionConfiguration *webRTCConfig = - [RTCAudioSessionConfiguration webRTCConfiguration]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig = + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration]; if (![self setConfiguration:webRTCConfig active:YES error:&error]) { RTCLogError(@"Failed to set WebRTC audio configuration: %@", error.localizedDescription); @@ -866,7 +865,7 @@ NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; ofObject:(id)object change:(NSDictionary *)change context:(void *)context { - if (context == (__bridge void*)RTCAudioSession.class) { + if (context == (__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class) { if (object == _session) { NSNumber *newVolume = change[NSKeyValueChangeNewKey]; RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue); diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h index 9f3765da22..4582b80557 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h @@ -23,7 +23,7 @@ RTC_EXTERN const double kRTCAudioSessionLowComplexityIOBufferDuration; // Struct to hold configuration values. RTC_OBJC_EXPORT -@interface RTCAudioSessionConfiguration : NSObject +@interface RTC_OBJC_TYPE (RTCAudioSessionConfiguration) : NSObject @property(nonatomic, strong) NSString *category; @property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions; @@ -41,7 +41,7 @@ RTC_OBJC_EXPORT /** Returns the configuration that WebRTC needs. */ + (instancetype)webRTCConfiguration; /** Provide a way to override the default configuration. 
*/ -+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration; ++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration; @end diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 2247e65ab5..39e9ac13ec 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -51,9 +51,9 @@ const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02; // TODO(henrika): monitor this size and determine if it should be modified. const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06; -static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil; +static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil; -@implementation RTCAudioSessionConfiguration +@implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) @synthesize category = _category; @synthesize categoryOptions = _categoryOptions; @@ -105,9 +105,9 @@ static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil; } + (instancetype)currentConfiguration { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; - RTCAudioSessionConfiguration *config = - [[RTCAudioSessionConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *config = + [[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init]; config.category = session.category; config.categoryOptions = session.categoryOptions; config.mode = session.mode; @@ -120,11 +120,11 @@ static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil; + (instancetype)webRTCConfiguration { @synchronized(self) { - return (RTCAudioSessionConfiguration *)gWebRTCConfiguration; + return (RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)gWebRTCConfiguration; } } -+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration 
*)configuration { ++ (void)setWebRTCConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configuration { @synchronized(self) { gWebRTCConfiguration = configuration; } diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h index 7ca2d757eb..e28f26f9ae 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h @@ -19,7 +19,7 @@ class AudioSessionObserver; /** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate * methods on the AudioSessionObserver. */ -@interface RTCNativeAudioSessionDelegateAdapter : NSObject +@interface RTCNativeAudioSessionDelegateAdapter : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index aef97b9fe5..daddf314a4 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -26,20 +26,20 @@ return self; } -#pragma mark - RTCAudioSessionDelegate +#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate) -- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session { +- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session { _observer->OnInterruptionBegin(); } -- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session +- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session shouldResumeSession:(BOOL)shouldResumeSession { _observer->OnInterruptionEnd(); } -- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session - reason:(AVAudioSessionRouteChangeReason)reason - previousRoute:(AVAudioSessionRouteDescription *)previousRoute { +- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session + 
reason:(AVAudioSessionRouteChangeReason)reason + previousRoute:(AVAudioSessionRouteDescription *)previousRoute { switch (reason) { case AVAudioSessionRouteChangeReasonUnknown: case AVAudioSessionRouteChangeReasonNewDeviceAvailable: @@ -64,24 +64,24 @@ } } -- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session { +- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionMediaServerReset:(RTCAudioSession *)session { +- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSession:(RTCAudioSession *)session +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)session didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord { _observer->OnCanPlayOrRecordChange(canPlayOrRecord); } -- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session { +- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session { +- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didChangeOutputVolume:(float)outputVolume { _observer->OnChangedOutputVolume(); } diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index 2b5e56f4ec..fed5a37827 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -17,10 +17,10 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -// Camera capture that implements RTCVideoCapturer. Delivers frames to a RTCVideoCapturerDelegate -// (usually RTCVideoSource). +// Camera capture that implements RTCVideoCapturer. Delivers frames to a +// RTCVideoCapturerDelegate (usually RTCVideoSource). 
NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") -@interface RTCCameraVideoCapturer : RTCVideoCapturer +@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) // Capture session that is used for capturing. Valid from initialization to dealloc. @property(readonly, nonatomic) AVCaptureSession *captureSession; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 5cfb616f5f..6edcec88e9 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -25,8 +25,9 @@ const int64_t kNanosecondsPerSecond = 1000000000; -@interface RTCCameraVideoCapturer () -@property(nonatomic, readonly) dispatch_queue_t frameQueue; +@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) +() @property(nonatomic, + readonly) dispatch_queue_t frameQueue; @property(nonatomic, strong) AVCaptureDevice *currentDevice; @property(nonatomic, assign) BOOL hasRetriedOnFatalError; @property(nonatomic, assign) BOOL isRunning; @@ -34,7 +35,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; @property(nonatomic, assign) BOOL willBeRunning; @end -@implementation RTCCameraVideoCapturer { +@implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { AVCaptureVideoDataOutput *_videoDataOutput; AVCaptureSession *_captureSession; FourCharCode _preferredOutputPixelFormat; @@ -57,12 +58,12 @@ const int64_t kNanosecondsPerSecond = 1000000000; return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]]; } -- (instancetype)initWithDelegate:(__weak id)delegate { +- (instancetype)initWithDelegate:(__weak id)delegate { return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]]; } // This initializer is used for testing. 
-- (instancetype)initWithDelegate:(__weak id)delegate +- (instancetype)initWithDelegate:(__weak id)delegate captureSession:(AVCaptureSession *)captureSession { if (self = [super initWithDelegate:delegate]) { // Create the capture session and all relevant inputs and outputs. We need @@ -110,9 +111,9 @@ const int64_t kNanosecondsPerSecond = 1000000000; } - (void)dealloc { - NSAssert( - !_willBeRunning, - @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?"); + NSAssert(!_willBeRunning, + @"Session was still running in RTC_OBJC_TYPE(RTCCameraVideoCapturer) dealloc. Forgot to " + @"call stopCapture?"); [[NSNotificationCenter defaultCenter] removeObserver:self]; } @@ -154,7 +155,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; fps:(NSInteger)fps completionHandler:(nullable void (^)(NSError *))completionHandler { _willBeRunning = YES; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps); @@ -196,7 +197,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { _willBeRunning = NO; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ RTCLogInfo("Stop"); @@ -225,10 +226,10 @@ const int64_t kNanosecondsPerSecond = 1000000000; #if TARGET_OS_IPHONE - (void)deviceOrientationDidChange:(NSNotification *)notification { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - [self updateOrientation]; - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + [self updateOrientation]; + }]; } #endif @@ -287,12 +288,14 @@ const int64_t kNanosecondsPerSecond = 1000000000; _rotation = RTCVideoRotation_0; #endif - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + 
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * kNanosecondsPerSecond; - RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer - rotation:_rotation - timeStampNs:timeStampNs]; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:_rotation + timeStampNs:timeStampNs]; [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; } @@ -343,29 +346,29 @@ const int64_t kNanosecondsPerSecond = 1000000000; NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey]; RTCLogError(@"Capture session runtime error: %@", error); - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ #if TARGET_OS_IPHONE - if (error.code == AVErrorMediaServicesWereReset) { - [self handleNonFatalError]; - } else { - [self handleFatalError]; - } + if (error.code == AVErrorMediaServicesWereReset) { + [self handleNonFatalError]; + } else { + [self handleFatalError]; + } #else - [self handleFatalError]; + [self handleFatalError]; #endif - }]; + }]; } - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { RTCLog(@"Capture session started."); - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - // If we successfully restarted after an unknown error, - // allow future retries on fatal errors. - self.hasRetriedOnFatalError = NO; - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + // If we successfully restarted after an unknown + // error, allow future retries on fatal errors. 
+ self.hasRetriedOnFatalError = NO; + }]; } - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { @@ -373,7 +376,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; } - (void)handleFatalError { - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ if (!self.hasRetriedOnFatalError) { @@ -387,13 +390,13 @@ const int64_t kNanosecondsPerSecond = 1000000000; } - (void)handleNonFatalError { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - RTCLog(@"Restarting capture session after error."); - if (self.isRunning) { - [self.captureSession startRunning]; - } - }]; + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + RTCLog(@"Restarting capture session after error."); + if (self.isRunning) { + [self.captureSession startRunning]; + } + }]; } #if TARGET_OS_IPHONE @@ -401,13 +404,14 @@ const int64_t kNanosecondsPerSecond = 1000000000; #pragma mark - UIApplication notifications - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { - [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - if (self.isRunning && !self.captureSession.isRunning) { - RTCLog(@"Restarting capture session on active."); - [self.captureSession startRunning]; - } - }]; + [RTC_OBJC_TYPE(RTCDispatcher) + dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + if (self.isRunning && !self.captureSession.isRunning) { + RTCLog(@"Restarting capture session on active."); + [self.captureSession startRunning]; + } + }]; } #endif // TARGET_OS_IPHONE @@ -448,7 +452,8 @@ const int64_t kNanosecondsPerSecond = 1000000000; // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the // device with the most efficient output format first. Find the first format that we support. 
- NSSet *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats]; + NSSet *supportedPixelFormats = + [RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats]; NSMutableOrderedSet *availablePixelFormats = [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes]; [availablePixelFormats intersectSet:supportedPixelFormats]; @@ -465,7 +470,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; - (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format { FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription); - if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) { + if (![[RTC_OBJC_TYPE(RTCCVPixelBuffer) supportedPixelFormats] containsObject:@(mediaSubType)]) { mediaSubType = _preferredOutputPixelFormat; } @@ -479,7 +484,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; #pragma mark - Private, called inside capture queue - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"updateDeviceCaptureFormat must be called on the capture queue."); @try { _currentDevice.activeFormat = format; @@ -491,7 +496,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; } - (void)reconfigureCaptureSessionInput { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"reconfigureCaptureSessionInput must be called on the capture queue."); NSError *error = nil; AVCaptureDeviceInput *input = @@ -513,7 +518,7 @@ const int64_t kNanosecondsPerSecond = 1000000000; } - (void)updateOrientation { - NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession], + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], 
@"updateOrientation must be called on the capture queue."); #if TARGET_OS_IPHONE _orientation = [UIDevice currentDevice].orientation; diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.h b/sdk/objc/components/capturer/RTCFileVideoCapturer.h index 0782588d9c..19262c64cf 100644 --- a/sdk/objc/components/capturer/RTCFileVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.h @@ -27,7 +27,7 @@ typedef void (^RTCFileVideoCapturerErrorBlock)(NSError *error); RTC_OBJC_EXPORT NS_CLASS_AVAILABLE_IOS(10) -@interface RTCFileVideoCapturer : RTCVideoCapturer +@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) /** * Starts asynchronous capture of frames from video file. diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/sdk/objc/components/capturer/RTCFileVideoCapturer.m index 2c82ba1ccf..4c39ccda3a 100644 --- a/sdk/objc/components/capturer/RTCFileVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.m @@ -15,7 +15,8 @@ #import "components/video_frame_buffer/RTCCVPixelBuffer.h" #include "rtc_base/system/gcd_helpers.h" -NSString *const kRTCFileVideoCapturerErrorDomain = @"org.webrtc.RTCFileVideoCapturer"; +NSString *const kRTCFileVideoCapturerErrorDomain = + @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)"; typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) { RTCFileVideoCapturerErrorCode_CapturerRunning = 2000, @@ -28,12 +29,12 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) { RTCFileVideoCapturerStatusStopped }; -@interface RTCFileVideoCapturer () -@property(nonatomic, assign) CMTime lastPresentationTime; +@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) +() @property(nonatomic, assign) CMTime lastPresentationTime; @property(nonatomic, strong) NSURL *fileURL; @end -@implementation RTCFileVideoCapturer { +@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) { AVAssetReader *_reader; AVAssetReaderTrackOutput *_outTrack; RTCFileVideoCapturerStatus _status; @@ -182,11 
+183,14 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) { return; } - RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; NSTimeInterval timeStampSeconds = CACurrentMediaTime(); int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC); - RTCVideoFrame *videoFrame = - [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs]; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:0 + timeStampNs:timeStampNs]; CFRelease(sampleBuffer); dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm index 6cd7ff3055..f4c76fa313 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm @@ -97,7 +97,7 @@ static NSString *const shaderSource = MTL_STRINGIFY( cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { *width = frame.width; *height = frame.height; *cropWidth = frame.width; @@ -106,7 +106,7 @@ static NSString *const shaderSource = MTL_STRINGIFY( *cropY = 0; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { if (![super setupTexturesForFrame:frame]) { return NO; } @@ -116,7 +116,7 @@ static NSString *const shaderSource = MTL_STRINGIFY( return NO; } - id buffer = [frame.buffer toI420]; + id buffer = [frame.buffer toI420]; // Luma (y) texture. 
if (!_descriptor || _width != frame.width || _height != frame.height) { diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h index 7b615396d0..f70e2ad5ee 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -15,9 +15,9 @@ NS_AVAILABLE_MAC(10.11) RTC_OBJC_EXPORT -@interface RTCMTLNSVideoView : NSView +@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; + (BOOL)isMetalAvailable; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m index ac5294e4c0..625fb1caa7 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m @@ -17,13 +17,13 @@ #import "RTCMTLI420Renderer.h" -@interface RTCMTLNSVideoView () -@property(nonatomic) id renderer; +@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) +() @property(nonatomic) id renderer; @property(nonatomic, strong) MTKView *metalView; -@property(atomic, strong) RTCVideoFrame *videoFrame; +@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @end -@implementation RTCMTLNSVideoView { +@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { id _renderer; } @@ -102,7 +102,7 @@ - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - (void)setSize:(CGSize)size { _metalView.drawableSize = size; @@ -112,7 +112,7 @@ [_metalView draw]; } -- (void)renderFrame:(nullable RTCVideoFrame *)frame { +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { if (frame == nil) { return; } diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm index 98835cb518..7b037c6dbc 100644 --- 
a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -95,8 +95,8 @@ static NSString *const shaderSource = MTL_STRINGIFY( cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { - RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer); *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer); *cropWidth = pixelBuffer.cropWidth; @@ -105,12 +105,12 @@ static NSString *const shaderSource = MTL_STRINGIFY( *cropY = pixelBuffer.cropY; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { - RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]); +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); if (![super setupTexturesForFrame:frame]) { return NO; } - CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer; + CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer; id lumaTexture = nil; id chromaTexture = nil; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm index eb4c2ba106..c6adcd0fb5 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -93,8 +93,8 @@ static NSString *const shaderSource = MTL_STRINGIFY( cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { - RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame 
{ + RTC_OBJC_TYPE(RTCCVPixelBuffer) *pixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; *width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer); *height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer); *cropWidth = pixelBuffer.cropWidth; @@ -103,12 +103,12 @@ static NSString *const shaderSource = MTL_STRINGIFY( *cropY = pixelBuffer.cropY; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { - RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]); +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + RTC_DCHECK([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); if (![super setupTexturesForFrame:frame]) { return NO; } - CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer; + CVPixelBufferRef pixelBuffer = ((RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer).pixelBuffer; id gpuTexture = nil; CVMetalTextureRef textureOut = nullptr; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h index f442886b79..916d4d4430 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN @interface RTCMTLRenderer (Private) - (nullable id)currentMetalDevice; - (NSString *)shaderSource; -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame; +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; - (void)uploadTexturesToRenderEncoder:(id)renderEncoder; - (void)getWidth:(nonnull int *)width height:(nonnull int *)height @@ -27,7 +27,7 @@ NS_ASSUME_NONNULL_BEGIN cropHeight:(nonnull int *)cropHeight cropX:(nonnull int *)cropX cropY:(nonnull int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame; + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h 
b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index 9c1f3719b3..aa31545973 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -28,7 +28,7 @@ NS_ASSUME_NONNULL_BEGIN * * @param frame The frame to be rendered. */ -- (void)drawFrame:(RTCVideoFrame *)frame; +- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; /** * Sets the provided view as rendering destination if possible. diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index 63cf225bac..e8d161330f 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -167,11 +167,11 @@ static const NSInteger kMaxInflightBuffers = 1; cropHeight:(int *)cropHeight cropX:(int *)cropX cropY:(int *)cropY - ofFrame:(nonnull RTCVideoFrame *)frame { + ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { RTC_NOTREACHED() << "Virtual method not implemented in subclass."; } -- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame { +- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { // Apply rotation override if set. RTCVideoRotation rotation; NSValue *rotationOverride = self.rotationOverride; @@ -311,7 +311,7 @@ static const NSInteger kMaxInflightBuffers = 1; #pragma mark - RTCMTLRenderer -- (void)drawFrame:(RTCVideoFrame *)frame { +- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { @autoreleasepool { // Wait until the inflight (curently sent to GPU) command buffer // has completed the GPU work. 
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h index 36cb144a13..5678112ade 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -27,9 +27,9 @@ NS_ASSUME_NONNULL_BEGIN NS_CLASS_AVAILABLE_IOS(9) RTC_OBJC_EXPORT -@interface RTCMTLVideoView : UIView +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; @property(nonatomic) UIViewContentMode videoContentMode; diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index c9a622e484..f5be7c061c 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -29,17 +29,17 @@ #define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") #define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") -@interface RTCMTLVideoView () -@property(nonatomic) RTCMTLI420Renderer *rendererI420; +@interface RTC_OBJC_TYPE (RTCMTLVideoView) +() @property(nonatomic) RTCMTLI420Renderer *rendererI420; @property(nonatomic) RTCMTLNV12Renderer *rendererNV12; @property(nonatomic) RTCMTLRGBRenderer *rendererRGB; @property(nonatomic) MTKView *metalView; -@property(atomic) RTCVideoFrame *videoFrame; +@property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic) CGSize videoFrameSize; @property(nonatomic) int64_t lastFrameTimeNs; @end -@implementation RTCMTLVideoView +@implementation RTC_OBJC_TYPE (RTCMTLVideoView) @synthesize delegate = _delegate; @synthesize rendererI420 = _rendererI420; @@ -110,9 +110,10 @@ } - (void)configure { - NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not availiable on this device"); + NSAssert([RTC_OBJC_TYPE(RTCMTLVideoView) isMetalAvailable], + @"Metal not availiable on this device"); - self.metalView = [RTCMTLVideoView 
createMetalView:self.bounds]; + self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; self.metalView.delegate = self; self.metalView.contentMode = UIViewContentModeScaleAspectFill; [self addSubview:self.metalView]; @@ -140,7 +141,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance."); - RTCVideoFrame *videoFrame = self.videoFrame; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = self.videoFrame; // Skip rendering if we've already rendered this frame. if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) { return; @@ -151,12 +152,12 @@ } RTCMTLRenderer *renderer; - if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { - RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer; + if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer; const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) { if (!self.rendererRGB) { - self.rendererRGB = [RTCMTLVideoView createRGBRenderer]; + self.rendererRGB = [RTC_OBJC_TYPE(RTCMTLVideoView) createRGBRenderer]; if (![self.rendererRGB addRenderingDestination:self.metalView]) { self.rendererRGB = nil; RTCLogError(@"Failed to create RGB renderer"); @@ -166,7 +167,7 @@ renderer = self.rendererRGB; } else { if (!self.rendererNV12) { - self.rendererNV12 = [RTCMTLVideoView createNV12Renderer]; + self.rendererNV12 = [RTC_OBJC_TYPE(RTCMTLVideoView) createNV12Renderer]; if (![self.rendererNV12 addRenderingDestination:self.metalView]) { self.rendererNV12 = nil; RTCLogError(@"Failed to create NV12 renderer"); @@ -177,7 +178,7 @@ } } else { if (!self.rendererI420) { - self.rendererI420 = [RTCMTLVideoView createI420Renderer]; + self.rendererI420 = [RTC_OBJC_TYPE(RTCMTLVideoView) 
createI420Renderer]; if (![self.rendererI420 addRenderingDestination:self.metalView]) { self.rendererI420 = nil; RTCLogError(@"Failed to create I420 renderer"); @@ -236,12 +237,12 @@ } } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - (void)setSize:(CGSize)size { - __weak RTCMTLVideoView *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCMTLVideoView) *weakSelf = self; dispatch_async(dispatch_get_main_queue(), ^{ - RTCMTLVideoView *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCMTLVideoView) *strongSelf = weakSelf; strongSelf.videoFrameSize = size; CGSize drawableSize = [strongSelf drawableSize]; @@ -252,7 +253,7 @@ }); } -- (void)renderFrame:(nullable RTCVideoFrame *)frame { +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { if (!self.isEnabled) { return; } diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h index 034a22bdd0..71a073ab21 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -12,11 +12,11 @@ NS_ASSUME_NONNULL_BEGIN -/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView and - * RTCEAGLVideoView if no external shader is specified. This shader will render +/** Default RTCVideoViewShading that will be used in RTCNSGLVideoView + * and RTCEAGLVideoView if no external shader is specified. This shader will render * the video in a rectangle without any color or geometric transformations. 
*/ -@interface RTCDefaultShader : NSObject +@interface RTCDefaultShader : NSObject @end diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h index 73cd3a1a26..24b26cd602 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.h @@ -17,23 +17,25 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCEAGLVideoView; +@class RTC_OBJC_TYPE(RTCEAGLVideoView); /** - * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames in its - * bounds using OpenGLES 2.0 or OpenGLES 3.0. + * RTCEAGLVideoView is an RTCVideoRenderer which renders video frames + * in its bounds using OpenGLES 2.0 or OpenGLES 3.0. */ RTC_OBJC_EXPORT NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.") -@interface RTCEAGLVideoView : UIView +@interface RTC_OBJC_TYPE (RTCEAGLVideoView) : UIView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; - (instancetype)initWithFrame:(CGRect)frame - shader:(id)shader NS_DESIGNATED_INITIALIZER; + shader:(id)shader + NS_DESIGNATED_INITIALIZER; - (instancetype)initWithCoder:(NSCoder *)aDecoder - shader:(id)shader NS_DESIGNATED_INITIALIZER; + shader:(id)shader + NS_DESIGNATED_INITIALIZER; /** @abstract Wrapped RTCVideoRotation, or nil. */ diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m index 6a01d48f32..a3435a7815 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -21,7 +21,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -// RTCEAGLVideoView wraps a GLKView which is setup with +// RTC_OBJC_TYPE(RTCEAGLVideoView) wraps a GLKView which is setup with // enableSetNeedsDisplay = NO for the purpose of gaining control of // exactly when to call -[GLKView display]. 
This need for extra // control is required to avoid triggering method calls on GLKView @@ -30,23 +30,24 @@ // error GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT. -[GLKView display] is // the method that will trigger the binding of the render // buffer. Because the standard behaviour of -[UIView setNeedsDisplay] -// is disabled for the reasons above, the RTCEAGLVideoView maintains +// is disabled for the reasons above, the RTC_OBJC_TYPE(RTCEAGLVideoView) maintains // its own |isDirty| flag. -@interface RTCEAGLVideoView () -// |videoFrame| is set when we receive a frame from a worker thread and is read -// from the display link callback so atomicity is required. -@property(atomic, strong) RTCVideoFrame *videoFrame; +@interface RTC_OBJC_TYPE (RTCEAGLVideoView) +() + // |videoFrame| is set when we receive a frame from a worker thread and is read + // from the display link callback so atomicity is required. + @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic, readonly) GLKView *glkView; @end -@implementation RTCEAGLVideoView { +@implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { RTCDisplayLinkTimer *_timer; EAGLContext *_glContext; // This flag should only be set and read on the main thread (e.g. 
by // setNeedsDisplay) BOOL _isDirty; - id _shader; + id _shader; RTCNV12TextureCache *_nv12TextureCache; RTCI420TextureCache *_i420TextureCache; // As timestamps should be unique between frames, will store last @@ -67,7 +68,7 @@ return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; } -- (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { +- (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { if (self = [super initWithFrame:frame]) { _shader = shader; if (![self configure]) { @@ -77,7 +78,8 @@ return self; } -- (instancetype)initWithCoder:(NSCoder *)aDecoder shader:(id)shader { +- (instancetype)initWithCoder:(NSCoder *)aDecoder + shader:(id)shader { if (self = [super initWithCoder:aDecoder]) { _shader = shader; if (![self configure]) { @@ -127,11 +129,11 @@ // Frames are received on a separate thread, so we poll for current frame // using a refresh rate proportional to screen refresh frequency. This // occurs on the main thread. - __weak RTCEAGLVideoView *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ - RTCEAGLVideoView *strongSelf = weakSelf; - [strongSelf displayLinkTimerDidFire]; - }]; + RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; + [strongSelf displayLinkTimerDidFire]; + }]; if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive) { [self setupGL]; } @@ -182,7 +184,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { // The renderer will draw the frame to the framebuffer corresponding to the // one used by |view|. 
- RTCVideoFrame *frame = self.videoFrame; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; if (!frame || frame.timeStampNs == _lastDrawnFrameTimeStampNs) { return; } @@ -192,7 +194,7 @@ } [self ensureGLContext]; glClear(GL_COLOR_BUFFER_BIT); - if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { if (!_nv12TextureCache) { _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext]; } @@ -223,18 +225,18 @@ } } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) // These methods may be called on non-main thread. - (void)setSize:(CGSize)size { - __weak RTCEAGLVideoView *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; dispatch_async(dispatch_get_main_queue(), ^{ - RTCEAGLVideoView *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size]; }); } -- (void)renderFrame:(RTCVideoFrame *)frame { +- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { self.videoFrame = frame; } diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h index 07172e713a..9fdcc5a695 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -20,6 +20,6 @@ - (instancetype)init NS_UNAVAILABLE; - (instancetype)initWithContext:(GlContextType *)context NS_DESIGNATED_INITIALIZER; -- (void)uploadFrameToTextures:(RTCVideoFrame *)frame; +- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index 865f3a258a..5dccd4bf6a 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ 
b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -123,10 +123,10 @@ static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; uploadPlane); } -- (void)uploadFrameToTextures:(RTCVideoFrame *)frame { +- (void)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { _currentTextureSet = (_currentTextureSet + 1) % kNumTextureSets; - id buffer = [frame.buffer toI420]; + id buffer = [frame.buffer toI420]; const int chromaWidth = buffer.chromaWidth; const int chromaHeight = buffer.chromaHeight; diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h index 2540f38154..c9ee986f88 100644 --- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.h @@ -19,20 +19,21 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCNSGLVideoView; +@class RTC_OBJC_TYPE(RTCNSGLVideoView); RTC_OBJC_EXPORT -@protocol RTCNSGLVideoViewDelegate -@end +@protocol RTC_OBJC_TYPE +(RTCNSGLVideoViewDelegate) @end RTC_OBJC_EXPORT -@interface RTCNSGLVideoView : NSOpenGLView +@interface RTC_OBJC_TYPE (RTCNSGLVideoView) : NSOpenGLView -@property(nonatomic, weak) id delegate; +@property(nonatomic, weak) id delegate; - (instancetype)initWithFrame:(NSRect)frameRect pixelFormat:(NSOpenGLPixelFormat *)format - shader:(id)shader NS_DESIGNATED_INITIALIZER; + shader:(id)shader + NS_DESIGNATED_INITIALIZER; @end diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m index 714cae79c6..de54e36711 100644 --- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m @@ -23,10 +23,12 @@ #import "base/RTCLogging.h" #import "base/RTCVideoFrame.h" -@interface RTCNSGLVideoView () -// |videoFrame| is set when we receive a frame from a worker thread and is read -// from the display link callback so atomicity is required. 
-@property(atomic, strong) RTCVideoFrame *videoFrame; +@interface RTC_OBJC_TYPE (RTCNSGLVideoView) +() + // |videoFrame| is set when we receive a frame from a worker thread and is read + // from the display link callback so atomicity is required. + @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * + videoFrame; @property(atomic, strong) RTCI420TextureCache *i420TextureCache; - (void)drawFrame; @@ -38,15 +40,16 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext) { - RTCNSGLVideoView *view = (__bridge RTCNSGLVideoView *)displayLinkContext; + RTC_OBJC_TYPE(RTCNSGLVideoView) *view = + (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext; [view drawFrame]; return kCVReturnSuccess; } -@implementation RTCNSGLVideoView { +@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { CVDisplayLinkRef _displayLink; - RTCVideoFrame *_lastDrawnFrame; - id _shader; + RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame; + id _shader; } @synthesize delegate = _delegate; @@ -59,7 +62,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, - (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format - shader:(id)shader { + shader:(id)shader { if (self = [super initWithFrame:frame pixelFormat:format]) { _shader = shader; } @@ -105,7 +108,7 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, [super clearGLContext]; } -#pragma mark - RTCVideoRenderer +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) // These methods may be called on non-main thread. 
- (void)setSize:(CGSize)size { @@ -114,14 +117,14 @@ static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, }); } -- (void)renderFrame:(RTCVideoFrame *)frame { +- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { self.videoFrame = frame; } #pragma mark - Private - (void)drawFrame { - RTCVideoFrame *frame = self.videoFrame; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; if (!frame || frame == _lastDrawnFrame) { return; } diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h index 9cba823271..f202b836b5 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -10,7 +10,9 @@ #import -@class RTCVideoFrame; +#import "base/RTCMacros.h" + +@class RTC_OBJC_TYPE(RTCVideoFrame); NS_ASSUME_NONNULL_BEGIN @@ -22,7 +24,7 @@ NS_ASSUME_NONNULL_BEGIN - (instancetype)init NS_UNAVAILABLE; - (nullable instancetype)initWithContext:(EAGLContext *)context NS_DESIGNATED_INITIALIZER; -- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame; +- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; - (void)releaseTextures; diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m index aab62d4363..a520ac45b4 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -76,10 +76,10 @@ return YES; } -- (BOOL)uploadFrameToTextures:(RTCVideoFrame *)frame { - NSAssert([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]], +- (BOOL)uploadFrameToTextures:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + NSAssert([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]], @"frame must be CVPixelBuffer backed"); - RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = 
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; CVPixelBufferRef pixelBuffer = rtcPixelBuffer.pixelBuffer; return [self loadTexture:&_yTextureRef pixelBuffer:pixelBuffer diff --git a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h index 6876cc3ab5..9df30a8fa0 100644 --- a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h +++ b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h @@ -15,19 +15,17 @@ NS_ASSUME_NONNULL_BEGIN /** - * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES) shaders used in - * rendering for the RTCEAGLVideoView/RTCNSGLVideoView. + * RTCVideoViewShading provides a way for apps to customize the OpenGL(ES shaders + * used in rendering for the RTCEAGLVideoView/RTCNSGLVideoView. */ RTC_OBJC_EXPORT -@protocol RTCVideoViewShading +@protocol RTC_OBJC_TYPE +(RTCVideoViewShading) -/** Callback for I420 frames. Each plane is given as a texture. */ -- (void)applyShadingForFrameWithWidth:(int)width - height:(int)height - rotation:(RTCVideoRotation)rotation - yPlane:(GLuint)yPlane - uPlane:(GLuint)uPlane - vPlane:(GLuint)vPlane; + /** Callback for I420 frames. Each plane is given as a texture. */ + - (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation + : (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane + : (GLuint)vPlane; /** Callback for NV12 frames. Each plane is given as a texture. */ - (void)applyShadingForFrameWithWidth:(int)width diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h index f67fa94ca8..a0cd8515d1 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h @@ -15,9 +15,10 @@ NS_ASSUME_NONNULL_BEGIN /* Interfaces for converting to/from internal C++ formats. 
*/ -@interface RTCCodecSpecificInfoH264 () +@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) +() -- (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo; + - (webrtc::CodecSpecificInfo)nativeCodecSpecificInfo; @end diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h index ece9570a13..ae3003a115 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h @@ -20,7 +20,7 @@ typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) { }; RTC_OBJC_EXPORT -@interface RTCCodecSpecificInfoH264 : NSObject +@interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject @property(nonatomic, assign) RTCH264PacketizationMode packetizationMode; diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm index 57f2411e3b..e38ed307b3 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.mm @@ -13,7 +13,7 @@ #import "RTCH264ProfileLevelId.h" // H264 specific settings. -@implementation RTCCodecSpecificInfoH264 +@implementation RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) @synthesize packetizationMode = _packetizationMode; diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h index 7ca9463a59..de5a9c4684 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h +++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h @@ -16,10 +16,11 @@ NS_ASSUME_NONNULL_BEGIN /** This decoder factory include support for all codecs bundled with WebRTC. If using custom - * codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory. + * codecs, create custom implementations of RTCVideoEncoderFactory and + * RTCVideoDecoderFactory. 
*/ RTC_OBJC_EXPORT -@interface RTCDefaultVideoDecoderFactory : NSObject +@interface RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) : NSObject @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m index 44445f4b13..4046cfedbe 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m @@ -19,31 +19,33 @@ #import "api/video_codec/RTCVideoDecoderVP9.h" #endif -@implementation RTCDefaultVideoDecoderFactory +@implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) -- (NSArray *)supportedCodecs { +- (NSArray *)supportedCodecs { NSDictionary *constrainedHighParams = @{ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedHighInfo = - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name - parameters:constrainedHighParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedHighParams]; NSDictionary *constrainedBaselineParams = @{ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedBaselineInfo = - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name - parameters:constrainedBaselineParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedBaselineParams]; - RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name]; #if 
defined(RTC_ENABLE_VP9) - RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]; #endif return @[ @@ -56,14 +58,14 @@ ]; } -- (id)createDecoder:(RTCVideoCodecInfo *)info { +- (id)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { - return [[RTCVideoDecoderH264 alloc] init]; + return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init]; } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { - return [RTCVideoDecoderVP8 vp8Decoder]; + return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder]; #if defined(RTC_ENABLE_VP9) } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) { - return [RTCVideoDecoderVP9 vp9Decoder]; + return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder]; #endif } diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h index c45e54362b..92ab40c95b 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h @@ -16,14 +16,15 @@ NS_ASSUME_NONNULL_BEGIN /** This encoder factory include support for all codecs bundled with WebRTC. If using custom - * codecs, create custom implementations of RTCVideoEncoderFactory and RTCVideoDecoderFactory. + * codecs, create custom implementations of RTCVideoEncoderFactory and + * RTCVideoDecoderFactory. 
*/ RTC_OBJC_EXPORT -@interface RTCDefaultVideoEncoderFactory : NSObject +@interface RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) : NSObject -@property(nonatomic, retain) RTCVideoCodecInfo *preferredCodec; +@property(nonatomic, retain) RTC_OBJC_TYPE(RTCVideoCodecInfo) *preferredCodec; -+ (NSArray *)supportedCodecs; ++ (NSArray *)supportedCodecs; @end diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index b72296b64f..35a1407f38 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -19,33 +19,35 @@ #import "api/video_codec/RTCVideoEncoderVP9.h" #endif -@implementation RTCDefaultVideoEncoderFactory +@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) @synthesize preferredCodec; -+ (NSArray *)supportedCodecs { ++ (NSArray *)supportedCodecs { NSDictionary *constrainedHighParams = @{ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedHighInfo = - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name - parameters:constrainedHighParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedHighParams]; NSDictionary *constrainedBaselineParams = @{ @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedBaselineInfo = - [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name - parameters:constrainedBaselineParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + parameters:constrainedBaselineParams]; - RTCVideoCodecInfo 
*vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name]; #if defined(RTC_ENABLE_VP9) - RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp9Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]; #endif return @[ @@ -58,24 +60,25 @@ ]; } -- (id)createEncoder:(RTCVideoCodecInfo *)info { +- (id)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { - return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info]; + return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info]; } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { - return [RTCVideoEncoderVP8 vp8Encoder]; + return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder]; #if defined(RTC_ENABLE_VP9) } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) { - return [RTCVideoEncoderVP9 vp9Encoder]; + return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder]; #endif } return nil; } -- (NSArray *)supportedCodecs { - NSMutableArray *codecs = [[[self class] supportedCodecs] mutableCopy]; +- (NSArray *)supportedCodecs { + NSMutableArray *codecs = + [[[self class] supportedCodecs] mutableCopy]; - NSMutableArray *orderedCodecs = [NSMutableArray array]; + NSMutableArray *orderedCodecs = [NSMutableArray array]; NSUInteger index = [codecs indexOfObject:self.preferredCodec]; if (index != NSNotFound) { [orderedCodecs addObject:[codecs objectAtIndex:index]]; diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h index 56b353215a..dac7bb5610 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h @@ -48,7 +48,7 @@ typedef NS_ENUM(NSUInteger, 
RTCH264Level) { }; RTC_OBJC_EXPORT -@interface RTCH264ProfileLevelId : NSObject +@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject @property(nonatomic, readonly) RTCH264Profile profile; @property(nonatomic, readonly) RTCH264Level level; diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm index afd9fcb44c..b985d9df02 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm @@ -75,15 +75,16 @@ NSString *MaxSupportedProfileLevelConstrainedHigh() { } // namespace -@interface RTCH264ProfileLevelId () +@interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) +() -@property(nonatomic, assign) RTCH264Profile profile; + @property(nonatomic, assign) RTCH264Profile profile; @property(nonatomic, assign) RTCH264Level level; @property(nonatomic, strong) NSString *hexString; @end -@implementation RTCH264ProfileLevelId +@implementation RTC_OBJC_TYPE (RTCH264ProfileLevelId) @synthesize profile = _profile; @synthesize level = _level; diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h index 4fcff1dff7..88bacbbdfe 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h @@ -14,5 +14,5 @@ #import "RTCVideoDecoderFactory.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderFactoryH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) : NSObject @end diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m index bf399c6e7b..bdae19d687 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m @@ -13,10 +13,10 @@ #import "RTCH264ProfileLevelId.h" #import "RTCVideoDecoderH264.h" -@implementation 
RTCVideoDecoderFactoryH264 +@implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) -- (NSArray *)supportedCodecs { - NSMutableArray *codecs = [NSMutableArray array]; +- (NSArray *)supportedCodecs { + NSMutableArray *codecs = [NSMutableArray array]; NSString *codecName = kRTCVideoCodecH264Name; NSDictionary *constrainedHighParams = @{ @@ -24,8 +24,9 @@ @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedHighInfo = - [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + parameters:constrainedHighParams]; [codecs addObject:constrainedHighInfo]; NSDictionary *constrainedBaselineParams = @{ @@ -33,15 +34,16 @@ @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedBaselineInfo = - [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + parameters:constrainedBaselineParams]; [codecs addObject:constrainedBaselineInfo]; return [codecs copy]; } -- (id)createDecoder:(RTCVideoCodecInfo *)info { - return [[RTCVideoDecoderH264 alloc] init]; +- (id)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init]; } @end diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.h b/sdk/objc/components/video_codec/RTCVideoDecoderH264.h index b860276206..a12e4212a7 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.h +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.h @@ -14,5 +14,5 @@ #import "RTCVideoDecoder.h" RTC_OBJC_EXPORT -@interface RTCVideoDecoderH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) : NSObject @end diff --git 
a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm index 04bdabf643..52edefe053 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm @@ -37,8 +37,8 @@ struct RTCFrameDecodeParams { int64_t timestamp; }; -@interface RTCVideoDecoderH264 () -- (void)setError:(OSStatus)error; +@interface RTC_OBJC_TYPE (RTCVideoDecoderH264) +() - (void)setError : (OSStatus)error; @end // This is the callback function that VideoToolbox calls when decode is @@ -53,23 +53,25 @@ void decompressionOutputCallback(void *decoderRef, std::unique_ptr decodeParams( reinterpret_cast(params)); if (status != noErr) { - RTCVideoDecoderH264 *decoder = (__bridge RTCVideoDecoderH264 *)decoderRef; + RTC_OBJC_TYPE(RTCVideoDecoderH264) *decoder = + (__bridge RTC_OBJC_TYPE(RTCVideoDecoderH264) *)decoderRef; [decoder setError:status]; RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status; return; } // TODO(tkchin): Handle CVO properly. - RTCCVPixelBuffer *frameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:imageBuffer]; - RTCVideoFrame *decodedFrame = - [[RTCVideoFrame alloc] initWithBuffer:frameBuffer - rotation:RTCVideoRotation_0 - timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *frameBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] + initWithBuffer:frameBuffer + rotation:RTCVideoRotation_0 + timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec]; decodedFrame.timeStamp = decodeParams->timestamp; decodeParams->callback(decodedFrame); } // Decoder. 
-@implementation RTCVideoDecoderH264 { +@implementation RTC_OBJC_TYPE (RTCVideoDecoderH264) { CMVideoFormatDescriptionRef _videoFormat; CMMemoryPoolRef _memoryPool; VTDecompressionSessionRef _decompressionSession; @@ -96,9 +98,9 @@ void decompressionOutputCallback(void *decoderRef, return WEBRTC_VIDEO_CODEC_OK; } -- (NSInteger)decode:(RTCEncodedImage *)inputImage +- (NSInteger)decode:(RTC_OBJC_TYPE(RTCEncodedImage) *)inputImage missingFrames:(BOOL)missingFrames - codecSpecificInfo:(nullable id)info + codecSpecificInfo:(nullable id)info renderTimeMs:(int64_t)renderTimeMs { RTC_DCHECK(inputImage.buffer); diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h index c64405e4da..45fc4be2ea 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h @@ -14,5 +14,5 @@ #import "RTCVideoEncoderFactory.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderFactoryH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) : NSObject @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m index bbc15e9d5d..9843849307 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m @@ -13,10 +13,10 @@ #import "RTCH264ProfileLevelId.h" #import "RTCVideoEncoderH264.h" -@implementation RTCVideoEncoderFactoryH264 +@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) -- (NSArray *)supportedCodecs { - NSMutableArray *codecs = [NSMutableArray array]; +- (NSArray *)supportedCodecs { + NSMutableArray *codecs = [NSMutableArray array]; NSString *codecName = kRTCVideoCodecH264Name; NSDictionary *constrainedHighParams = @{ @@ -24,8 +24,9 @@ @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedHighInfo = - 
[[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedHighParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + parameters:constrainedHighParams]; [codecs addObject:constrainedHighInfo]; NSDictionary *constrainedBaselineParams = @{ @@ -33,15 +34,16 @@ @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; - RTCVideoCodecInfo *constrainedBaselineInfo = - [[RTCVideoCodecInfo alloc] initWithName:codecName parameters:constrainedBaselineParams]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:codecName + parameters:constrainedBaselineParams]; [codecs addObject:constrainedBaselineInfo]; return [codecs copy]; } -- (id)createEncoder:(RTCVideoCodecInfo *)info { - return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info]; +- (id)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info]; } @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.h b/sdk/objc/components/video_codec/RTCVideoEncoderH264.h index a9c05580a4..9f4f4c7c8d 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.h +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.h @@ -15,8 +15,8 @@ #import "RTCVideoEncoder.h" RTC_OBJC_EXPORT -@interface RTCVideoEncoderH264 : NSObject +@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) : NSObject -- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo; +- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo; @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 5b90922fca..113806489c 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -40,17 +40,14 @@ #include 
"sdk/objc/components/video_codec/nalu_rewriter.h" #include "third_party/libyuv/include/libyuv/convert_from.h" -@interface RTCVideoEncoderH264 () +@interface RTC_OBJC_TYPE (RTCVideoEncoderH264) +() -- (void)frameWasEncoded:(OSStatus)status - flags:(VTEncodeInfoFlags)infoFlags - sampleBuffer:(CMSampleBufferRef)sampleBuffer - codecSpecificInfo:(id)codecSpecificInfo - width:(int32_t)width - height:(int32_t)height - renderTimeMs:(int64_t)renderTimeMs - timestamp:(uint32_t)timestamp - rotation:(RTCVideoRotation)rotation; + - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer + : (CMSampleBufferRef)sampleBuffer codecSpecificInfo + : (id)codecSpecificInfo width : (int32_t)width height + : (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation + : (RTCVideoRotation)rotation; @end @@ -70,8 +67,8 @@ const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; // Struct that we pass to the encoder per frame to encode. We receive it again // in the encoder callback. struct RTCFrameEncodeParams { - RTCFrameEncodeParams(RTCVideoEncoderH264 *e, - RTCCodecSpecificInfoH264 *csi, + RTCFrameEncodeParams(RTC_OBJC_TYPE(RTCVideoEncoderH264) * e, + RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * csi, int32_t w, int32_t h, int64_t rtms, @@ -81,12 +78,12 @@ struct RTCFrameEncodeParams { if (csi) { codecSpecificInfo = csi; } else { - codecSpecificInfo = [[RTCCodecSpecificInfoH264 alloc] init]; + codecSpecificInfo = [[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) alloc] init]; } } - RTCVideoEncoderH264 *encoder; - RTCCodecSpecificInfoH264 *codecSpecificInfo; + RTC_OBJC_TYPE(RTCVideoEncoderH264) * encoder; + RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) * codecSpecificInfo; int32_t width; int32_t height; int64_t render_time_ms; @@ -97,7 +94,8 @@ struct RTCFrameEncodeParams { // We receive I420Frames as input, but we need to feed CVPixelBuffers into the // encoder. This performs the copy and format conversion. 
// TODO(tkchin): See if encoder will accept i420 frames and compare performance. -bool CopyVideoFrameToNV12PixelBuffer(id frameBuffer, CVPixelBufferRef pixelBuffer) { +bool CopyVideoFrameToNV12PixelBuffer(id frameBuffer, + CVPixelBufferRef pixelBuffer) { RTC_DCHECK(pixelBuffer); RTC_DCHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer), kNV12PixelFormat); RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0), frameBuffer.height); @@ -313,8 +311,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id } } // namespace -@implementation RTCVideoEncoderH264 { - RTCVideoCodecInfo *_codecInfo; +@implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { + RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo; std::unique_ptr _bitrateAdjuster; uint32_t _targetBitrateBps; uint32_t _encoderBitrateBps; @@ -340,7 +338,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id // drastically reduced bitrate, so we want to avoid that. In steady state // conditions, 0.95 seems to give us better overall bitrate over long periods // of time. 
-- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo { +- (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { if (self = [super init]) { _codecInfo = codecInfo; _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95)); @@ -358,7 +356,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id [self destroyCompressionSession]; } -- (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings +- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores { RTC_DCHECK(settings); RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]); @@ -388,8 +386,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat]; } -- (NSInteger)encode:(RTCVideoFrame *)frame - codecSpecificInfo:(nullable id)codecSpecificInfo +- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame + codecSpecificInfo:(nullable id)codecSpecificInfo frameTypes:(NSArray *)frameTypes { RTC_DCHECK_EQ(frame.width, _width); RTC_DCHECK_EQ(frame.height, _height); @@ -404,9 +402,10 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id } CVPixelBufferRef pixelBuffer = nullptr; - if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { // Native frame buffer - RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; if (![rtcPixelBuffer requiresCropping]) { // This pixel buffer might have a higher resolution than what the // compression session is configured to. 
The compression session can @@ -543,17 +542,18 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id return WEBRTC_VIDEO_CODEC_OK; } -- (OSType)pixelFormatOfFrame:(RTCVideoFrame *)frame { +- (OSType)pixelFormatOfFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { // Use NV12 for non-native frames. - if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { - RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; return CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer); } return kNV12PixelFormat; } -- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTCVideoFrame *)frame { +- (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { BOOL resetCompressionSession = NO; // If we're capturing native frames in another pixel format than the compression session is @@ -755,7 +755,7 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id - (void)frameWasEncoded:(OSStatus)status flags:(VTEncodeInfoFlags)infoFlags sampleBuffer:(CMSampleBufferRef)sampleBuffer - codecSpecificInfo:(id)codecSpecificInfo + codecSpecificInfo:(id)codecSpecificInfo width:(int32_t)width height:(int32_t)height renderTimeMs:(int64_t)renderTimeMs @@ -783,18 +783,19 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id } __block std::unique_ptr buffer = std::make_unique(); - RTCRtpFragmentationHeader *header; + RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * header; { std::unique_ptr header_cpp; bool result = H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get(), &header_cpp); - header = [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:header_cpp.get()]; + header = [[RTC_OBJC_TYPE(RTCRtpFragmentationHeader) alloc] + 
initWithNativeFragmentationHeader:header_cpp.get()]; if (!result) { return; } } - RTCEncodedImage *frame = [[RTCEncodedImage alloc] init]; + RTC_OBJC_TYPE(RTCEncodedImage) *frame = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init]; // This assumes ownership of `buffer` and is responsible for freeing it when done. frame.buffer = [[NSData alloc] initWithBytesNoCopy:buffer->data() length:buffer->size() @@ -825,9 +826,10 @@ NSUInteger GetMaxSampleRate(const webrtc::H264::ProfileLevelId &profile_level_id _bitrateAdjuster->Update(frame.buffer.length); } -- (nullable RTCVideoEncoderQpThresholds *)scalingSettings { - return [[RTCVideoEncoderQpThresholds alloc] initWithThresholdsLow:kLowH264QpThreshold - high:kHighH264QpThreshold]; +- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { + return [[RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) alloc] + initWithThresholdsLow:kLowH264QpThreshold + high:kHighH264QpThreshold]; } @end diff --git a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h index 432a382574..17eebd0aff 100644 --- a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h +++ b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN /** RTCVideoFrameBuffer containing a CVPixelBufferRef */ RTC_OBJC_EXPORT -@interface RTCCVPixelBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCCVPixelBuffer) : NSObject @property(nonatomic, readonly) CVPixelBufferRef pixelBuffer; @property(nonatomic, readonly) int cropX; diff --git a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm index 01b6405dc4..df8077b35f 100644 --- a/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm +++ b/sdk/objc/components/video_frame_buffer/RTCCVPixelBuffer.mm @@ -22,7 +22,7 @@ #import #endif -@implementation RTCCVPixelBuffer { +@implementation RTC_OBJC_TYPE (RTCCVPixelBuffer) { int _width; int 
_height; int _bufferWidth; @@ -152,13 +152,13 @@ return YES; } -- (id)toI420 { +- (id)toI420 { const OSType pixelFormat = CVPixelBufferGetPixelFormatType(_pixelBuffer); CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly); - RTCMutableI420Buffer* i420Buffer = - [[RTCMutableI420Buffer alloc] initWithWidth:[self width] height:[self height]]; + RTC_OBJC_TYPE(RTCMutableI420Buffer)* i420Buffer = + [[RTC_OBJC_TYPE(RTCMutableI420Buffer) alloc] initWithWidth:[self width] height:[self height]]; switch (pixelFormat) { case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: diff --git a/sdk/objc/helpers/RTCCameraPreviewView.h b/sdk/objc/helpers/RTCCameraPreviewView.h index 17f8d33d77..db9b15a45c 100644 --- a/sdk/objc/helpers/RTCCameraPreviewView.h +++ b/sdk/objc/helpers/RTCCameraPreviewView.h @@ -19,7 +19,7 @@ * AVCaptureSession. */ RTC_OBJC_EXPORT -@interface RTCCameraPreviewView : UIView +@interface RTC_OBJC_TYPE (RTCCameraPreviewView) : UIView /** The capture session being rendered in the view. 
Capture session * is assigned to AVCaptureVideoPreviewLayer async in the same diff --git a/sdk/objc/helpers/RTCCameraPreviewView.m b/sdk/objc/helpers/RTCCameraPreviewView.m index 57dadea5c7..12e87d8d64 100644 --- a/sdk/objc/helpers/RTCCameraPreviewView.m +++ b/sdk/objc/helpers/RTCCameraPreviewView.m @@ -15,7 +15,7 @@ #import "RTCDispatcher+Private.h" -@implementation RTCCameraPreviewView +@implementation RTC_OBJC_TYPE (RTCCameraPreviewView) @synthesize captureSession = _captureSession; @@ -48,15 +48,15 @@ return; } _captureSession = captureSession; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain block:^{ AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer]; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ previewLayer.session = captureSession; - [RTCDispatcher + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain block:^{ [self setCorrectVideoOrientation]; diff --git a/sdk/objc/helpers/RTCDispatcher+Private.h b/sdk/objc/helpers/RTCDispatcher+Private.h index aa741f4db9..195c651790 100644 --- a/sdk/objc/helpers/RTCDispatcher+Private.h +++ b/sdk/objc/helpers/RTCDispatcher+Private.h @@ -10,8 +10,9 @@ #import "RTCDispatcher.h" -@interface RTCDispatcher () +@interface RTC_OBJC_TYPE (RTCDispatcher) +() -+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType; + + (dispatch_queue_t)dispatchQueueForType : (RTCDispatcherQueueType)dispatchType; @end diff --git a/sdk/objc/helpers/RTCDispatcher.h b/sdk/objc/helpers/RTCDispatcher.h index 4f8359b32c..f8580f95fa 100644 --- a/sdk/objc/helpers/RTCDispatcher.h +++ b/sdk/objc/helpers/RTCDispatcher.h @@ -26,7 +26,7 @@ typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) { * shared dispatch queue. 
*/ RTC_OBJC_EXPORT -@interface RTCDispatcher : NSObject +@interface RTC_OBJC_TYPE (RTCDispatcher) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/helpers/RTCDispatcher.m b/sdk/objc/helpers/RTCDispatcher.m index b9d64a4c54..2e83573adc 100644 --- a/sdk/objc/helpers/RTCDispatcher.m +++ b/sdk/objc/helpers/RTCDispatcher.m @@ -13,7 +13,7 @@ static dispatch_queue_t kAudioSessionQueue = nil; static dispatch_queue_t kCaptureSessionQueue = nil; -@implementation RTCDispatcher +@implementation RTC_OBJC_TYPE (RTCDispatcher) + (void)initialize { static dispatch_once_t onceToken; diff --git a/sdk/objc/native/api/video_capturer.h b/sdk/objc/native/api/video_capturer.h index 464d148bd1..9847d8148b 100644 --- a/sdk/objc/native/api/video_capturer.h +++ b/sdk/objc/native/api/video_capturer.h @@ -20,7 +20,7 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoCapturer( - RTCVideoCapturer* objc_video_capturer, + RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread* signaling_thread, rtc::Thread* worker_thread); diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm index 26185509a7..6dd0edbcd9 100644 --- a/sdk/objc/native/api/video_capturer.mm +++ b/sdk/objc/native/api/video_capturer.mm @@ -17,7 +17,7 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoCapturer( - RTCVideoCapturer *objc_video_capturer, + RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread *signaling_thread, rtc::Thread *worker_thread) { RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init]; diff --git a/sdk/objc/native/api/video_decoder_factory.h b/sdk/objc/native/api/video_decoder_factory.h index 710bb6eba5..03d8af3cfe 100644 --- a/sdk/objc/native/api/video_decoder_factory.h +++ b/sdk/objc/native/api/video_decoder_factory.h @@ -20,7 +20,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoDecoderFactory( - id objc_video_decoder_factory); + id objc_video_decoder_factory); } // namespace webrtc 
diff --git a/sdk/objc/native/api/video_decoder_factory.mm b/sdk/objc/native/api/video_decoder_factory.mm index 8d0e4ab4cb..d418f2fe6f 100644 --- a/sdk/objc/native/api/video_decoder_factory.mm +++ b/sdk/objc/native/api/video_decoder_factory.mm @@ -17,7 +17,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoDecoderFactory( - id objc_video_decoder_factory) { + id objc_video_decoder_factory) { return std::make_unique(objc_video_decoder_factory); } diff --git a/sdk/objc/native/api/video_encoder_factory.h b/sdk/objc/native/api/video_encoder_factory.h index 8dab48c48d..6e551b288d 100644 --- a/sdk/objc/native/api/video_encoder_factory.h +++ b/sdk/objc/native/api/video_encoder_factory.h @@ -20,7 +20,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoEncoderFactory( - id objc_video_encoder_factory); + id objc_video_encoder_factory); } // namespace webrtc diff --git a/sdk/objc/native/api/video_encoder_factory.mm b/sdk/objc/native/api/video_encoder_factory.mm index b582deb108..6fa5563f75 100644 --- a/sdk/objc/native/api/video_encoder_factory.mm +++ b/sdk/objc/native/api/video_encoder_factory.mm @@ -17,7 +17,7 @@ namespace webrtc { std::unique_ptr ObjCToNativeVideoEncoderFactory( - id objc_video_encoder_factory) { + id objc_video_encoder_factory) { return std::make_unique(objc_video_encoder_factory); } diff --git a/sdk/objc/native/api/video_frame.h b/sdk/objc/native/api/video_frame.h index f8dd568b87..b4416ffabe 100644 --- a/sdk/objc/native/api/video_frame.h +++ b/sdk/objc/native/api/video_frame.h @@ -17,7 +17,7 @@ namespace webrtc { -RTCVideoFrame* NativeToObjCVideoFrame(const VideoFrame& frame); +RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame); } // namespace webrtc diff --git a/sdk/objc/native/api/video_frame.mm b/sdk/objc/native/api/video_frame.mm index 02dd830dce..b82994fd5f 100644 --- a/sdk/objc/native/api/video_frame.mm +++ b/sdk/objc/native/api/video_frame.mm @@ -14,7 +14,7 @@ namespace webrtc { -RTCVideoFrame* 
NativeToObjCVideoFrame(const VideoFrame& frame) { +RTC_OBJC_TYPE(RTCVideoFrame) * NativeToObjCVideoFrame(const VideoFrame& frame) { return ToObjCVideoFrame(frame); } diff --git a/sdk/objc/native/api/video_frame_buffer.h b/sdk/objc/native/api/video_frame_buffer.h index 54a73750f7..204d65d850 100644 --- a/sdk/objc/native/api/video_frame_buffer.h +++ b/sdk/objc/native/api/video_frame_buffer.h @@ -19,9 +19,9 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoFrameBuffer( - id objc_video_frame_buffer); + id objc_video_frame_buffer); -id NativeToObjCVideoFrameBuffer( +id NativeToObjCVideoFrameBuffer( const rtc::scoped_refptr& buffer); } // namespace webrtc diff --git a/sdk/objc/native/api/video_frame_buffer.mm b/sdk/objc/native/api/video_frame_buffer.mm index 2abda42871..6dc99756a6 100644 --- a/sdk/objc/native/api/video_frame_buffer.mm +++ b/sdk/objc/native/api/video_frame_buffer.mm @@ -15,11 +15,11 @@ namespace webrtc { rtc::scoped_refptr ObjCToNativeVideoFrameBuffer( - id objc_video_frame_buffer) { + id objc_video_frame_buffer) { return new rtc::RefCountedObject(objc_video_frame_buffer); } -id NativeToObjCVideoFrameBuffer( +id NativeToObjCVideoFrameBuffer( const rtc::scoped_refptr &buffer) { return ToObjCVideoFrameBuffer(buffer); } diff --git a/sdk/objc/native/api/video_renderer.h b/sdk/objc/native/api/video_renderer.h index afa65430cb..04796b8049 100644 --- a/sdk/objc/native/api/video_renderer.h +++ b/sdk/objc/native/api/video_renderer.h @@ -21,7 +21,7 @@ namespace webrtc { std::unique_ptr> ObjCToNativeVideoRenderer( - id objc_video_renderer); + id objc_video_renderer); } // namespace webrtc diff --git a/sdk/objc/native/api/video_renderer.mm b/sdk/objc/native/api/video_renderer.mm index 66316856f4..e92d47d1e3 100644 --- a/sdk/objc/native/api/video_renderer.mm +++ b/sdk/objc/native/api/video_renderer.mm @@ -17,7 +17,7 @@ namespace webrtc { std::unique_ptr> ObjCToNativeVideoRenderer( - id objc_video_renderer) { + id objc_video_renderer) { return 
std::make_unique(objc_video_renderer); } diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 6bf1d9b8ea..b70c4d0e50 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -152,7 +152,8 @@ AudioDeviceGeneric::InitStatus AudioDeviceIOS::Init() { // here. They have not been set and confirmed yet since configureForWebRTC // is not called until audio is about to start. However, it makes sense to // store the parameters now and then verify at a later stage. - RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* config = + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration]; playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels); record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels); // Ensure that the audio device buffer (ADB) knows about the internal audio @@ -532,12 +533,12 @@ void AudioDeviceIOS::HandleInterruptionEnd() { // Allocate new buffers given the potentially new stream format. SetupAudioBuffersForActiveAudioSession(); } - UpdateAudioUnit([RTCAudioSession sharedInstance].canPlayOrRecord); + UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord); } void AudioDeviceIOS::HandleValidRouteChange() { RTC_DCHECK_RUN_ON(&thread_checker_); - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; RTCLog(@"%@", session); HandleSampleRateChange(session.sampleRate); } @@ -565,7 +566,7 @@ void AudioDeviceIOS::HandleSampleRateChange(float sample_rate) { // The audio unit is already initialized or started. // Check to see if the sample rate or buffer size has changed. 
- RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; const double session_sample_rate = session.sampleRate; const NSTimeInterval session_buffer_duration = session.IOBufferDuration; const size_t session_frames_per_buffer = @@ -646,7 +647,7 @@ void AudioDeviceIOS::HandlePlayoutGlitchDetected() { int64_t glitch_count = num_detected_playout_glitches_; dispatch_async(dispatch_get_main_queue(), ^{ - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session notifyDidDetectPlayoutGlitch:glitch_count]; }); } @@ -678,7 +679,7 @@ void AudioDeviceIOS::UpdateAudioDeviceBuffer() { void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { LOGI() << "SetupAudioBuffersForActiveAudioSession"; // Verify the current values once the audio session has been activated. - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; double sample_rate = session.sampleRate; NSTimeInterval io_buffer_duration = session.IOBufferDuration; RTCLog(@"%@", session); @@ -687,7 +688,8 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { // hardware sample rate but continue and use the non-ideal sample rate after // reinitializing the audio parameters. Most BT headsets only support 8kHz or // 16kHz. 
- RTCAudioSessionConfiguration* webRTCConfig = [RTCAudioSessionConfiguration webRTCConfiguration]; + RTC_OBJC_TYPE(RTCAudioSessionConfiguration)* webRTCConfig = + [RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration]; if (sample_rate != webRTCConfig.sampleRate) { RTC_LOG(LS_WARNING) << "Unable to set the preferred sample rate"; } @@ -797,7 +799,7 @@ void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) { if (should_start_audio_unit) { RTCLog(@"Starting audio unit for UpdateAudioUnit"); // Log session settings before trying to start audio streaming. - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; RTCLog(@"%@", session); if (!audio_unit_->Start()) { RTCLogError(@"Failed to start audio unit."); @@ -827,7 +829,7 @@ bool AudioDeviceIOS::ConfigureAudioSession() { RTCLogWarning(@"Audio session already configured."); return false; } - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session lockForConfiguration]; bool success = [session configureWebRTCSession:nil]; [session unlockForConfiguration]; @@ -847,7 +849,7 @@ void AudioDeviceIOS::UnconfigureAudioSession() { RTCLogWarning(@"Audio session already unconfigured."); return; } - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session lockForConfiguration]; [session unconfigureWebRTCSession:nil]; [session endWebRTCSession:nil]; @@ -865,7 +867,7 @@ bool AudioDeviceIOS::InitPlayOrRecord() { return false; } - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; // Subscribe to audio session events. [session pushDelegate:audio_session_observer_]; is_interrupted_ = session.isInterrupted ? 
true : false; @@ -915,7 +917,7 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() { io_thread_checker_.Detach(); // Remove audio session notification observers. - RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session removeDelegate:audio_session_observer_]; // All I/O should be stopped or paused prior to deactivating the audio diff --git a/sdk/objc/native/src/objc_frame_buffer.h b/sdk/objc/native/src/objc_frame_buffer.h index f941dad6e4..9c1ff17876 100644 --- a/sdk/objc/native/src/objc_frame_buffer.h +++ b/sdk/objc/native/src/objc_frame_buffer.h @@ -13,15 +13,18 @@ #import +#import "base/RTCMacros.h" + #include "common_video/include/video_frame_buffer.h" -@protocol RTCVideoFrameBuffer; +@protocol RTC_OBJC_TYPE +(RTCVideoFrameBuffer); namespace webrtc { class ObjCFrameBuffer : public VideoFrameBuffer { public: - explicit ObjCFrameBuffer(id); + explicit ObjCFrameBuffer(id); ~ObjCFrameBuffer() override; Type type() const override; @@ -31,15 +34,15 @@ class ObjCFrameBuffer : public VideoFrameBuffer { rtc::scoped_refptr ToI420() override; - id wrapped_frame_buffer() const; + id wrapped_frame_buffer() const; private: - id frame_buffer_; + id frame_buffer_; int width_; int height_; }; -id ToObjCVideoFrameBuffer( +id ToObjCVideoFrameBuffer( const rtc::scoped_refptr& buffer); } // namespace webrtc diff --git a/sdk/objc/native/src/objc_frame_buffer.mm b/sdk/objc/native/src/objc_frame_buffer.mm index 52e434152c..deb38a7a74 100644 --- a/sdk/objc/native/src/objc_frame_buffer.mm +++ b/sdk/objc/native/src/objc_frame_buffer.mm @@ -17,10 +17,10 @@ namespace webrtc { namespace { -/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTCI420Buffer */ +/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */ class ObjCI420FrameBuffer : public I420BufferInterface { public: - explicit ObjCI420FrameBuffer(id frame_buffer) + 
explicit ObjCI420FrameBuffer(id frame_buffer) : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {} ~ObjCI420FrameBuffer() override {} @@ -41,14 +41,14 @@ class ObjCI420FrameBuffer : public I420BufferInterface { int StrideV() const override { return frame_buffer_.strideV; } private: - id frame_buffer_; + id frame_buffer_; int width_; int height_; }; } // namespace -ObjCFrameBuffer::ObjCFrameBuffer(id frame_buffer) +ObjCFrameBuffer::ObjCFrameBuffer(id frame_buffer) : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {} ObjCFrameBuffer::~ObjCFrameBuffer() {} @@ -72,15 +72,16 @@ rtc::scoped_refptr ObjCFrameBuffer::ToI420() { return buffer; } -id ObjCFrameBuffer::wrapped_frame_buffer() const { +id ObjCFrameBuffer::wrapped_frame_buffer() const { return frame_buffer_; } -id ToObjCVideoFrameBuffer(const rtc::scoped_refptr& buffer) { +id ToObjCVideoFrameBuffer( + const rtc::scoped_refptr& buffer) { if (buffer->type() == VideoFrameBuffer::Type::kNative) { return static_cast(buffer.get())->wrapped_frame_buffer(); } else { - return [[RTCI420Buffer alloc] initWithFrameBuffer:buffer->ToI420()]; + return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()]; } } diff --git a/sdk/objc/native/src/objc_video_decoder_factory.h b/sdk/objc/native/src/objc_video_decoder_factory.h index 9911bbfe01..30ad8c2a4b 100644 --- a/sdk/objc/native/src/objc_video_decoder_factory.h +++ b/sdk/objc/native/src/objc_video_decoder_factory.h @@ -11,26 +11,29 @@ #ifndef SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_ #define SDK_OBJC_NATIVE_SRC_OBJC_VIDEO_DECODER_FACTORY_H_ +#import "base/RTCMacros.h" + #include "api/video_codecs/video_decoder_factory.h" #include "media/base/codec.h" -@protocol RTCVideoDecoderFactory; +@protocol RTC_OBJC_TYPE +(RTCVideoDecoderFactory); namespace webrtc { class ObjCVideoDecoderFactory : public VideoDecoderFactory { public: - explicit ObjCVideoDecoderFactory(id); + explicit 
ObjCVideoDecoderFactory(id); ~ObjCVideoDecoderFactory() override; - id wrapped_decoder_factory() const; + id wrapped_decoder_factory() const; std::vector GetSupportedFormats() const override; std::unique_ptr CreateVideoDecoder( const SdpVideoFormat& format) override; private: - id decoder_factory_; + id decoder_factory_; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm index 5aca02d5ab..09060548de 100644 --- a/sdk/objc/native/src/objc_video_decoder_factory.mm +++ b/sdk/objc/native/src/objc_video_decoder_factory.mm @@ -33,7 +33,7 @@ namespace webrtc { namespace { class ObjCVideoDecoder : public VideoDecoder { public: - ObjCVideoDecoder(id decoder) + ObjCVideoDecoder(id decoder) : decoder_(decoder), implementation_name_([decoder implementationName].stdString) {} int32_t InitDecode(const VideoCodec *codec_settings, int32_t number_of_cores) override { @@ -43,8 +43,8 @@ class ObjCVideoDecoder : public VideoDecoder { int32_t Decode(const EncodedImage &input_image, bool missing_frames, int64_t render_time_ms = -1) override { - RTCEncodedImage *encodedImage = - [[RTCEncodedImage alloc] initWithNativeEncodedImage:input_image]; + RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image]; return [decoder_ decode:encodedImage missingFrames:missing_frames @@ -53,7 +53,7 @@ class ObjCVideoDecoder : public VideoDecoder { } int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override { - [decoder_ setCallback:^(RTCVideoFrame *frame) { + [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) { const rtc::scoped_refptr buffer = new rtc::RefCountedObject(frame.buffer); VideoFrame videoFrame = @@ -76,26 +76,27 @@ class ObjCVideoDecoder : public VideoDecoder { const char *ImplementationName() const override { return implementation_name_.c_str(); } private: - id decoder_; + id decoder_; const 
std::string implementation_name_; }; } // namespace -ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(id decoder_factory) +ObjCVideoDecoderFactory::ObjCVideoDecoderFactory( + id decoder_factory) : decoder_factory_(decoder_factory) {} ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {} -id ObjCVideoDecoderFactory::wrapped_decoder_factory() const { +id ObjCVideoDecoderFactory::wrapped_decoder_factory() const { return decoder_factory_; } std::unique_ptr ObjCVideoDecoderFactory::CreateVideoDecoder( const SdpVideoFormat &format) { NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()]; - for (RTCVideoCodecInfo *codecInfo in decoder_factory_.supportedCodecs) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) { if ([codecName isEqualToString:codecInfo.name]) { - id decoder = [decoder_factory_ createDecoder:codecInfo]; + id decoder = [decoder_factory_ createDecoder:codecInfo]; if ([decoder isKindOfClass:[RTCWrappedNativeVideoDecoder class]]) { return [(RTCWrappedNativeVideoDecoder *)decoder releaseWrappedDecoder]; @@ -110,7 +111,7 @@ std::unique_ptr ObjCVideoDecoderFactory::CreateVideoDecoder( std::vector ObjCVideoDecoderFactory::GetSupportedFormats() const { std::vector supported_formats; - for (RTCVideoCodecInfo *supportedCodec in decoder_factory_.supportedCodecs) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } diff --git a/sdk/objc/native/src/objc_video_encoder_factory.h b/sdk/objc/native/src/objc_video_encoder_factory.h index ca7a23a129..7e474c976a 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.h +++ b/sdk/objc/native/src/objc_video_encoder_factory.h @@ -13,18 +13,21 @@ #import +#import "base/RTCMacros.h" + #include "api/video_codecs/video_encoder_factory.h" -@protocol RTCVideoEncoderFactory; +@protocol RTC_OBJC_TYPE +(RTCVideoEncoderFactory); 
namespace webrtc { class ObjCVideoEncoderFactory : public VideoEncoderFactory { public: - explicit ObjCVideoEncoderFactory(id); + explicit ObjCVideoEncoderFactory(id); ~ObjCVideoEncoderFactory() override; - id wrapped_encoder_factory() const; + id wrapped_encoder_factory() const; std::vector GetSupportedFormats() const override; std::vector GetImplementations() const override; @@ -34,7 +37,7 @@ class ObjCVideoEncoderFactory : public VideoEncoderFactory { std::unique_ptr GetEncoderSelector() const override; private: - id encoder_factory_; + id encoder_factory_; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm index 027bfb5189..0b53eceb64 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.mm +++ b/sdk/objc/native/src/objc_video_encoder_factory.mm @@ -12,6 +12,7 @@ #include +#import "base/RTCMacros.h" #import "base/RTCVideoEncoder.h" #import "base/RTCVideoEncoderFactory.h" #import "components/video_codec/RTCCodecSpecificInfoH264+Private.h" @@ -38,26 +39,27 @@ namespace { class ObjCVideoEncoder : public VideoEncoder { public: - ObjCVideoEncoder(id encoder) + ObjCVideoEncoder(id encoder) : encoder_(encoder), implementation_name_([encoder implementationName].stdString) {} int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override { - RTCVideoEncoderSettings *settings = - [[RTCVideoEncoderSettings alloc] initWithNativeVideoCodec:codec_settings]; + RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings = + [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings]; return [encoder_ startEncodeWithSettings:settings numberOfCores:encoder_settings.number_of_cores]; } int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override { - [encoder_ setCallback:^BOOL(RTCEncodedImage *_Nonnull frame, - id _Nonnull info, - RTCRtpFragmentationHeader *_Nonnull header) { + [encoder_ 
setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame, + id _Nonnull info, + RTC_OBJC_TYPE(RTCRtpFragmentationHeader) * _Nonnull header) { EncodedImage encodedImage = [frame nativeEncodedImage]; // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases. CodecSpecificInfo codecSpecificInfo; - if ([info isKindOfClass:[RTCCodecSpecificInfoH264 class]]) { - codecSpecificInfo = [(RTCCodecSpecificInfoH264 *)info nativeCodecSpecificInfo]; + if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) { + codecSpecificInfo = + [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo]; } std::unique_ptr fragmentationHeader = @@ -95,7 +97,7 @@ class ObjCVideoEncoder : public VideoEncoder { info.supports_native_handle = true; info.implementation_name = implementation_name_; - RTCVideoEncoderQpThresholds *qp_thresholds = [encoder_ scalingSettings]; + RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings]; info.scaling_settings = qp_thresholds ? 
ScalingSettings(qp_thresholds.low, qp_thresholds.high) : ScalingSettings::kOff; @@ -105,26 +107,29 @@ class ObjCVideoEncoder : public VideoEncoder { } private: - id encoder_; + id encoder_; const std::string implementation_name_; }; class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface { public: - ObjcVideoEncoderSelector(id selector) { selector_ = selector; } + ObjcVideoEncoderSelector(id selector) { + selector_ = selector; + } void OnCurrentEncoder(const SdpVideoFormat &format) override { - RTCVideoCodecInfo *info = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat:format]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format]; [selector_ registerCurrentEncoderInfo:info]; } absl::optional OnEncoderBroken() override { - RTCVideoCodecInfo *info = [selector_ encoderForBrokenEncoder]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder]; if (info) { return [info nativeSdpVideoFormat]; } return absl::nullopt; } absl::optional OnAvailableBitrate(const DataRate &rate) override { - RTCVideoCodecInfo *info = [selector_ encoderForBitrate:rate.kbps()]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps()]; if (info) { return [info nativeSdpVideoFormat]; } @@ -132,23 +137,24 @@ class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInte } private: - id selector_; + id selector_; }; } // namespace -ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(id encoder_factory) +ObjCVideoEncoderFactory::ObjCVideoEncoderFactory( + id encoder_factory) : encoder_factory_(encoder_factory) {} ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {} -id ObjCVideoEncoderFactory::wrapped_encoder_factory() const { +id ObjCVideoEncoderFactory::wrapped_encoder_factory() const { return encoder_factory_; } std::vector ObjCVideoEncoderFactory::GetSupportedFormats() const { std::vector supported_formats; - for 
(RTCVideoCodecInfo *supportedCodec in [encoder_factory_ supportedCodecs]) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } @@ -159,7 +165,7 @@ std::vector ObjCVideoEncoderFactory::GetSupportedFormats() const std::vector ObjCVideoEncoderFactory::GetImplementations() const { if ([encoder_factory_ respondsToSelector:@selector(implementations)]) { std::vector supported_formats; - for (RTCVideoCodecInfo *supportedCodec in [encoder_factory_ implementations]) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } @@ -183,8 +189,9 @@ VideoEncoderFactory::CodecInfo ObjCVideoEncoderFactory::QueryVideoEncoder( std::unique_ptr ObjCVideoEncoderFactory::CreateVideoEncoder( const SdpVideoFormat &format) { - RTCVideoCodecInfo *info = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat:format]; - id encoder = [encoder_factory_ createEncoder:info]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format]; + id encoder = [encoder_factory_ createEncoder:info]; if ([encoder isKindOfClass:[RTCWrappedNativeVideoEncoder class]]) { return [(RTCWrappedNativeVideoEncoder *)encoder releaseWrappedEncoder]; } else { diff --git a/sdk/objc/native/src/objc_video_frame.h b/sdk/objc/native/src/objc_video_frame.h index fd74aca8a1..c2931cb2f8 100644 --- a/sdk/objc/native/src/objc_video_frame.h +++ b/sdk/objc/native/src/objc_video_frame.h @@ -17,7 +17,7 @@ namespace webrtc { -RTCVideoFrame* ToObjCVideoFrame(const VideoFrame& frame); +RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame& frame); } // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_frame.mm b/sdk/objc/native/src/objc_video_frame.mm index 
76f7add6fc..2e8ce6153e 100644 --- a/sdk/objc/native/src/objc_video_frame.mm +++ b/sdk/objc/native/src/objc_video_frame.mm @@ -15,11 +15,11 @@ namespace webrtc { -RTCVideoFrame *ToObjCVideoFrame(const VideoFrame &frame) { - RTCVideoFrame *videoFrame = - [[RTCVideoFrame alloc] initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) - rotation:RTCVideoRotation(frame.rotation()) - timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; +RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) { + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] + initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) + rotation:RTCVideoRotation(frame.rotation()) + timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; videoFrame.timeStamp = frame.timestamp(); return videoFrame; diff --git a/sdk/objc/native/src/objc_video_renderer.h b/sdk/objc/native/src/objc_video_renderer.h index 9396ab6025..f9c35eae96 100644 --- a/sdk/objc/native/src/objc_video_renderer.h +++ b/sdk/objc/native/src/objc_video_renderer.h @@ -14,20 +14,23 @@ #import #import +#import "base/RTCMacros.h" + #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" -@protocol RTCVideoRenderer; +@protocol RTC_OBJC_TYPE +(RTCVideoRenderer); namespace webrtc { class ObjCVideoRenderer : public rtc::VideoSinkInterface { public: - ObjCVideoRenderer(id renderer); + ObjCVideoRenderer(id renderer); void OnFrame(const VideoFrame& nativeVideoFrame) override; private: - id renderer_; + id renderer_; CGSize size_; }; diff --git a/sdk/objc/native/src/objc_video_renderer.mm b/sdk/objc/native/src/objc_video_renderer.mm index 486b7e3b00..4a9b647ec3 100644 --- a/sdk/objc/native/src/objc_video_renderer.mm +++ b/sdk/objc/native/src/objc_video_renderer.mm @@ -10,6 +10,7 @@ #include "sdk/objc/native/src/objc_video_renderer.h" +#import "base/RTCMacros.h" #import "base/RTCVideoFrame.h" #import "base/RTCVideoRenderer.h" @@ -17,11 +18,11 @@ 
namespace webrtc { -ObjCVideoRenderer::ObjCVideoRenderer(id renderer) +ObjCVideoRenderer::ObjCVideoRenderer(id renderer) : renderer_(renderer), size_(CGSizeZero) {} void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) { - RTCVideoFrame* videoFrame = ToObjCVideoFrame(nativeVideoFrame); + RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = ToObjCVideoFrame(nativeVideoFrame); CGSize current_size = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 93e7d15e2f..dad6544315 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -17,9 +17,9 @@ #include "media/base/adapted_video_track_source.h" #include "rtc_base/timestamp_aligner.h" -RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame); +RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter : NSObject +@interface RTCObjCVideoSourceAdapter : NSObject @end namespace webrtc { @@ -42,7 +42,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { bool remote() const override; - void OnCapturedFrame(RTCVideoFrame* frame); + void OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame); // Called by RTCVideoSource. 
void OnOutputFormatRequest(int width, int height, int fps); diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 580180a1a2..85ad087e8b 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -25,7 +25,8 @@ @synthesize objCVideoTrackSource = _objCVideoTrackSource; -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { _objCVideoTrackSource->OnCapturedFrame(frame); } @@ -61,7 +62,7 @@ void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) video_adapter()->OnOutputFormatRequest(format); } -void ObjCVideoTrackSource::OnCapturedFrame(RTCVideoFrame *frame) { +void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) { const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec; const int64_t translated_timestamp_us = timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros()); @@ -88,10 +89,11 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTCVideoFrame *frame) { if (adapted_width == frame.width && adapted_height == frame.height) { // No adaption - optimized path. buffer = new rtc::RefCountedObject(frame.buffer); - } else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { // Adapted CVPixelBuffer frame. 
- RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer; - buffer = new rtc::RefCountedObject([[RTCCVPixelBuffer alloc] + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + buffer = new rtc::RefCountedObject([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:rtcPixelBuffer.pixelBuffer adaptedWidth:adapted_width adaptedHeight:adapted_height diff --git a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm index a937957c19..ca3d67293f 100644 --- a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm +++ b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm @@ -25,7 +25,7 @@ #include "rtc_base/ref_counted_object.h" #include "sdk/objc/native/api/video_frame.h" -typedef void (^VideoSinkCallback)(RTCVideoFrame *); +typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *); namespace { @@ -63,10 +63,13 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; @@ -92,10 +95,13 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; 
+ RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; @@ -119,11 +125,13 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = [[RTCVideoFrame alloc] initWithBuffer:buffer - rotation:RTCVideoRotation_0 - timeStampNs:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; @@ -159,16 +167,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; 
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(frame.width, outputFrame.width); XCTAssertEqual(frame.height, outputFrame.height); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(buffer.cropX, outputBuffer.cropX); XCTAssertEqual(buffer.cropY, outputBuffer.cropY); XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); @@ -192,16 +203,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 0); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); @@ -225,16 +239,19 @@ class 
ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 10); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); @@ -259,22 +276,25 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); // Create a frame that's already adapted down. 
- RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:640 - adaptedHeight:360 - cropWidth:720 - cropHeight:1280 - cropX:0 - cropY:0]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:640 + adaptedHeight:360 + cropWidth:720 + cropHeight:1280 + cropX:0 + cropY:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 480); XCTAssertEqual(outputFrame.height, 270); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 0); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(outputBuffer.cropWidth, 640); @@ -300,22 +320,25 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:370 - adaptedHeight:640 - cropWidth:370 - cropHeight:640 - cropX:10 - cropY:0]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:370 + adaptedHeight:640 + cropWidth:370 + cropHeight:640 + cropX:10 + cropY:0]; + 
RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCCVPixelBuffer *outputBuffer = outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 14); XCTAssertEqual(outputBuffer.cropY, 0); XCTAssertEqual(outputBuffer.cropWidth, 360); @@ -341,22 +364,25 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface CVPixelBufferCreate( NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:300 - adaptedHeight:640 - cropWidth:300 - cropHeight:640 - cropX:40 - cropY:0]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:300 + adaptedHeight:640 + cropWidth:300 + cropHeight:640 + cropX:40 + cropY:0]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 300); XCTAssertEqual(outputFrame.height, 534); - RTCCVPixelBuffer *outputBuffer 
= outputFrame.buffer; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; XCTAssertEqual(outputBuffer.cropX, 40); XCTAssertEqual(outputBuffer.cropY, 52); XCTAssertEqual(outputBuffer.cropWidth, 300); @@ -379,16 +405,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFrameI420BufferNeedsAdaptation { rtc::scoped_refptr i420Buffer = CreateI420Gradient(720, 1280); - RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCI420Buffer) *buffer = + [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer; + RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); @@ -408,16 +437,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFrameI420BufferNeedsCropping { rtc::scoped_refptr i420Buffer = CreateI420Gradient(380, 640); - RTCI420Buffer *buffer = [[RTCI420Buffer alloc] initWithFrameBuffer:i420Buffer]; - RTCVideoFrame *frame = - [[RTCVideoFrame alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; + RTC_OBJC_TYPE(RTCI420Buffer) *buffer = + 
[[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *frame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTCVideoFrame *outputFrame) { + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTCI420Buffer *outputBuffer = (RTCI420Buffer *)outputFrame.buffer; + RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); XCTAssertGreaterThanOrEqual(psnr, 40); diff --git a/sdk/objc/unittests/RTCAudioDevice_xctest.mm b/sdk/objc/unittests/RTCAudioDevice_xctest.mm index a3db613dfe..c936399f34 100644 --- a/sdk/objc/unittests/RTCAudioDevice_xctest.mm +++ b/sdk/objc/unittests/RTCAudioDevice_xctest.mm @@ -21,7 +21,7 @@ std::unique_ptr _audio_device; } -@property(nonatomic) RTCAudioSession *audioSession; +@property(nonatomic) RTC_OBJC_TYPE(RTCAudioSession) * audioSession; @end @@ -34,7 +34,7 @@ _audioDeviceModule = webrtc::CreateAudioDeviceModule(); _audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS()); - self.audioSession = [RTCAudioSession sharedInstance]; + self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSError *error = nil; [self.audioSession lockForConfiguration]; @@ -61,21 +61,21 @@ // Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly // after an iOS AVAudioSessionInterruptionTypeEnded notification event. 
-// AudioDeviceIOS listens to RTCAudioSession interrupted notifications by: +// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted notifications by: // - In AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_ -// callback with RTCAudioSession's delegate list. -// - When RTCAudioSession receives an iOS audio interrupted notification, it +// callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list. +// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted notification, it // passes the notification to callbacks in its delegate list which sets // AudioDeviceIOS's is_interrupted_ flag to true. // - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its // audio_session_observer_ callback is removed from RTCAudioSessions's // delegate list. -// So if RTCAudioSession receives an iOS end audio interruption notification, -// AudioDeviceIOS is not notified as its callback is not in RTCAudioSession's +// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption notification, +// AudioDeviceIOS is not notified as its callback is not in RTC_OBJC_TYPE(RTCAudioSession)'s // delegate list. This causes AudioDeviceIOS's is_interrupted_ flag to be in // the wrong (true) state and the audio session will ignore audio changes. -// As RTCAudioSession keeps its own interrupted state, the fix is to initialize -// AudioDeviceIOS's is_interrupted_ flag to RTCAudioSession's isInterrupted +// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to initialize +// AudioDeviceIOS's is_interrupted_ flag to RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted // flag in AudioDeviceIOS.InitPlayOrRecord. 
- (void)testInterruptedAudioSession { XCTAssertTrue(self.audioSession.isActive); diff --git a/sdk/objc/unittests/RTCAudioSessionTest.mm b/sdk/objc/unittests/RTCAudioSessionTest.mm index c2140c3ba6..4e309ca2fa 100644 --- a/sdk/objc/unittests/RTCAudioSessionTest.mm +++ b/sdk/objc/unittests/RTCAudioSessionTest.mm @@ -20,9 +20,11 @@ #import "components/audio/RTCAudioSession.h" #import "components/audio/RTCAudioSessionConfiguration.h" -@interface RTCAudioSession (UnitTesting) +@interface RTC_OBJC_TYPE (RTCAudioSession) +(UnitTesting) -@property(nonatomic, readonly) std::vector<__weak id > delegates; + @property(nonatomic, + readonly) std::vector<__weak id > delegates; - (instancetype)initWithAudioSession:(id)audioSession; @@ -38,7 +40,7 @@ @synthesize outputVolume = _outputVolume; @end -@interface RTCAudioSessionTestDelegate : NSObject +@interface RTCAudioSessionTestDelegate : NSObject @property (nonatomic, readonly) float outputVolume; @@ -55,31 +57,31 @@ return self; } -- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session { +- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session +- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session shouldResumeSession:(BOOL)shouldResumeSession { } -- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session - reason:(AVAudioSessionRouteChangeReason)reason - previousRoute:(AVAudioSessionRouteDescription *)previousRoute { +- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session + reason:(AVAudioSessionRouteChangeReason)reason + previousRoute:(AVAudioSessionRouteDescription *)previousRoute { } -- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session { +- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionMediaServerReset:(RTCAudioSession *)session { +- 
(void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionShouldConfigure:(RTCAudioSession *)session { +- (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session { +- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSession:(RTCAudioSession *)audioSession +- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession didChangeOutputVolume:(float)outputVolume { _outputVolume = outputVolume; } @@ -95,14 +97,14 @@ - (instancetype)init { if (self = [super init]) { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session addDelegate:self]; } return self; } - (void)dealloc { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session removeDelegate:self]; } @@ -118,7 +120,7 @@ @implementation RTCAudioSessionTest - (void)testLockForConfiguration { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; for (size_t i = 0; i < 2; i++) { [session lockForConfiguration]; @@ -132,7 +134,7 @@ } - (void)testAddAndRemoveDelegates { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSMutableArray *delegates = [NSMutableArray array]; const size_t count = 5; for (size_t i = 0; i < count; ++i) { @@ -151,7 +153,7 @@ } - (void)testPushDelegate { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSMutableArray *delegates = [NSMutableArray array]; const size_t count = 2; for (size_t i = 0; 
i < count; ++i) { @@ -184,7 +186,7 @@ // Tests that delegates added to the audio session properly zero out. This is // checking an implementation detail (that vectors of __weak work as expected). - (void)testZeroingWeakDelegate { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; @autoreleasepool { // Add a delegate to the session. There should be one delegate at this // point. @@ -212,12 +214,12 @@ [[RTCTestRemoveOnDeallocDelegate alloc] init]; EXPECT_TRUE(delegate); } - RTCAudioSession *session = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; EXPECT_EQ(0u, session.delegates.size()); } - (void)testAudioSessionActivation { - RTCAudioSession *audioSession = [RTCAudioSession sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; EXPECT_EQ(0, audioSession.activationCount); [audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]]; EXPECT_EQ(1, audioSession.activationCount); @@ -255,10 +257,10 @@ OCMLocation *OCMMakeLocation(id testCase, const char *fileCString, int line){ setActive:YES withOptions:0 error:((NSError __autoreleasing **)[OCMArg anyPointer])]). 
andDo(setActiveBlock); - id mockAudioSession = OCMPartialMock([RTCAudioSession sharedInstance]); + id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]); OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession); - RTCAudioSession *audioSession = mockAudioSession; + RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession; EXPECT_EQ(0, audioSession.activationCount); [audioSession lockForConfiguration]; EXPECT_TRUE([audioSession checkLock:nil]); @@ -286,7 +288,8 @@ OCMLocation *OCMMakeLocation(id testCase, const char *fileCString, int line){ - (void)testAudioVolumeDidNotify { MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init]; - RTCAudioSession *session = [[RTCAudioSession alloc] initWithAudioSession:mockAVAudioSession]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession]; RTCAudioSessionTestDelegate *delegate = [[RTCAudioSessionTestDelegate alloc] init]; [session addDelegate:delegate]; @@ -304,8 +307,8 @@ namespace webrtc { class AudioSessionTest : public ::testing::Test { protected: void TearDown() override { - RTCAudioSession *session = [RTCAudioSession sharedInstance]; - for (id delegate : session.delegates) { + RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + for (id delegate : session.delegates) { [session removeDelegate:delegate]; } } diff --git a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm index ee970643ab..3a1ab24773 100644 --- a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm +++ b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm @@ -31,7 +31,8 @@ CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) 
*buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertFalse([buffer requiresCropping]); @@ -42,13 +43,14 @@ CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *croppedBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:720 - adaptedHeight:1280 - cropWidth:360 - cropHeight:640 - cropX:100 - cropY:100]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:720 + adaptedHeight:1280 + cropWidth:360 + cropHeight:640 + cropX:100 + cropY:100]; XCTAssertTrue([croppedBuffer requiresCropping]); @@ -60,7 +62,8 @@ CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]); CVBufferRelease(pixelBufferRef); @@ -71,7 +74,8 @@ CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]); CVBufferRelease(pixelBufferRef); @@ -82,13 +86,14 @@ CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:720 - adaptedHeight:1280 - cropWidth:360 - cropHeight:640 - cropX:100 - cropY:100]; + 
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:720 + adaptedHeight:1280 + cropWidth:360 + cropHeight:640 + cropX:100 + cropY:100]; XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]); CVBufferRelease(pixelBufferRef); @@ -99,7 +104,8 @@ CVPixelBufferCreate( NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000); CVBufferRelease(pixelBufferRef); @@ -109,7 +115,8 @@ CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0); CVBufferRelease(pixelBufferRef); @@ -198,7 +205,8 @@ rtc::scoped_refptr i420Buffer = CreateI420Gradient(720, 1280); CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual(buffer.width, 720); XCTAssertEqual(buffer.height, 1280); @@ -218,14 +226,14 @@ [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()]; - RTCCVPixelBuffer *scaledBuffer = - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] 
initWithPixelBuffer:outputPixelBufferRef]; XCTAssertEqual(scaledBuffer.width, outputSize.width); XCTAssertEqual(scaledBuffer.height, outputSize.height); if (outputSize.width > 0 && outputSize.height > 0) { - RTCI420Buffer *originalBufferI420 = [buffer toI420]; - RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420]; double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); @@ -244,14 +252,14 @@ DrawGradientInRGBPixelBuffer(pixelBufferRef); - RTCCVPixelBuffer *buffer = - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef) - adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX - cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY - cropX:cropX - cropY:cropY]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] + initWithPixelBuffer:pixelBufferRef + adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef) + adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef) + cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX + cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY + cropX:cropX + cropY:cropY]; XCTAssertEqual(buffer.width, 720); XCTAssertEqual(buffer.height, 1280); @@ -260,13 +268,13 @@ CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &outputPixelBufferRef); [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL]; - RTCCVPixelBuffer *scaledBuffer = - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef]; XCTAssertEqual(scaledBuffer.width, 360); XCTAssertEqual(scaledBuffer.height, 640); - RTCI420Buffer 
*originalBufferI420 = [buffer toI420]; - RTCI420Buffer *scaledBufferI420 = [scaledBuffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420]; + RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420]; double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); @@ -282,8 +290,9 @@ CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); - RTCCVPixelBuffer *buffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]; - RTCI420Buffer *fromCVPixelBuffer = [buffer toI420]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420]; double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]); double target = webrtc::kPerfectPSNR; diff --git a/sdk/objc/unittests/RTCCallbackLogger_xctest.m b/sdk/objc/unittests/RTCCallbackLogger_xctest.m index ceaa762f1f..1b6fb1c07b 100644 --- a/sdk/objc/unittests/RTCCallbackLogger_xctest.m +++ b/sdk/objc/unittests/RTCCallbackLogger_xctest.m @@ -14,7 +14,7 @@ @interface RTCCallbackLoggerTests : XCTestCase -@property(nonatomic, strong) RTCCallbackLogger *logger; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCallbackLogger) * logger; @end @@ -23,7 +23,7 @@ @synthesize logger; - (void)setUp { - self.logger = [[RTCCallbackLogger alloc] init]; + self.logger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init]; } - (void)tearDown { diff --git a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm index bd74fc7d6a..34551e5ac8 100644 --- a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm +++ b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm @@ -59,9 +59,11 @@ CMSampleBufferRef createTestSampleBufferRef() { } #endif -@interface RTCCameraVideoCapturer (Tests) -- (instancetype)initWithDelegate:(__weak id)delegate - 
captureSession:(AVCaptureSession *)captureSession; +@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) +(Tests) - + (instancetype)initWithDelegate + : (__weak id)delegate captureSession + : (AVCaptureSession *)captureSession; @end @interface RTCCameraVideoCapturerTests : NSObject @@ -69,7 +71,7 @@ CMSampleBufferRef createTestSampleBufferRef() { @property(nonatomic, strong) id deviceMock; @property(nonatomic, strong) id captureConnectionMock; @property(nonatomic, strong) id captureSessionMock; -@property(nonatomic, strong) RTCCameraVideoCapturer *capturer; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer; @end @implementation RTCCameraVideoCapturerTests @@ -80,9 +82,10 @@ CMSampleBufferRef createTestSampleBufferRef() { @synthesize capturer = _capturer; - (void)setup { - self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate)); + self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]); - self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock]; + self.capturer = + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock]; self.deviceMock = [self createDeviceMock]; } @@ -94,10 +97,11 @@ CMSampleBufferRef createTestSampleBufferRef() { OCMStub([self.captureSessionMock addOutput:[OCMArg any]]); OCMStub([self.captureSessionMock beginConfiguration]); OCMStub([self.captureSessionMock commitConfiguration]); - self.delegateMock = OCMProtocolMock(@protocol(RTCVideoCapturerDelegate)); + self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]); - self.capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.delegateMock - captureSession:self.captureSessionMock]; + self.capturer = + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock + 
captureSession:self.captureSessionMock]; self.deviceMock = [self createDeviceMock]; } @@ -160,7 +164,8 @@ CMSampleBufferRef createTestSampleBufferRef() { OCMStub([self.deviceMock formats]).andReturn(formats); // when - NSArray *supportedFormats = [RTCCameraVideoCapturer supportedFormatsForDevice:self.deviceMock]; + NSArray *supportedFormats = + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock]; // then EXPECT_EQ(supportedFormats.count, 3u); @@ -199,7 +204,8 @@ CMSampleBufferRef createTestSampleBufferRef() { // then [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) { + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270); return YES; }]]; @@ -240,22 +246,23 @@ CMSampleBufferRef createTestSampleBufferRef() { CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) { - if (camera == AVCaptureDevicePositionFront) { - if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); - } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); - } - } else if (camera == AVCaptureDevicePositionBack) { - if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); - } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); - } - } - return YES; - }]]; + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { + if (camera == AVCaptureDevicePositionFront) { + if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { + 
EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); + } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); + } + } else if (camera == AVCaptureDevicePositionBack) { + if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); + } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { + EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180); + } + } + return YES; + }]]; NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil]; @@ -298,12 +305,13 @@ CMSampleBufferRef createTestSampleBufferRef() { CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTCVideoFrame *expectedFrame) { - // Front camera and landscape left should return 180. But the frame says its from the back - // camera, so rotation should be 0. - EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); - return YES; - }]]; + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { + // Front camera and landscape left should return 180. But the frame says its + // from the back camera, so rotation should be 0. 
+ EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0); + return YES; + }]]; NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil]; diff --git a/sdk/objc/unittests/RTCCertificateTest.mm b/sdk/objc/unittests/RTCCertificateTest.mm index 5bf1eb3fe4..38c935cef2 100644 --- a/sdk/objc/unittests/RTCCertificateTest.mm +++ b/sdk/objc/unittests/RTCCertificateTest.mm @@ -29,38 +29,39 @@ @implementation RTCCertificateTest - (void)testCertificateIsUsedInConfig { - RTCConfiguration *originalConfig = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; originalConfig.iceServers = @[ server ]; // Generate a new certificate. - RTCCertificate *originalCertificate = [RTCCertificate generateCertificateWithParams:@{ - @"expires" : @100000, - @"name" : @"RSASSA-PKCS1-v1_5" - }]; + RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate) + generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}]; // Store certificate in configuration. originalConfig.certificate = originalCertificate; - RTCMediaConstraints *contraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; - RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; // Create PeerConnection with this certificate. 
- RTCPeerConnection *peerConnection = + RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = [factory peerConnectionWithConfiguration:originalConfig constraints:contraints delegate:nil]; // Retrieve certificate from the configuration. - RTCConfiguration *retrievedConfig = peerConnection.configuration; + RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration; // Extract PEM strings from original certificate. std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String]; std::string originalCertificateField = [[originalCertificate certificate] UTF8String]; // Extract PEM strings from certificate retrieved from configuration. - RTCCertificate *retrievedCertificate = retrievedConfig.certificate; + RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate; std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String]; std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String]; diff --git a/sdk/objc/unittests/RTCConfigurationTest.mm b/sdk/objc/unittests/RTCConfigurationTest.mm index 3fb4d428e4..51e4a70893 100644 --- a/sdk/objc/unittests/RTCConfigurationTest.mm +++ b/sdk/objc/unittests/RTCConfigurationTest.mm @@ -28,9 +28,10 @@ - (void)testConversionToNativeConfiguration { NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -47,10 +48,11 @@ config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; - 
config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:YES - srtpEnableEncryptedRtpHeaderExtensions:YES - sframeRequireFrameEncryption:YES]; + config.cryptoOptions = + [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:YES + srtpEnableEncryptedRtpHeaderExtensions:YES + sframeRequireFrameEncryption:YES]; config.rtcpAudioReportIntervalMs = 2500; config.rtcpVideoReportIntervalMs = 3750; @@ -89,9 +91,10 @@ - (void)testNativeConversionToConfiguration { NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -108,20 +111,21 @@ config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; - config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:NO - srtpEnableEncryptedRtpHeaderExtensions:NO - sframeRequireFrameEncryption:NO]; + config.cryptoOptions = + [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:NO + srtpEnableEncryptedRtpHeaderExtensions:NO + sframeRequireFrameEncryption:NO]; config.rtcpAudioReportIntervalMs = 1500; config.rtcpVideoReportIntervalMs = 2150; webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig = [config createNativeConfiguration]; - RTCConfiguration *newConfig = [[RTCConfiguration alloc] - initWithNativeConfiguration:*nativeConfig]; + 
RTC_OBJC_TYPE(RTCConfiguration) *newConfig = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig]; EXPECT_EQ([config.iceServers count], newConfig.iceServers.count); - RTCIceServer *newServer = newConfig.iceServers[0]; - RTCIceServer *origServer = config.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0]; EXPECT_EQ(origServer.urlStrings.count, server.urlStrings.count); std::string origUrl = origServer.urlStrings.firstObject.UTF8String; std::string url = newServer.urlStrings.firstObject.UTF8String; @@ -152,7 +156,7 @@ } - (void)testDefaultValues { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; EXPECT_EQ(config.cryptoOptions, nil); } diff --git a/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm b/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm index a96ae51707..b3461cc854 100644 --- a/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm +++ b/sdk/objc/unittests/RTCDataChannelConfigurationTest.mm @@ -30,8 +30,8 @@ int channelId = 4; NSString *protocol = @"protocol"; - RTCDataChannelConfiguration *dataChannelConfig = - [[RTCDataChannelConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig = + [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init]; dataChannelConfig.isOrdered = isOrdered; dataChannelConfig.maxPacketLifeTime = maxPacketLifeTime; dataChannelConfig.maxRetransmits = maxRetransmits; @@ -50,7 +50,7 @@ @end -TEST(RTCDataChannelConfiguration, NativeDataChannelInitConversionTest) { +TEST(RTC_OBJC_TYPE(RTCDataChannelConfiguration), NativeDataChannelInitConversionTest) { @autoreleasepool { RTCDataChannelConfigurationTest *test = [[RTCDataChannelConfigurationTest alloc] init]; diff --git a/sdk/objc/unittests/RTCEncodedImage_xctest.mm b/sdk/objc/unittests/RTCEncodedImage_xctest.mm 
index 577ecda2ff..84804fee87 100644 --- a/sdk/objc/unittests/RTCEncodedImage_xctest.mm +++ b/sdk/objc/unittests/RTCEncodedImage_xctest.mm @@ -22,15 +22,15 @@ webrtc::EncodedImage encoded_image; encoded_image.SetEncodedData(encoded_data); - RTCEncodedImage *encodedImage = - [[RTCEncodedImage alloc] initWithNativeEncodedImage:encoded_image]; + RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image]; XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data); } - (void)testInitWithNSData { NSData *bufferData = [NSData data]; - RTCEncodedImage *encodedImage = [[RTCEncodedImage alloc] init]; + RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init]; encodedImage.buffer = bufferData; webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage]; @@ -39,12 +39,13 @@ } - (void)testRetainsNativeEncodedImage { - RTCEncodedImage *encodedImage; + RTC_OBJC_TYPE(RTCEncodedImage) * encodedImage; { const auto encoded_data = webrtc::EncodedImageBuffer::Create(); webrtc::EncodedImage encoded_image; encoded_image.SetEncodedData(encoded_data); - encodedImage = [[RTCEncodedImage alloc] initWithNativeEncodedImage:encoded_image]; + encodedImage = + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image]; } webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage]; XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr); diff --git a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm index 01deb68a32..2407c88c1a 100644 --- a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm +++ b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm @@ -17,7 +17,7 @@ NSString *const kTestFileName = @"foreman.mp4"; static const int kTestTimeoutMs = 5 * 1000; // 5secs. 
-@interface MockCapturerDelegate : NSObject +@interface MockCapturerDelegate : NSObject @property(nonatomic, assign) NSInteger capturedFramesCount; @@ -26,7 +26,8 @@ static const int kTestTimeoutMs = 5 * 1000; // 5secs. @implementation MockCapturerDelegate @synthesize capturedFramesCount = _capturedFramesCount; -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { self.capturedFramesCount++; } @@ -35,7 +36,7 @@ static const int kTestTimeoutMs = 5 * 1000; // 5secs. NS_CLASS_AVAILABLE_IOS(10) @interface RTCFileVideoCapturerTests : XCTestCase -@property(nonatomic, strong) RTCFileVideoCapturer *capturer; +@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * capturer; @property(nonatomic, strong) MockCapturerDelegate *mockDelegate; @end @@ -46,7 +47,7 @@ NS_CLASS_AVAILABLE_IOS(10) - (void)setUp { self.mockDelegate = [[MockCapturerDelegate alloc] init]; - self.capturer = [[RTCFileVideoCapturer alloc] initWithDelegate:self.mockDelegate]; + self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate]; } - (void)tearDown { diff --git a/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m b/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m index 066958692f..ec9dc41796 100644 --- a/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m +++ b/sdk/objc/unittests/RTCH264ProfileLevelId_xctest.m @@ -22,24 +22,26 @@ static NSString *level31ConstrainedBaseline = @"42e01f"; @implementation RTCH264ProfileLevelIdTests - (void)testInitWithString { - RTCH264ProfileLevelId *profileLevelId = - [[RTCH264ProfileLevelId alloc] initWithHexString:level31ConstrainedHigh]; + RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId = + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedHigh]; XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedHigh); 
XCTAssertEqual(profileLevelId.level, RTCH264Level3_1); - profileLevelId = [[RTCH264ProfileLevelId alloc] initWithHexString:level31ConstrainedBaseline]; + profileLevelId = + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:level31ConstrainedBaseline]; XCTAssertEqual(profileLevelId.profile, RTCH264ProfileConstrainedBaseline); XCTAssertEqual(profileLevelId.level, RTCH264Level3_1); } - (void)testInitWithProfileAndLevel { - RTCH264ProfileLevelId *profileLevelId = - [[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedHigh - level:RTCH264Level3_1]; + RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId = + [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithProfile:RTCH264ProfileConstrainedHigh + level:RTCH264Level3_1]; XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedHigh); - profileLevelId = [[RTCH264ProfileLevelId alloc] initWithProfile:RTCH264ProfileConstrainedBaseline - level:RTCH264Level3_1]; + profileLevelId = [[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] + initWithProfile:RTCH264ProfileConstrainedBaseline + level:RTCH264Level3_1]; XCTAssertEqualObjects(profileLevelId.hexString, level31ConstrainedBaseline); } diff --git a/sdk/objc/unittests/RTCIceCandidateTest.mm b/sdk/objc/unittests/RTCIceCandidateTest.mm index 18dcdad8b4..b0b6cb62a0 100644 --- a/sdk/objc/unittests/RTCIceCandidateTest.mm +++ b/sdk/objc/unittests/RTCIceCandidateTest.mm @@ -30,9 +30,8 @@ "fdff:2642:12a6:fe38:c001:beda:fcf9:51aa " "59052 typ host generation 0"; - RTCIceCandidate *candidate = [[RTCIceCandidate alloc] initWithSdp:sdp - sdpMLineIndex:0 - sdpMid:@"audio"]; + RTC_OBJC_TYPE(RTCIceCandidate) *candidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"]; std::unique_ptr nativeCandidate = candidate.nativeCandidate; @@ -51,8 +50,8 @@ webrtc::IceCandidateInterface *nativeCandidate = webrtc::CreateIceCandidate("audio", 0, sdp, nullptr); - RTCIceCandidate *iceCandidate = - [[RTCIceCandidate alloc] 
initWithNativeCandidate:nativeCandidate]; + RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate = + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate]; EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]); EXPECT_EQ(0, iceCandidate.sdpMLineIndex); diff --git a/sdk/objc/unittests/RTCIceServerTest.mm b/sdk/objc/unittests/RTCIceServerTest.mm index 8ef5195b95..5dbb92f16d 100644 --- a/sdk/objc/unittests/RTCIceServerTest.mm +++ b/sdk/objc/unittests/RTCIceServerTest.mm @@ -28,8 +28,8 @@ @implementation RTCIceServerTest - (void)testOneURLServer { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ - @"stun:stun1.example.net" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); @@ -39,8 +39,8 @@ } - (void)testTwoURLServer { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ - @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] + initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(2u, iceStruct.urls.size()); @@ -51,10 +51,10 @@ } - (void)testPasswordCredential { - RTCIceServer *server = [[RTCIceServer alloc] - initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential"]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential"]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -63,11 +63,12 @@ } - (void)testHostname { - RTCIceServer 
*server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname"]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname"]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -77,12 +78,13 @@ } - (void)testTlsAlpnProtocols { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname" - tlsAlpnProtocols:@[ @"proto1", @"proto2" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname" + tlsAlpnProtocols:@[ @"proto1", @"proto2" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -93,13 +95,14 @@ } - (void)testTlsEllipticCurves { - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname" - tlsAlpnProtocols:@[ @"proto1", @"proto2" ] - tlsEllipticCurves:@[ @"curve1", @"curve2" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname" + 
tlsAlpnProtocols:@[ @"proto1", @"proto2" ] + tlsEllipticCurves:@[ @"curve1", @"curve2" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -121,8 +124,8 @@ nativeServer.tls_elliptic_curves.push_back("curve1"); nativeServer.tls_elliptic_curves.push_back("curve2"); - RTCIceServer *iceServer = - [[RTCIceServer alloc] initWithNativeServer:nativeServer]; + RTC_OBJC_TYPE(RTCIceServer) *iceServer = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer]; EXPECT_EQ(1u, iceServer.urlStrings.count); EXPECT_EQ("stun:stun.example.net", [NSString stdStringForString:iceServer.urlStrings.firstObject]); diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m index d7fa12c2ac..eb519bb13f 100644 --- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m +++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m @@ -21,10 +21,11 @@ #import "components/renderer/metal/RTCMTLNV12Renderer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -// Extension of RTCMTLVideoView for testing purposes. -@interface RTCMTLVideoView (Testing) +// Extension of RTC_OBJC_TYPE(RTCMTLVideoView) for testing purposes. 
+@interface RTC_OBJC_TYPE (RTCMTLVideoView) +(Testing) -@property(nonatomic, readonly) MTKView *metalView; + @property(nonatomic, readonly) MTKView *metalView; + (BOOL)isMetalAvailable; + (UIView *)createMetalView:(CGRect)frame; @@ -48,7 +49,7 @@ @synthesize frameMock = _frameMock; - (void)setUp { - self.classMock = OCMClassMock([RTCMTLVideoView class]); + self.classMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLVideoView) class]); [self startMockingNilView]; } @@ -64,15 +65,16 @@ } - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer { - id frameMock = OCMClassMock([RTCVideoFrame class]); + id frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]); if (hasCVPixelBuffer) { CVPixelBufferRef pixelBufferRef; CVPixelBufferCreate( kCFAllocatorDefault, 200, 200, kCVPixelFormatType_420YpCbCr8Planar, nil, &pixelBufferRef); OCMStub([frameMock buffer]) - .andReturn([[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBufferRef]); + .andReturn([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]); } else { - OCMStub([frameMock buffer]).andReturn([[RTCI420Buffer alloc] initWithWidth:200 height:200]); + OCMStub([frameMock buffer]) + .andReturn([[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithWidth:200 height:200]); } OCMStub([frameMock timeStampNs]).andReturn(arc4random_uniform(INT_MAX)); return frameMock; @@ -98,7 +100,8 @@ // when BOOL asserts = NO; @try { - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; (void)realView; } @catch (NSException *ex) { asserts = YES; @@ -111,8 +114,9 @@ // given OCMStub([self.classMock isMetalAvailable]).andReturn(YES); - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; - self.frameMock = OCMClassMock([RTCVideoFrame class]); + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] 
initWithFrame:CGRectMake(0, 0, 640, 480)]; + self.frameMock = OCMClassMock([RTC_OBJC_TYPE(RTCVideoFrame) class]); [[self.frameMock reject] buffer]; [[self.classMock reject] createNV12Renderer]; @@ -137,7 +141,8 @@ OCMExpect([self.classMock createI420Renderer]).andReturn(self.rendererI420Mock); [[self.classMock reject] createNV12Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; // when [realView renderFrame:self.frameMock]; @@ -158,7 +163,8 @@ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; // when [realView renderFrame:self.frameMock]; @@ -178,7 +184,8 @@ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; [realView renderFrame:self.frameMock]; [realView drawInMTKView:realView.metalView]; @@ -186,7 +193,7 @@ [self.classMock verify]; // Recreate view. - realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; OCMExpect([self.rendererNV12Mock drawFrame:self.frameMock]); // View hould reinit renderer. 
OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); @@ -206,7 +213,8 @@ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; [realView renderFrame:self.frameMock]; [realView drawInMTKView:realView.metalView]; @@ -230,7 +238,8 @@ OCMExpect([self.classMock createNV12Renderer]).andReturn(self.rendererNV12Mock); [[self.classMock reject] createI420Renderer]; - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; [realView renderFrame:self.frameMock]; [realView drawInMTKView:realView.metalView]; @@ -250,11 +259,12 @@ - (void)testReportsSizeChangesToDelegate { OCMStub([self.classMock isMetalAvailable]).andReturn(YES); - id delegateMock = OCMProtocolMock(@protocol(RTCVideoViewDelegate)); + id delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoViewDelegate))); CGSize size = CGSizeMake(640, 480); OCMExpect([delegateMock videoView:[OCMArg any] didChangeVideoSize:size]); - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = + [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectMake(0, 0, 640, 480)]; realView.delegate = delegateMock; [realView setSize:size]; @@ -269,7 +279,7 @@ createMetalView:CGRectZero]; OCMExpect([metalKitView setContentMode:UIViewContentModeScaleAspectFill]); - RTCMTLVideoView *realView = [[RTCMTLVideoView alloc] init]; + RTC_OBJC_TYPE(RTCMTLVideoView) *realView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] init]; [realView 
setVideoContentMode:UIViewContentModeScaleAspectFill]; OCMVerify(metalKitView); diff --git a/sdk/objc/unittests/RTCMediaConstraintsTest.mm b/sdk/objc/unittests/RTCMediaConstraintsTest.mm index 4d5e450fff..7664a7ef11 100644 --- a/sdk/objc/unittests/RTCMediaConstraintsTest.mm +++ b/sdk/objc/unittests/RTCMediaConstraintsTest.mm @@ -28,9 +28,9 @@ NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"}; NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"}; - RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] - initWithMandatoryConstraints:mandatory - optionalConstraints:optional]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatory + optionalConstraints:optional]; std::unique_ptr nativeConstraints = [constraints nativeConstraints]; diff --git a/sdk/objc/unittests/RTCNV12TextureCache_xctest.m b/sdk/objc/unittests/RTCNV12TextureCache_xctest.m index d5fa65b173..7bdc538f67 100644 --- a/sdk/objc/unittests/RTCNV12TextureCache_xctest.m +++ b/sdk/objc/unittests/RTCNV12TextureCache_xctest.m @@ -43,10 +43,12 @@ - (void)testNV12TextureCacheDoesNotCrashOnEmptyFrame { CVPixelBufferRef nullPixelBuffer = NULL; - RTCCVPixelBuffer *badFrameBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:nullPixelBuffer]; - RTCVideoFrame *badFrame = [[RTCVideoFrame alloc] initWithBuffer:badFrameBuffer - rotation:RTCVideoRotation_0 - timeStampNs:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *badFrameBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:nullPixelBuffer]; + RTC_OBJC_TYPE(RTCVideoFrame) *badFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:badFrameBuffer + rotation:RTCVideoRotation_0 + timeStampNs:0]; [_nv12TextureCache uploadFrameToTextures:badFrame]; } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm index 40b3aa0399..7d19d4095d 100644 --- 
a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm @@ -39,7 +39,7 @@ extern "C" { @implementation RTCPeerConnectionFactoryBuilderTest - (void)testBuilder { - id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]); + id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); OCMExpect([factoryMock alloc]).andReturn(factoryMock); #ifdef HAVE_NO_MEDIA RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]); @@ -54,13 +54,14 @@ extern "C" { mediaTransportFactory:nullptr]); #endif RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; - RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = + [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); OCMVerifyAll(factoryMock); } - (void)testDefaultComponentsBuilder { - id factoryMock = OCMStrictClassMock([RTCPeerConnectionFactory class]); + id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); OCMExpect([factoryMock alloc]).andReturn(factoryMock); #ifdef HAVE_NO_MEDIA RTC_UNUSED([[[factoryMock expect] andReturn:factoryMock] initWithNoMedia]); @@ -75,7 +76,8 @@ extern "C" { mediaTransportFactory:nullptr]); #endif RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder]; - RTCPeerConnectionFactory* peerConnectionFactory = [builder createPeerConnectionFactory]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = + [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); OCMVerifyAll(factoryMock); } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m index 477b541276..2737bb6eee 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m +++ 
b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m @@ -30,16 +30,17 @@ - (void)testPeerConnectionLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *peerConnection; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; [peerConnection close]; @@ -53,11 +54,11 @@ - (void)testMediaStreamLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCMediaStream *mediaStream; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCMediaStream) * mediaStream; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; mediaStream = [factory mediaStreamWithStreamId:@"mediaStream"]; factory = nil; } @@ -69,17 +70,19 @@ - (void)testDataChannelLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; - RTCDataChannelConfiguration *dataChannelConfig = [[RTCDataChannelConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + 
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCDataChannelConfiguration) *dataChannelConfig = + [[RTC_OBJC_TYPE(RTCDataChannelConfiguration) alloc] init]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *peerConnection; - RTCDataChannel *dataChannel; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; + RTC_OBJC_TYPE(RTCDataChannel) * dataChannel; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; dataChannel = @@ -97,18 +100,20 @@ - (void)testRTCRtpTransceiverLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - RTCMediaConstraints *contraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; - RTCRtpTransceiverInit *init = [[RTCRtpTransceiverInit alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCRtpTransceiverInit) *init = + [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *peerConnection; - RTCRtpTransceiver *tranceiver; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; + RTC_OBJC_TYPE(RTCRtpTransceiver) * tranceiver; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil]; 
tranceiver = [peerConnection addTransceiverOfType:RTCRtpMediaTypeAudio init:init]; @@ -125,16 +130,17 @@ - (void)testRTCRtpSenderLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *peerConnection; - RTCRtpSender *sender; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection; + RTC_OBJC_TYPE(RTCRtpSender) * sender; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"]; @@ -151,19 +157,20 @@ - (void)testRTCRtpReceiverLifetime { @autoreleasepool { - RTCConfiguration *config = [[RTCConfiguration alloc] init]; - RTCMediaConstraints *constraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; - RTCPeerConnectionFactory *factory; - RTCPeerConnection *pc1; - RTCPeerConnection *pc2; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCPeerConnection) * pc1; + RTC_OBJC_TYPE(RTCPeerConnection) * pc2; - NSArray *receivers1; - NSArray *receivers2; + NSArray *receivers1; + 
NSArray *receivers2; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"]; @@ -197,11 +204,11 @@ - (void)testAudioSourceLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCAudioSource *audioSource; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCAudioSource) * audioSource; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; audioSource = [factory audioSourceWithConstraints:nil]; XCTAssertNotNil(audioSource); factory = nil; @@ -214,11 +221,11 @@ - (void)testVideoSourceLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCVideoSource *videoSource; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCVideoSource) * videoSource; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; videoSource = [factory videoSource]; XCTAssertNotNil(videoSource); factory = nil; @@ -231,11 +238,11 @@ - (void)testAudioTrackLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCAudioTrack *audioTrack; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCAudioTrack) * audioTrack; @autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; audioTrack = [factory audioTrackWithTrackId:@"audioTrack"]; XCTAssertNotNil(audioTrack); factory = nil; @@ -248,11 +255,11 @@ - (void)testVideoTrackLifetime { @autoreleasepool { - RTCPeerConnectionFactory *factory; - RTCVideoTrack *videoTrack; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; + RTC_OBJC_TYPE(RTCVideoTrack) * videoTrack; 
@autoreleasepool { - factory = [[RTCPeerConnectionFactory alloc] init]; + factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; videoTrack = [factory videoTrackWithSource:[factory videoSource] trackId:@"videoTrack"]; XCTAssertNotNil(videoTrack); factory = nil; @@ -263,20 +270,20 @@ XCTAssertTrue(true, "Expect test does not crash"); } -- (bool)negotiatePeerConnection:(RTCPeerConnection *)pc1 - withPeerConnection:(RTCPeerConnection *)pc2 +- (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1 + withPeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc2 negotiationTimeout:(NSTimeInterval)timeout { - __weak RTCPeerConnection *weakPC1 = pc1; - __weak RTCPeerConnection *weakPC2 = pc2; - RTCMediaConstraints *sdpConstraints = - [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{ + __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1; + __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2; + RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{ kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue } - optionalConstraints:nil]; + optionalConstraints:nil]; dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0); [weakPC1 offerForConstraints:sdpConstraints - completionHandler:^(RTCSessionDescription *offer, NSError *error) { + completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * offer, NSError * error) { XCTAssertNil(error); XCTAssertNotNil(offer); [weakPC1 @@ -289,8 +296,9 @@ XCTAssertNil(error); [weakPC2 answerForConstraints:sdpConstraints - completionHandler:^(RTCSessionDescription *answer, - NSError *error) { + completionHandler:^( + RTC_OBJC_TYPE(RTCSessionDescription) * answer, + NSError * error) { XCTAssertNil(error); XCTAssertNotNil(answer); [weakPC2 diff --git a/sdk/objc/unittests/RTCPeerConnectionTest.mm b/sdk/objc/unittests/RTCPeerConnectionTest.mm index 53fe27b932..e45ca93a6c 100644 --- 
a/sdk/objc/unittests/RTCPeerConnectionTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionTest.mm @@ -34,9 +34,10 @@ - (void)testConfigurationGetter { NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -54,18 +55,21 @@ RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; config.activeResetSrtpParams = YES; - config.cryptoOptions = [[RTCCryptoOptions alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:YES - srtpEnableEncryptedRtpHeaderExtensions:NO - sframeRequireFrameEncryption:NO]; + config.cryptoOptions = + [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:YES + srtpEnableEncryptedRtpHeaderExtensions:NO + sframeRequireFrameEncryption:NO]; - RTCMediaConstraints *contraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; - RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; - RTCConfiguration *newConfig; + RTC_OBJC_TYPE(RTCConfiguration) * newConfig; @autoreleasepool { - RTCPeerConnection *peerConnection = + RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = [factory peerConnectionWithConfiguration:config constraints:contraints 
delegate:nil]; newConfig = peerConnection.configuration; @@ -78,8 +82,8 @@ } EXPECT_EQ([config.iceServers count], [newConfig.iceServers count]); - RTCIceServer *newServer = newConfig.iceServers[0]; - RTCIceServer *origServer = config.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0]; + RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0]; std::string origUrl = origServer.urlStrings.firstObject.UTF8String; std::string url = newServer.urlStrings.firstObject.UTF8String; EXPECT_EQ(origUrl, url); @@ -109,19 +113,22 @@ - (void)testWithDependencies { NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; - RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:urlStrings]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTCConfiguration *config = [[RTCConfiguration alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; - RTCMediaConstraints *contraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; - RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init]; + RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} + optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = + [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; - RTCConfiguration *newConfig; + RTC_OBJC_TYPE(RTCConfiguration) * newConfig; std::unique_ptr pc_dependencies = std::make_unique(nullptr); @autoreleasepool { - RTCPeerConnection *peerConnection = + RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = [factory peerConnectionWithDependencies:config constraints:contraints dependencies:std::move(pc_dependencies) diff --git a/sdk/objc/unittests/RTCSessionDescriptionTest.mm b/sdk/objc/unittests/RTCSessionDescriptionTest.mm index 0807eedf3a..ee65649cbc 
100644 --- a/sdk/objc/unittests/RTCSessionDescriptionTest.mm +++ b/sdk/objc/unittests/RTCSessionDescriptionTest.mm @@ -24,19 +24,18 @@ @implementation RTCSessionDescriptionTest /** - * Test conversion of an Objective-C RTCSessionDescription to a native + * Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native * SessionDescriptionInterface (based on the types and SDP strings being equal). */ - (void)testSessionDescriptionConversion { - RTCSessionDescription *description = - [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer - sdp:[self sdp]]; + RTC_OBJC_TYPE(RTCSessionDescription) *description = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]]; webrtc::SessionDescriptionInterface *nativeDescription = description.nativeDescription; EXPECT_EQ(RTCSdpTypeAnswer, - [RTCSessionDescription typeForStdString:nativeDescription->type()]); + [RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]); std::string sdp; nativeDescription->ToString(&sdp); @@ -51,11 +50,10 @@ [self sdp].stdString, nullptr); - RTCSessionDescription *description = - [[RTCSessionDescription alloc] initWithNativeDescription: - nativeDescription]; + RTC_OBJC_TYPE(RTCSessionDescription) *description = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithNativeDescription:nativeDescription]; EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer, - [RTCSessionDescription stdStringForType:description.type]); + [RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]); EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]); } diff --git a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm index bd31a6eb0d..cc31f67b3c 100644 --- a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm +++ b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm @@ -13,6 +13,7 @@ #include "sdk/objc/native/src/objc_video_decoder_factory.h" 
+#import "base/RTCMacros.h" #import "base/RTCVideoDecoder.h" #import "base/RTCVideoDecoderFactory.h" #include "media/base/codec.h" @@ -20,8 +21,8 @@ #include "modules/video_coding/include/video_error_codes.h" #include "rtc_base/gunit.h" -id CreateDecoderFactoryReturning(int return_code) { - id decoderMock = OCMProtocolMock(@protocol(RTCVideoDecoder)); +id CreateDecoderFactoryReturning(int return_code) { + id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder))); OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code); OCMStub([decoderMock decode:[OCMArg any] missingFrames:NO @@ -30,22 +31,24 @@ id CreateDecoderFactoryReturning(int return_code) { .andReturn(return_code); OCMStub([decoderMock releaseDecoder]).andReturn(return_code); - id decoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoDecoderFactory)); - RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil]; + id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory))); + RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil]; OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]); OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock); return decoderFactoryMock; } -id CreateOKDecoderFactory() { +id CreateOKDecoderFactory() { return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK); } -id CreateErrorDecoderFactory() { +id CreateErrorDecoderFactory() { return CreateDecoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR); } -std::unique_ptr GetObjCDecoder(id factory) { +std::unique_ptr GetObjCDecoder( + id factory) { webrtc::ObjCVideoDecoderFactory decoder_factory(factory); return decoder_factory.CreateVideoDecoder(webrtc::SdpVideoFormat(cricket::kH264CodecName)); } diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm index 
452c81566c..728dc018e2 100644 --- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm +++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm @@ -25,8 +25,8 @@ #include "rtc_base/gunit.h" #include "sdk/objc/native/src/objc_frame_buffer.h" -id CreateEncoderFactoryReturning(int return_code) { - id encoderMock = OCMProtocolMock(@protocol(RTCVideoEncoder)); +id CreateEncoderFactoryReturning(int return_code) { + id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder))); OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1]) .andReturn(return_code); OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]]) @@ -34,23 +34,25 @@ id CreateEncoderFactoryReturning(int return_code) { OCMStub([encoderMock releaseEncoder]).andReturn(return_code); OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code); - id encoderFactoryMock = OCMProtocolMock(@protocol(RTCVideoEncoderFactory)); - RTCVideoCodecInfo *supported = [[RTCVideoCodecInfo alloc] initWithName:@"H264" parameters:nil]; + id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory))); + RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil]; OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]); OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]); OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock); return encoderFactoryMock; } -id CreateOKEncoderFactory() { +id CreateOKEncoderFactory() { return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_OK); } -id CreateErrorEncoderFactory() { +id CreateErrorEncoderFactory() { return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR); } -std::unique_ptr GetObjCEncoder(id factory) { +std::unique_ptr GetObjCEncoder( + id factory) { webrtc::ObjCVideoEncoderFactory encoder_factory(factory); webrtc::SdpVideoFormat 
format("H264"); return encoder_factory.CreateVideoEncoder(format); @@ -83,7 +85,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsOKOnSuccess) { CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer); rtc::scoped_refptr buffer = new rtc::RefCountedObject( - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]); + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]); webrtc::VideoFrame frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_rotation(webrtc::kVideoRotation_0) @@ -101,7 +103,7 @@ TEST(ObjCVideoEncoderFactoryTest, EncodeReturnsErrorOnFail) { CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer); rtc::scoped_refptr buffer = new rtc::RefCountedObject( - [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixel_buffer]); + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]); webrtc::VideoFrame frame = webrtc::VideoFrame::Builder() .set_video_frame_buffer(buffer) .set_rotation(webrtc::kVideoRotation_0) diff --git a/test/mac_capturer.mm b/test/mac_capturer.mm index 8d50a804f9..1f84c1bb96 100644 --- a/test/mac_capturer.mm +++ b/test/mac_capturer.mm @@ -15,14 +15,15 @@ #import "sdk/objc/native/api/video_capturer.h" #import "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCTestVideoSourceAdapter : NSObject +@interface RTCTestVideoSourceAdapter : NSObject @property(nonatomic) webrtc::test::MacCapturer *capturer; @end @implementation RTCTestVideoSourceAdapter @synthesize capturer = _capturer; -- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec; rtc::scoped_refptr buffer = new rtc::RefCountedObject(frame.buffer); @@ -39,7 +40,7 @@ namespace { AVCaptureDeviceFormat 
*SelectClosestFormat(AVCaptureDevice *device, size_t width, size_t height) { NSArray *formats = - [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]; AVCaptureDeviceFormat *selectedFormat = nil; int currentDiff = INT_MAX; for (AVCaptureDeviceFormat *format in formats) { @@ -67,11 +68,12 @@ MacCapturer::MacCapturer(size_t width, adapter_ = (__bridge_retained void *)adapter; adapter.capturer = this; - RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:adapter]; + RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer = + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:adapter]; capturer_ = (__bridge_retained void *)capturer; AVCaptureDevice *device = - [[RTCCameraVideoCapturer captureDevices] objectAtIndex:capture_device_index]; + [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices] objectAtIndex:capture_device_index]; AVCaptureDeviceFormat *format = SelectClosestFormat(device, width, height); [capturer startCaptureWithDevice:device format:format fps:target_fps]; } @@ -87,7 +89,8 @@ void MacCapturer::Destroy() { #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wunused-variable" RTCTestVideoSourceAdapter *adapter = (__bridge_transfer RTCTestVideoSourceAdapter *)adapter_; - RTCCameraVideoCapturer *capturer = (__bridge_transfer RTCCameraVideoCapturer *)capturer_; + RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer = + (__bridge_transfer RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer_; [capturer stopCapture]; #pragma clang diagnostic pop }