diff --git a/sdk/objc/base/RTCLogging.h b/sdk/objc/base/RTCLogging.h index b10f97c1f5..3af7868a93 100644 --- a/sdk/objc/base/RTCLogging.h +++ b/sdk/objc/base/RTCLogging.h @@ -47,13 +47,17 @@ RTC_EXTERN NSString* RTCFileName(const char* filePath); RTCLogEx(severity, log_string); \ } while (false) -#define RTCLogVerbose(format, ...) RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__) +#define RTCLogVerbose(format, ...) \ + RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__) -#define RTCLogInfo(format, ...) RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__) +#define RTCLogInfo(format, ...) \ + RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__) -#define RTCLogWarning(format, ...) RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__) +#define RTCLogWarning(format, ...) \ + RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__) -#define RTCLogError(format, ...) RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__) +#define RTCLogError(format, ...) \ + RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__) #if !defined(NDEBUG) #define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__) diff --git a/sdk/objc/base/RTCMacros.h b/sdk/objc/base/RTCMacros.h index 7ab9005951..cb943b4bee 100644 --- a/sdk/objc/base/RTCMacros.h +++ b/sdk/objc/base/RTCMacros.h @@ -49,7 +49,8 @@ // Macro used internally to declare API types. Declaring an API type without // using this macro will not include the declared type in the set of types // that will be affected by the configurable RTC_OBJC_TYPE_PREFIX. -#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name) +#define RTC_OBJC_TYPE(type_name) \ + RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name) #if defined(__cplusplus) #define RTC_EXTERN extern "C" RTC_OBJC_EXPORT diff --git a/sdk/objc/base/RTCMutableI420Buffer.h b/sdk/objc/base/RTCMutableI420Buffer.h index cde721980b..7320b16ea2 100644 --- a/sdk/objc/base/RTCMutableI420Buffer.h +++ b/sdk/objc/base/RTCMutableI420Buffer.h @@ -18,6 +18,7 @@ NS_ASSUME_NONNULL_BEGIN /** Extension of the I420 buffer with mutable data access */ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE -(RTCMutableI420Buffer) @end +(RTCMutableI420Buffer) @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCVideoCapturer.h b/sdk/objc/base/RTCVideoCapturer.h index fe6c42d70d..4eafcfb643 100644 --- a/sdk/objc/base/RTCVideoCapturer.h +++ b/sdk/objc/base/RTCVideoCapturer.h @@ -19,7 +19,8 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCVideoCapturerDelegate) - - (void)capturer : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame + (void)capturer + : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame; @end @@ -28,7 +29,8 @@ RTC_OBJC_EXPORT @property(nonatomic, weak) id delegate; -- (instancetype)initWithDelegate:(id)delegate; +- (instancetype)initWithDelegate: + (id)delegate; @end diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index e4c699fac1..0a9d4bf215 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -14,7 +14,8 @@ NS_ASSUME_NONNULL_BEGIN -/** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. */ +/** Holds information to identify a codec. Corresponds to + * webrtc::SdpVideoFormat. 
*/ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject @@ -23,11 +24,13 @@ RTC_OBJC_EXPORT - (instancetype)initWithName:(NSString *)name; - (instancetype)initWithName:(NSString *)name - parameters:(nullable NSDictionary *)parameters; + parameters:(nullable NSDictionary *) + parameters; - (instancetype)initWithName:(NSString *)name parameters:(NSDictionary *)parameters - scalabilityModes:(NSArray *)scalabilityModes NS_DESIGNATED_INITIALIZER; + scalabilityModes:(NSArray *)scalabilityModes + NS_DESIGNATED_INITIALIZER; - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; diff --git a/sdk/objc/base/RTCVideoDecoderFactory.h b/sdk/objc/base/RTCVideoDecoderFactory.h index 6b8e0a1cc4..9ce9856581 100644 --- a/sdk/objc/base/RTCVideoDecoderFactory.h +++ b/sdk/objc/base/RTCVideoDecoderFactory.h @@ -16,7 +16,8 @@ NS_ASSUME_NONNULL_BEGIN -/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory. +/** RTCVideoDecoderFactory is an Objective-C version of + * webrtc::VideoDecoderFactory. */ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE diff --git a/sdk/objc/base/RTCVideoEncoder.h b/sdk/objc/base/RTCVideoEncoder.h index 632c248eb3..82de79c4ff 100644 --- a/sdk/objc/base/RTCVideoEncoder.h +++ b/sdk/objc/base/RTCVideoEncoder.h @@ -20,8 +20,9 @@ NS_ASSUME_NONNULL_BEGIN /** Callback block for encoder. */ -typedef BOOL (^RTCVideoEncoderCallback)(RTC_OBJC_TYPE(RTCEncodedImage) * frame, - id info); +typedef BOOL (^RTCVideoEncoderCallback)( + RTC_OBJC_TYPE(RTCEncodedImage) * frame, + id info); /** Protocol for encoder implementations. */ RTC_OBJC_EXPORT @@ -29,7 +30,8 @@ RTC_OBJC_EXPORT (RTCVideoEncoder) - (void)setCallback : (nullable RTCVideoEncoderCallback)callback; -- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings +- (NSInteger)startEncodeWithSettings: + (RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores; - (NSInteger)releaseEncoder; - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame @@ -38,20 +40,21 @@ RTC_OBJC_EXPORT - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate; - (NSString *)implementationName; -/** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to - * keep the QP from the encoded images within the given range. Returning nil from this function - * disables quality scaling. */ +/** Returns QP scaling settings for encoder. The quality scaler adjusts the + * resolution in order to keep the QP from the encoded images within the given + * range. Returning nil from this function disables quality scaling. */ - (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings; /** Resolutions should be aligned to this value. */ @property(nonatomic, readonly) NSInteger resolutionAlignment; -/** If enabled, resolution alignment is applied to all simulcast layers simultaneously so that when - scaled, all resolutions comply with 'resolutionAlignment'. */ +/** If enabled, resolution alignment is applied to all simulcast layers + simultaneously so that when scaled, all resolutions comply with + 'resolutionAlignment'. */ @property(nonatomic, readonly) BOOL applyAlignmentToAllSimulcastLayers; -/** If YES, the receiver is expected to resample/scale the source texture to the expected output - size. */ +/** If YES, the receiver is expected to resample/scale the source texture to the + expected output size. 
*/ @property(nonatomic, readonly) BOOL supportsNativeHandle; @end diff --git a/sdk/objc/base/RTCVideoEncoderFactory.h b/sdk/objc/base/RTCVideoEncoderFactory.h index 3a102c87b5..d3146e10f3 100644 --- a/sdk/objc/base/RTCVideoEncoderFactory.h +++ b/sdk/objc/base/RTCVideoEncoderFactory.h @@ -23,12 +23,15 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCVideoEncoderSelector) - - (void)registerCurrentEncoderInfo : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; -- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:(NSInteger)bitrate; + - (void)registerCurrentEncoderInfo + : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; +- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate: + (NSInteger)bitrate; - (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBrokenEncoder; @optional -- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForResolutionChangeBySize:(CGSize)size; +- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForResolutionChangeBySize: + (CGSize)size; @end @@ -41,14 +44,16 @@ RTC_OBJC_EXPORT - (instancetype)initWithSupported:(bool)isSupported; - (instancetype)initWithSupported:(bool)isSupported - isPowerEfficient:(bool)isPowerEfficient NS_DESIGNATED_INITIALIZER; + isPowerEfficient:(bool)isPowerEfficient + NS_DESIGNATED_INITIALIZER; @property(nonatomic, readonly) bool isSupported; @property(nonatomic, readonly) bool isPowerEfficient; @end -/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory. +/** RTCVideoEncoderFactory is an Objective-C version of + * webrtc::VideoEncoderFactory. */ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE @@ -62,7 +67,8 @@ RTC_OBJC_EXPORT @optional - (NSArray *)implementations; - (nullable id)encoderSelector; -/* TODO: b/299588022 - move to non-optional section when implemented by all derived classes. */ +/* TODO: b/299588022 - move to non-optional section when implemented by all + * derived classes. */ - (RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) *) queryCodecSupport:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info scalabilityMode:(nullable NSString *)scalabilityMode; diff --git a/sdk/objc/base/RTCVideoEncoderFactory.mm b/sdk/objc/base/RTCVideoEncoderFactory.mm index 8cf03a7c9c..75ebde149b 100644 --- a/sdk/objc/base/RTCVideoEncoderFactory.mm +++ b/sdk/objc/base/RTCVideoEncoderFactory.mm @@ -20,7 +20,8 @@ return [self initWithSupported:isSupported isPowerEfficient:false]; } -- (instancetype)initWithSupported:(bool)isSupported isPowerEfficient:(bool)isPowerEfficient { +- (instancetype)initWithSupported:(bool)isSupported + isPowerEfficient:(bool)isPowerEfficient { self = [super init]; if (self) { _isSupported = isSupported; diff --git a/sdk/objc/base/RTCVideoEncoderQpThresholds.h b/sdk/objc/base/RTCVideoEncoderQpThresholds.h index 2963824cfc..02cc9224e2 100644 --- a/sdk/objc/base/RTCVideoEncoderQpThresholds.h +++ b/sdk/objc/base/RTCVideoEncoderQpThresholds.h @@ -14,7 +14,8 @@ NS_ASSUME_NONNULL_BEGIN -/** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */ +/** QP thresholds for encoder. Corresponds to + * webrtc::VideoEncoder::QpThresholds. */ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) : NSObject diff --git a/sdk/objc/base/RTCVideoFrame.h b/sdk/objc/base/RTCVideoFrame.h index 5e8ee143a1..0f87dd8f6d 100644 --- a/sdk/objc/base/RTCVideoFrame.h +++ b/sdk/objc/base/RTCVideoFrame.h @@ -49,7 +49,8 @@ RTC_OBJC_EXPORT /** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp. 
*/ -- (instancetype)initWithBuffer:(id)frameBuffer +- (instancetype)initWithBuffer: + (id)frameBuffer rotation:(RTCVideoRotation)rotation timeStampNs:(int64_t)timeStampNs; diff --git a/sdk/objc/base/RTCVideoRenderer.h b/sdk/objc/base/RTCVideoRenderer.h index 6b3c9214f1..71edee7b0e 100644 --- a/sdk/objc/base/RTCVideoRenderer.h +++ b/sdk/objc/base/RTCVideoRenderer.h @@ -35,7 +35,8 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCVideoViewDelegate) - - (void)videoView : (id)videoView didChangeVideoSize + - (void)videoView + : (id)videoView didChangeVideoSize : (CGSize)size; @end diff --git a/sdk/objc/helpers/AVCaptureSession+DevicePosition.h b/sdk/objc/helpers/AVCaptureSession+DevicePosition.h index 32ab6877f0..2a990f3581 100644 --- a/sdk/objc/helpers/AVCaptureSession+DevicePosition.h +++ b/sdk/objc/helpers/AVCaptureSession+DevicePosition.h @@ -16,7 +16,8 @@ NS_ASSUME_NONNULL_BEGIN @interface AVCaptureSession (DevicePosition) // Check the image's EXIF for the camera the image came from. -+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer; ++ (AVCaptureDevicePosition)devicePositionForSampleBuffer: + (CMSampleBufferRef)sampleBuffer; @end diff --git a/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm b/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm index 0814ecc6c5..527827cc94 100644 --- a/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm +++ b/sdk/objc/helpers/AVCaptureSession+DevicePosition.mm @@ -20,7 +20,8 @@ BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) { @implementation AVCaptureSession (DevicePosition) -+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer { ++ (AVCaptureDevicePosition)devicePositionForSampleBuffer: + (CMSampleBufferRef)sampleBuffer { // Check the image's EXIF for the camera the image came from. AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionUnspecified; CFDictionaryRef attachments = CMCopyDictionaryOfAttachments( @@ -29,8 +30,9 @@ BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) { int size = CFDictionaryGetCount(attachments); if (size > 0) { CFDictionaryRef cfExifDictVal = nil; - if (CFDictionaryGetValueIfPresent( - attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) { + if (CFDictionaryGetValueIfPresent(attachments, + (const void *)CFSTR("{Exif}"), + (const void **)&cfExifDictVal)) { CFStringRef cfLensModelStrVal; if (CFDictionaryGetValueIfPresent(cfExifDictVal, (const void *)CFSTR("LensModel"), diff --git a/sdk/objc/helpers/NSString+StdString.mm b/sdk/objc/helpers/NSString+StdString.mm index c98432c445..5c2d208b9c 100644 --- a/sdk/objc/helpers/NSString+StdString.mm +++ b/sdk/objc/helpers/NSString+StdString.mm @@ -24,7 +24,7 @@ charData.length); } -+ (NSString *)stringForStdString:(const std::string&)stdString { ++ (NSString *)stringForStdString:(const std::string &)stdString { // std::string may contain null termination character so we construct // using length. 
return [[NSString alloc] initWithBytes:stdString.data() diff --git a/sdk/objc/helpers/RTCDispatcher+Private.h b/sdk/objc/helpers/RTCDispatcher+Private.h index 195c651790..da286c6467 100644 --- a/sdk/objc/helpers/RTCDispatcher+Private.h +++ b/sdk/objc/helpers/RTCDispatcher+Private.h @@ -13,6 +13,7 @@ @interface RTC_OBJC_TYPE (RTCDispatcher) () - + (dispatch_queue_t)dispatchQueueForType : (RTCDispatcherQueueType)dispatchType; + + (dispatch_queue_t)dispatchQueueForType + : (RTCDispatcherQueueType)dispatchType; @end diff --git a/sdk/objc/helpers/RTCDispatcher.h b/sdk/objc/helpers/RTCDispatcher.h index 96e091a594..f6afe24243 100644 --- a/sdk/objc/helpers/RTCDispatcher.h +++ b/sdk/objc/helpers/RTCDispatcher.h @@ -36,7 +36,8 @@ RTC_OBJC_EXPORT * @param dispatchType The queue type to dispatch on. * @param block The block to dispatch asynchronously. */ -+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType block:(dispatch_block_t)block; ++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType + block:(dispatch_block_t)block; /** Returns YES if run on queue for the dispatchType otherwise NO. * Useful for asserting that a method is run on a correct queue. diff --git a/sdk/objc/native/api/audio_device_module.mm b/sdk/objc/native/api/audio_device_module.mm index 86105c6969..0c9af722bd 100644 --- a/sdk/objc/native/api/audio_device_module.mm +++ b/sdk/objc/native/api/audio_device_module.mm @@ -17,24 +17,30 @@ namespace webrtc { -rtc::scoped_refptr CreateAudioDeviceModule(bool bypass_voice_processing) { +rtc::scoped_refptr CreateAudioDeviceModule( + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; #if defined(WEBRTC_IOS) - return rtc::make_ref_counted(bypass_voice_processing, nullptr); + return rtc::make_ref_counted( + bypass_voice_processing, nullptr); #else - RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!"; + RTC_LOG(LS_ERROR) + << "current platform is not supported => this module will self destruct!"; return nullptr; #endif } rtc::scoped_refptr CreateMutedDetectAudioDeviceModule( - AudioDeviceModule::MutedSpeechEventHandler handler, bool bypass_voice_processing) { + AudioDeviceModule::MutedSpeechEventHandler handler, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; #if defined(WEBRTC_IOS) - return rtc::make_ref_counted(bypass_voice_processing, handler); + return rtc::make_ref_counted( + bypass_voice_processing, handler); #else - RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!"; + RTC_LOG(LS_ERROR) + << "current platform is not supported => this module will self destruct!"; return nullptr; #endif } -} +} // namespace webrtc diff --git a/sdk/objc/native/api/network_monitor_factory.mm b/sdk/objc/native/api/network_monitor_factory.mm index acde634b1d..a2f23f9390 100644 --- a/sdk/objc/native/api/network_monitor_factory.mm +++ b/sdk/objc/native/api/network_monitor_factory.mm @@ -27,4 +27,4 @@ std::unique_ptr CreateNetworkMonitorFactory() { #endif } -} +} // namespace webrtc diff --git a/sdk/objc/native/api/ssl_certificate_verifier.mm b/sdk/objc/native/api/ssl_certificate_verifier.mm index 4437402b9c..4bef82dc9a 100644 --- a/sdk/objc/native/api/ssl_certificate_verifier.mm +++ b/sdk/objc/native/api/ssl_certificate_verifier.mm @@ -26,8 +26,9 @@ class SSLCertificateVerifierAdapter final : public rtc::SSLCertificateVerifier { @autoreleasepool { rtc::Buffer der_buffer; certificate.ToDER(&der_buffer); - NSData* serialized_certificate = [[NSData alloc] 
initWithBytes:der_buffer.data() - length:der_buffer.size()]; + NSData* serialized_certificate = + [[NSData alloc] initWithBytes:der_buffer.data() + length:der_buffer.size()]; return [objc_certificate_verifier_ verify:serialized_certificate]; } } @@ -36,13 +37,14 @@ class SSLCertificateVerifierAdapter final : public rtc::SSLCertificateVerifier { id objc_certificate_verifier_; }; -} +} // namespace namespace webrtc { std::unique_ptr ObjCToNativeCertificateVerifier( id objc_certificate_verifier) { - return std::make_unique(objc_certificate_verifier); + return std::make_unique( + objc_certificate_verifier); } } // namespace webrtc diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index bb06b806b1..49734a2ec9 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -123,7 +123,8 @@ AudioDeviceIOS::AudioDeviceIOS( io_thread_checker_.Detach(); thread_ = rtc::Thread::Current(); - audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this]; + audio_session_observer_ = + [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this]; mach_timebase_info_data_t tinfo; mach_timebase_info(&tinfo); machTickUnitsToNanoseconds_ = (double)tinfo.numer / tinfo.denom; @@ -240,12 +241,15 @@ int32_t AudioDeviceIOS::StartPlayout() { if (fine_audio_buffer_) { fine_audio_buffer_->ResetPlayout(); } - if (!recording_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { + if (!recording_.load() && + audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { OSStatus result = audio_unit_->Start(); if (result != noErr) { - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session notifyAudioUnitStartFailedWithError:result]; - RTCLogError(@"StartPlayout failed to start audio unit, reason %d", result); + RTCLogError(@"StartPlayout failed to start audio unit, reason %d", + result); return -1; } RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started"; @@ -279,8 +283,9 @@ int32_t AudioDeviceIOS::StopPlayout() { average_number_of_playout_callbacks_between_glitches = num_playout_callbacks_ / num_detected_playout_glitches; } - RTC_HISTOGRAM_COUNTS_100000("WebRTC.Audio.AveragePlayoutCallbacksBetweenGlitches", - average_number_of_playout_callbacks_between_glitches); + RTC_HISTOGRAM_COUNTS_100000( + "WebRTC.Audio.AveragePlayoutCallbacksBetweenGlitches", + average_number_of_playout_callbacks_between_glitches); RTCLog(@"Average number of playout callbacks between glitches: %d", average_number_of_playout_callbacks_between_glitches); return 0; @@ -299,12 +304,15 @@ int32_t AudioDeviceIOS::StartRecording() { if (fine_audio_buffer_) { fine_audio_buffer_->ResetRecord(); } - if (!playing_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { + if (!playing_.load() && + audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { OSStatus result = audio_unit_->Start(); if (result != noErr) { - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session notifyAudioUnitStartFailedWithError:result]; - RTCLogError(@"StartRecording failed to start audio unit, reason %d", result); + RTCLogError(@"StartRecording failed to start audio unit, reason 
%d", + result); return -1; } RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started"; @@ -372,8 +380,9 @@ void AudioDeviceIOS::OnValidRouteChange() { void AudioDeviceIOS::OnCanPlayOrRecordChange(bool can_play_or_record) { RTC_DCHECK(thread_); - thread_->PostTask(SafeTask( - safety_, [this, can_play_or_record] { HandleCanPlayOrRecordChange(can_play_or_record); })); + thread_->PostTask(SafeTask(safety_, [this, can_play_or_record] { + HandleCanPlayOrRecordChange(can_play_or_record); + })); } void AudioDeviceIOS::OnChangedOutputVolume() { @@ -381,11 +390,12 @@ void AudioDeviceIOS::OnChangedOutputVolume() { thread_->PostTask(SafeTask(safety_, [this] { HandleOutputVolumeChange(); })); } -OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags, - const AudioTimeStamp* time_stamp, - UInt32 bus_number, - UInt32 num_frames, - AudioBufferList* /* io_data */) { +OSStatus AudioDeviceIOS::OnDeliverRecordedData( + AudioUnitRenderActionFlags* flags, + const AudioTimeStamp* time_stamp, + UInt32 bus_number, + UInt32 num_frames, + AudioBufferList* /* io_data */) { RTC_DCHECK_RUN_ON(&io_thread_checker_); OSStatus result = noErr; // Simply return if recording is not enabled. @@ -400,7 +410,8 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags // Get audio timestamp for the audio. // The timestamp will not have NTP time epoch, but that will be addressed by // the TimeStampAligner in AudioDeviceBuffer::SetRecordedBuffer(). - SInt64 capture_timestamp_ns = time_stamp->mHostTime * machTickUnitsToNanoseconds_; + SInt64 capture_timestamp_ns = + time_stamp->mHostTime * machTickUnitsToNanoseconds_; // Allocate AudioBuffers to be used as storage for the received audio. // The AudioBufferList structure works as a placeholder for the @@ -421,7 +432,8 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags // We can make the audio unit provide a buffer instead in io_data, but we // currently just use our own. // TODO(henrika): should error handling be improved? - result = audio_unit_->Render(flags, time_stamp, bus_number, num_frames, &audio_buffer_list); + result = audio_unit_->Render( + flags, time_stamp, bus_number, num_frames, &audio_buffer_list); if (result != noErr) { RTCLogError(@"Failed to render audio."); return result; @@ -450,7 +462,8 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags, // activated. 
if (!playing_.load(std::memory_order_acquire)) { const size_t size_in_bytes = audio_buffer->mDataByteSize; - RTC_CHECK_EQ(size_in_bytes / VoiceProcessingAudioUnit::kBytesPerSample, num_frames); + RTC_CHECK_EQ(size_in_bytes / VoiceProcessingAudioUnit::kBytesPerSample, + num_frames); *flags |= kAudioUnitRenderAction_OutputIsSilence; memset(static_cast(audio_buffer->mData), 0, size_in_bytes); return noErr; @@ -464,7 +477,8 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags, const int64_t now_time = rtc::TimeMillis(); if (time_stamp->mSampleTime != num_frames) { const int64_t delta_time = now_time - last_playout_time_; - const int glitch_threshold = 1.6 * playout_parameters_.GetBufferSizeInMilliseconds(); + const int glitch_threshold = + 1.6 * playout_parameters_.GetBufferSizeInMilliseconds(); if (delta_time > glitch_threshold) { RTCLogWarning(@"Possible playout audio glitch detected.\n" " Time since last OnGetPlayoutData was %lld ms.\n", @@ -493,9 +507,11 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags, if (last_hw_output_latency_update_sample_count_ >= playout_parameters_.sample_rate() * kHwLatencyUpdatePeriodSeconds) { - // We update the hardware output latency every kHwLatencyUpdatePeriodSeconds seconds. - hw_output_latency_.store([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].outputLatency, - std::memory_order_relaxed); + // We update the hardware output latency every kHwLatencyUpdatePeriodSeconds + // seconds. + hw_output_latency_.store( + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].outputLatency, + std::memory_order_relaxed); last_hw_output_latency_update_sample_count_ = 0; } double output_latency_ = hw_output_latency_.load(std::memory_order_relaxed) + @@ -505,19 +521,22 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags, // the native I/O audio unit) and copy the result to the audio buffer in the // `io_data` destination. fine_audio_buffer_->GetPlayoutData( - rtc::ArrayView(static_cast(audio_buffer->mData), num_frames), + rtc::ArrayView(static_cast(audio_buffer->mData), + num_frames), playout_delay_ms); last_hw_output_latency_update_sample_count_ += num_frames; total_playout_samples_count_.fetch_add(num_frames, std::memory_order_relaxed); total_playout_samples_duration_ms_.fetch_add( - num_frames * 1000 / playout_parameters_.sample_rate(), std::memory_order_relaxed); + num_frames * 1000 / playout_parameters_.sample_rate(), + std::memory_order_relaxed); total_playout_delay_ms_.fetch_add(output_latency_ * kSecondToMs * num_frames, std::memory_order_relaxed); return noErr; } -void AudioDeviceIOS::OnReceivedMutedSpeechActivity(AUVoiceIOSpeechActivityEvent event) { +void AudioDeviceIOS::OnReceivedMutedSpeechActivity( + AUVoiceIOSpeechActivityEvent event) { RTCLog(@"Received muted speech activity %d.", event); if (muted_speech_event_handler_ != 0) { if (event == kAUVoiceIOSpeechActivityHasStarted) { @@ -530,8 +549,10 @@ void AudioDeviceIOS::OnReceivedMutedSpeechActivity(AUVoiceIOSpeechActivityEvent void AudioDeviceIOS::HandleInterruptionBegin() { RTC_DCHECK_RUN_ON(thread_); - RTCLog(@"Interruption begin. IsInterrupted changed from %d to 1.", is_interrupted_); - if (audio_unit_ && audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { + RTCLog(@"Interruption begin. 
IsInterrupted changed from %d to 1.", + is_interrupted_); + if (audio_unit_ && + audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { RTCLog(@"Stopping the audio unit due to interruption begin."); if (!audio_unit_->Stop()) { RTCLogError(@"Failed to stop the audio unit for interruption begin."); @@ -549,8 +570,8 @@ void AudioDeviceIOS::HandleInterruptionEnd() { is_interrupted_ = false; if (!audio_unit_) return; if (webrtc::field_trial::IsEnabled("WebRTC-Audio-iOS-Holding")) { - // Work around an issue where audio does not restart properly after an interruption - // by restarting the audio unit when the interruption ends. + // Work around an issue where audio does not restart properly after an + // interruption by restarting the audio unit when the interruption ends. if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { audio_unit_->Stop(); PrepareForNewStart(); @@ -561,12 +582,14 @@ void AudioDeviceIOS::HandleInterruptionEnd() { // Allocate new buffers given the potentially new stream format. SetupAudioBuffersForActiveAudioSession(); } - UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord); + UpdateAudioUnit( + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord); } void AudioDeviceIOS::HandleValidRouteChange() { RTC_DCHECK_RUN_ON(thread_); - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; RTCLog(@"%@", session); HandleSampleRateChange(); } @@ -588,19 +611,22 @@ void AudioDeviceIOS::HandleSampleRateChange() { // If we don't have an audio unit yet, or the audio unit is uninitialized, // there is no work to do. - if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) { + if (!audio_unit_ || + audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) { return; } // The audio unit is already initialized or started. // Check to see if the sample rate or buffer size has changed. 
- RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; const double new_sample_rate = session.sampleRate; const NSTimeInterval session_buffer_duration = session.IOBufferDuration; const size_t new_frames_per_buffer = static_cast(new_sample_rate * session_buffer_duration + .5); const double current_sample_rate = playout_parameters_.sample_rate(); - const size_t current_frames_per_buffer = playout_parameters_.frames_per_buffer(); + const size_t current_frames_per_buffer = + playout_parameters_.frames_per_buffer(); RTCLog(@"Handling playout sample rate change:\n" " Session sample rate: %f frames_per_buffer: %lu\n" " ADM sample rate: %f frames_per_buffer: %lu", @@ -652,7 +678,8 @@ void AudioDeviceIOS::HandleSampleRateChange() { if (restart_audio_unit) { OSStatus result = audio_unit_->Start(); if (result != noErr) { - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session notifyAudioUnitStartFailedWithError:result]; RTCLogError(@"Failed to start audio unit with sample rate: %d, reason %d", playout_parameters_.sample_rate(), @@ -679,12 +706,15 @@ void AudioDeviceIOS::HandlePlayoutGlitchDetected(uint64_t glitch_duration_ms) { return; } num_detected_playout_glitches_++; - total_playout_glitches_duration_ms_.fetch_add(glitch_duration_ms, std::memory_order_relaxed); - uint64_t glitch_count = num_detected_playout_glitches_.load(std::memory_order_acquire); + total_playout_glitches_duration_ms_.fetch_add(glitch_duration_ms, + std::memory_order_relaxed); + uint64_t glitch_count = + num_detected_playout_glitches_.load(std::memory_order_acquire); RTCLog(@"Number of detected playout glitches: %lld", glitch_count); dispatch_async(dispatch_get_main_queue(), ^{ - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session notifyDidDetectPlayoutGlitch:glitch_count]; }); } @@ -709,14 +739,16 @@ void AudioDeviceIOS::UpdateAudioDeviceBuffer() { // Inform the audio device buffer (ADB) about the new audio format. audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate()); audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels()); - audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate()); + audio_device_buffer_->SetRecordingSampleRate( + record_parameters_.sample_rate()); audio_device_buffer_->SetRecordingChannels(record_parameters_.channels()); } void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { LOGI() << "SetupAudioBuffersForActiveAudioSession"; // Verify the current values once the audio session has been activated. - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; double sample_rate = session.sampleRate; NSTimeInterval io_buffer_duration = session.IOBufferDuration; RTCLog(@"%@", session); @@ -738,7 +770,8 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { if (sample_rate <= DBL_EPSILON && playout_parameters_.sample_rate() > 0) { RTCLogError(@"Reported rate is invalid: %f. 
" "Using %d as sample rate instead.", - sample_rate, playout_parameters_.sample_rate()); + sample_rate, + playout_parameters_.sample_rate()); sample_rate = playout_parameters_.sample_rate(); } @@ -747,13 +780,18 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { // number of audio frames. // Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz. // Hence, 128 is the size we expect to see in upcoming render callbacks. - playout_parameters_.reset(sample_rate, playout_parameters_.channels(), io_buffer_duration); + playout_parameters_.reset( + sample_rate, playout_parameters_.channels(), io_buffer_duration); RTC_DCHECK(playout_parameters_.is_complete()); - record_parameters_.reset(sample_rate, record_parameters_.channels(), io_buffer_duration); + record_parameters_.reset( + sample_rate, record_parameters_.channels(), io_buffer_duration); RTC_DCHECK(record_parameters_.is_complete()); - RTC_LOG(LS_INFO) << " frames per I/O buffer: " << playout_parameters_.frames_per_buffer(); - RTC_LOG(LS_INFO) << " bytes per I/O buffer: " << playout_parameters_.GetBytesPerBuffer(); - RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(), record_parameters_.GetBytesPerBuffer()); + RTC_LOG(LS_INFO) << " frames per I/O buffer: " + << playout_parameters_.frames_per_buffer(); + RTC_LOG(LS_INFO) << " bytes per I/O buffer: " + << playout_parameters_.GetBytesPerBuffer(); + RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(), + record_parameters_.GetBytesPerBuffer()); // Update the ADB parameters since the sample rate might have changed. UpdateAudioDeviceBuffer(); @@ -768,8 +806,8 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() { bool AudioDeviceIOS::CreateAudioUnit() { RTC_DCHECK(!audio_unit_); BOOL detect_mute_speech_ = (muted_speech_event_handler_ != 0); - audio_unit_.reset( - new VoiceProcessingAudioUnit(bypass_voice_processing_, detect_mute_speech_, this)); + audio_unit_.reset(new VoiceProcessingAudioUnit( + bypass_voice_processing_, detect_mute_speech_, this)); if (!audio_unit_->Init()) { audio_unit_.reset(); return false; @@ -809,12 +847,13 @@ void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) { case VoiceProcessingAudioUnit::kUninitialized: RTCLog(@"VPAU state: Uninitialized"); should_initialize_audio_unit = can_play_or_record; - should_start_audio_unit = - should_initialize_audio_unit && (playing_.load() || recording_.load()); + should_start_audio_unit = should_initialize_audio_unit && + (playing_.load() || recording_.load()); break; case VoiceProcessingAudioUnit::kInitialized: RTCLog(@"VPAU state: Initialized"); - should_start_audio_unit = can_play_or_record && (playing_.load() || recording_.load()); + should_start_audio_unit = + can_play_or_record && (playing_.load() || recording_.load()); should_uninitialize_audio_unit = !can_play_or_record; break; case VoiceProcessingAudioUnit::kStarted: @@ -838,7 +877,8 @@ void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) { if (should_start_audio_unit) { RTCLog(@"Starting audio unit for UpdateAudioUnit"); // Log session settings before trying to start audio streaming. 
- RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; RTCLog(@"%@", session); OSStatus result = audio_unit_->Start(); if (result != noErr) { @@ -872,7 +912,8 @@ bool AudioDeviceIOS::ConfigureAudioSession() { RTCLogWarning(@"Audio session already configured."); return false; } - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session lockForConfiguration]; bool success = [session configureWebRTCSession:nil]; [session unlockForConfiguration]; @@ -892,7 +933,8 @@ bool AudioDeviceIOS::ConfigureAudioSessionLocked() { RTCLogWarning(@"Audio session already configured."); return false; } - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; bool success = [session configureWebRTCSession:nil]; if (success) { has_configured_session_ = true; @@ -910,7 +952,8 @@ void AudioDeviceIOS::UnconfigureAudioSession() { RTCLogWarning(@"Audio session already unconfigured."); return; } - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session lockForConfiguration]; [session unconfigureWebRTCSession:nil]; [session endWebRTCSession:nil]; @@ -928,7 +971,8 @@ bool AudioDeviceIOS::InitPlayOrRecord() { return false; } - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; // Subscribe to audio session events. [session pushDelegate:audio_session_observer_]; is_interrupted_ = session.isInterrupted ? true : false; @@ -938,7 +982,8 @@ bool AudioDeviceIOS::InitPlayOrRecord() { NSError* error = nil; if (![session beginWebRTCSession:&error]) { [session unlockForConfiguration]; - RTCLogError(@"Failed to begin WebRTC session: %@", error.localizedDescription); + RTCLogError(@"Failed to begin WebRTC session: %@", + error.localizedDescription); audio_unit_.reset(); return false; } @@ -978,7 +1023,8 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() { io_thread_checker_.Detach(); // Remove audio session notification observers. - RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession)* session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session removeDelegate:audio_session_observer_]; // All I/O should be stopped or paused prior to deactivating the audio @@ -1001,7 +1047,8 @@ bool AudioDeviceIOS::IsInterrupted() { #pragma mark - Not Implemented -int32_t AudioDeviceIOS::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const { +int32_t AudioDeviceIOS::ActiveAudioLayer( + AudioDeviceModule::AudioLayer& audioLayer) const { audioLayer = AudioDeviceModule::kPlatformDefaultAudio; return 0; } @@ -1091,7 +1138,8 @@ int32_t AudioDeviceIOS::MicrophoneMuteIsAvailable(bool& available) { int32_t AudioDeviceIOS::SetMicrophoneMute(bool enable) { // Set microphone mute only if the audio unit is started. 
- if (audio_unit_ && audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { + if (audio_unit_ && + audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { BOOL result = audio_unit_->SetMicrophoneMute(enable); if (!result) { RTCLogError(@"Set microphone %s failed.", enable ? "mute" : "unmute"); @@ -1180,7 +1228,8 @@ int32_t AudioDeviceIOS::SetRecordingDevice(uint16_t index) { return 0; } -int32_t AudioDeviceIOS::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType) { +int32_t AudioDeviceIOS::SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType) { RTC_DCHECK_NOTREACHED() << "Not implemented"; return -1; } @@ -1196,14 +1245,16 @@ int32_t AudioDeviceIOS::RecordingIsAvailable(bool& available) { } std::optional AudioDeviceIOS::GetStats() const { - const uint64_t total_samples_count = total_playout_samples_count_.load(std::memory_order_acquire); + const uint64_t total_samples_count = + total_playout_samples_count_.load(std::memory_order_acquire); AudioDeviceModule::Stats playout_stats = { - .synthesized_samples_duration_s = - kMsToSecond * total_playout_glitches_duration_ms_.load(std::memory_order_acquire), - .synthesized_samples_events = num_detected_playout_glitches_.load(std::memory_order_acquire), - .total_samples_duration_s = - kMsToSecond * total_playout_samples_duration_ms_.load(std::memory_order_acquire), + .synthesized_samples_duration_s = kMsToSecond * + total_playout_glitches_duration_ms_.load(std::memory_order_acquire), + .synthesized_samples_events = + num_detected_playout_glitches_.load(std::memory_order_acquire), + .total_samples_duration_s = kMsToSecond * + total_playout_samples_duration_ms_.load(std::memory_order_acquire), .total_playout_delay_s = kMsToSecond * total_playout_delay_ms_.load(std::memory_order_acquire), .total_samples_count = total_samples_count, diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.mm b/sdk/objc/native/src/audio/audio_device_module_ios.mm index 7940a3dc37..ed6e6633fc 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_module_ios.mm @@ -39,8 +39,9 @@ namespace webrtc { namespace ios_adm { -AudioDeviceModuleIOS::AudioDeviceModuleIOS(bool bypass_voice_processing, - MutedSpeechEventHandler muted_speech_event_handler) +AudioDeviceModuleIOS::AudioDeviceModuleIOS( + bool bypass_voice_processing, + MutedSpeechEventHandler muted_speech_event_handler) : bypass_voice_processing_(bypass_voice_processing), muted_speech_event_handler_(muted_speech_event_handler), task_queue_factory_(CreateDefaultTaskQueueFactory()) { @@ -48,632 +49,633 @@ AudioDeviceModuleIOS::AudioDeviceModuleIOS(bool bypass_voice_processing, RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized."; } - int32_t AudioDeviceModuleIOS::AttachAudioBuffer() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - audio_device_->AttachAudioBuffer(audio_device_buffer_.get()); - return 0; - } +int32_t AudioDeviceModuleIOS::AttachAudioBuffer() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + audio_device_->AttachAudioBuffer(audio_device_buffer_.get()); + return 0; +} - AudioDeviceModuleIOS::~AudioDeviceModuleIOS() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - } +AudioDeviceModuleIOS::~AudioDeviceModuleIOS() { + RTC_DLOG(LS_INFO) << __FUNCTION__; +} - int32_t AudioDeviceModuleIOS::ActiveAudioLayer(AudioLayer* audioLayer) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - AudioLayer activeAudio; - if (audio_device_->ActiveAudioLayer(activeAudio) == -1) { - return -1; - } - *audioLayer = activeAudio; - return 0; - } - - int32_t 
AudioDeviceModuleIOS::Init() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - if (initialized_) - return 0; - - audio_device_buffer_.reset(new webrtc::AudioDeviceBuffer(task_queue_factory_.get())); - audio_device_.reset( - new ios_adm::AudioDeviceIOS(bypass_voice_processing_, muted_speech_event_handler_)); - RTC_CHECK(audio_device_); - - this->AttachAudioBuffer(); - - AudioDeviceGeneric::InitStatus status = audio_device_->Init(); - RTC_HISTOGRAM_ENUMERATION( - "WebRTC.Audio.InitializationResult", static_cast(status), - static_cast(AudioDeviceGeneric::InitStatus::NUM_STATUSES)); - if (status != AudioDeviceGeneric::InitStatus::OK) { - RTC_LOG(LS_ERROR) << "Audio device initialization failed."; - return -1; - } - initialized_ = true; - return 0; - } - - int32_t AudioDeviceModuleIOS::Terminate() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - if (!initialized_) - return 0; - if (audio_device_->Terminate() == -1) { - return -1; - } - initialized_ = false; - return 0; - } - - bool AudioDeviceModuleIOS::Initialized() const { - RTC_DLOG(LS_INFO) << __FUNCTION__ << ": " << initialized_; - return initialized_; - } - - int32_t AudioDeviceModuleIOS::InitSpeaker() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - return audio_device_->InitSpeaker(); - } - - int32_t AudioDeviceModuleIOS::InitMicrophone() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - return audio_device_->InitMicrophone(); - } - - int32_t AudioDeviceModuleIOS::SpeakerVolumeIsAvailable(bool* available) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->SpeakerVolumeIsAvailable(isAvailable) == -1) { - return -1; - } - *available = isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return 0; - } - - int32_t AudioDeviceModuleIOS::SetSpeakerVolume(uint32_t volume) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")"; - CHECKinitialized_(); - return audio_device_->SetSpeakerVolume(volume); - } - - int32_t AudioDeviceModuleIOS::SpeakerVolume(uint32_t* volume) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - uint32_t level = 0; - if (audio_device_->SpeakerVolume(level) == -1) { - return -1; - } - *volume = level; - RTC_DLOG(LS_INFO) << "output: " << *volume; - return 0; - } - - bool AudioDeviceModuleIOS::SpeakerIsInitialized() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - bool isInitialized = audio_device_->SpeakerIsInitialized(); - RTC_DLOG(LS_INFO) << "output: " << isInitialized; - return isInitialized; - } - - bool AudioDeviceModuleIOS::MicrophoneIsInitialized() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - bool isInitialized = audio_device_->MicrophoneIsInitialized(); - RTC_DLOG(LS_INFO) << "output: " << isInitialized; - return isInitialized; - } - - int32_t AudioDeviceModuleIOS::MaxSpeakerVolume(uint32_t* maxVolume) const { - CHECKinitialized_(); - uint32_t maxVol = 0; - if (audio_device_->MaxSpeakerVolume(maxVol) == -1) { - return -1; - } - *maxVolume = maxVol; - return 0; - } - - int32_t AudioDeviceModuleIOS::MinSpeakerVolume(uint32_t* minVolume) const { - CHECKinitialized_(); - uint32_t minVol = 0; - if (audio_device_->MinSpeakerVolume(minVol) == -1) { - return -1; - } - *minVolume = minVol; - return 0; - } - - int32_t AudioDeviceModuleIOS::SpeakerMuteIsAvailable(bool* available) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->SpeakerMuteIsAvailable(isAvailable) == -1) { - return -1; - } - *available 
= isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return 0; - } - - int32_t AudioDeviceModuleIOS::SetSpeakerMute(bool enable) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - CHECKinitialized_(); - return audio_device_->SetSpeakerMute(enable); - } - - int32_t AudioDeviceModuleIOS::SpeakerMute(bool* enabled) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool muted = false; - if (audio_device_->SpeakerMute(muted) == -1) { - return -1; - } - *enabled = muted; - RTC_DLOG(LS_INFO) << "output: " << muted; - return 0; - } - - int32_t AudioDeviceModuleIOS::MicrophoneMuteIsAvailable(bool* available) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->MicrophoneMuteIsAvailable(isAvailable) == -1) { - return -1; - } - *available = isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return 0; - } - - int32_t AudioDeviceModuleIOS::SetMicrophoneMute(bool enable) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - CHECKinitialized_(); - return (audio_device_->SetMicrophoneMute(enable)); - } - - int32_t AudioDeviceModuleIOS::MicrophoneMute(bool* enabled) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool muted = false; - if (audio_device_->MicrophoneMute(muted) == -1) { - return -1; - } - *enabled = muted; - RTC_DLOG(LS_INFO) << "output: " << muted; - return 0; - } - - int32_t AudioDeviceModuleIOS::MicrophoneVolumeIsAvailable(bool* available) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->MicrophoneVolumeIsAvailable(isAvailable) == -1) { - return -1; - } - *available = isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return 0; - } - - int32_t AudioDeviceModuleIOS::SetMicrophoneVolume(uint32_t volume) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")"; - CHECKinitialized_(); - return (audio_device_->SetMicrophoneVolume(volume)); - } - - int32_t AudioDeviceModuleIOS::MicrophoneVolume(uint32_t* volume) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - uint32_t level = 0; - if (audio_device_->MicrophoneVolume(level) == -1) { - return -1; - } - *volume = level; - RTC_DLOG(LS_INFO) << "output: " << *volume; - return 0; - } - - int32_t AudioDeviceModuleIOS::StereoRecordingIsAvailable( - bool* available) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->StereoRecordingIsAvailable(isAvailable) == -1) { - return -1; - } - *available = isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return 0; - } - - int32_t AudioDeviceModuleIOS::SetStereoRecording(bool enable) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - CHECKinitialized_(); - if (enable) { - RTC_LOG(LS_WARNING) << "recording in stereo is not supported"; - } +int32_t AudioDeviceModuleIOS::ActiveAudioLayer(AudioLayer* audioLayer) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + AudioLayer activeAudio; + if (audio_device_->ActiveAudioLayer(activeAudio) == -1) { return -1; } + *audioLayer = activeAudio; + return 0; +} - int32_t AudioDeviceModuleIOS::StereoRecording(bool* enabled) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool stereo = false; - if (audio_device_->StereoRecording(stereo) == -1) { - return -1; - } - *enabled = stereo; - RTC_DLOG(LS_INFO) << "output: " << stereo; +int32_t AudioDeviceModuleIOS::Init() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if 
(initialized_) return 0; + + audio_device_buffer_.reset( + new webrtc::AudioDeviceBuffer(task_queue_factory_.get())); + audio_device_.reset(new ios_adm::AudioDeviceIOS(bypass_voice_processing_, + muted_speech_event_handler_)); + RTC_CHECK(audio_device_); + + this->AttachAudioBuffer(); + + AudioDeviceGeneric::InitStatus status = audio_device_->Init(); + RTC_HISTOGRAM_ENUMERATION( + "WebRTC.Audio.InitializationResult", + static_cast(status), + static_cast(AudioDeviceGeneric::InitStatus::NUM_STATUSES)); + if (status != AudioDeviceGeneric::InitStatus::OK) { + RTC_LOG(LS_ERROR) << "Audio device initialization failed."; + return -1; + } + initialized_ = true; + return 0; +} + +int32_t AudioDeviceModuleIOS::Terminate() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + if (!initialized_) return 0; + if (audio_device_->Terminate() == -1) { + return -1; + } + initialized_ = false; + return 0; +} + +bool AudioDeviceModuleIOS::Initialized() const { + RTC_DLOG(LS_INFO) << __FUNCTION__ << ": " << initialized_; + return initialized_; +} + +int32_t AudioDeviceModuleIOS::InitSpeaker() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + return audio_device_->InitSpeaker(); +} + +int32_t AudioDeviceModuleIOS::InitMicrophone() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + return audio_device_->InitMicrophone(); +} + +int32_t AudioDeviceModuleIOS::SpeakerVolumeIsAvailable(bool* available) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->SpeakerVolumeIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t AudioDeviceModuleIOS::SetSpeakerVolume(uint32_t volume) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")"; + CHECKinitialized_(); + return audio_device_->SetSpeakerVolume(volume); +} + +int32_t AudioDeviceModuleIOS::SpeakerVolume(uint32_t* volume) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + uint32_t level = 0; + if (audio_device_->SpeakerVolume(level) == -1) { + return -1; + } + *volume = level; + RTC_DLOG(LS_INFO) << "output: " << *volume; + return 0; +} + +bool AudioDeviceModuleIOS::SpeakerIsInitialized() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + bool isInitialized = audio_device_->SpeakerIsInitialized(); + RTC_DLOG(LS_INFO) << "output: " << isInitialized; + return isInitialized; +} + +bool AudioDeviceModuleIOS::MicrophoneIsInitialized() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + bool isInitialized = audio_device_->MicrophoneIsInitialized(); + RTC_DLOG(LS_INFO) << "output: " << isInitialized; + return isInitialized; +} + +int32_t AudioDeviceModuleIOS::MaxSpeakerVolume(uint32_t* maxVolume) const { + CHECKinitialized_(); + uint32_t maxVol = 0; + if (audio_device_->MaxSpeakerVolume(maxVol) == -1) { + return -1; + } + *maxVolume = maxVol; + return 0; +} + +int32_t AudioDeviceModuleIOS::MinSpeakerVolume(uint32_t* minVolume) const { + CHECKinitialized_(); + uint32_t minVol = 0; + if (audio_device_->MinSpeakerVolume(minVol) == -1) { + return -1; + } + *minVolume = minVol; + return 0; +} + +int32_t AudioDeviceModuleIOS::SpeakerMuteIsAvailable(bool* available) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->SpeakerMuteIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t 
AudioDeviceModuleIOS::SetSpeakerMute(bool enable) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + CHECKinitialized_(); + return audio_device_->SetSpeakerMute(enable); +} + +int32_t AudioDeviceModuleIOS::SpeakerMute(bool* enabled) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool muted = false; + if (audio_device_->SpeakerMute(muted) == -1) { + return -1; + } + *enabled = muted; + RTC_DLOG(LS_INFO) << "output: " << muted; + return 0; +} + +int32_t AudioDeviceModuleIOS::MicrophoneMuteIsAvailable(bool* available) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->MicrophoneMuteIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t AudioDeviceModuleIOS::SetMicrophoneMute(bool enable) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + CHECKinitialized_(); + return (audio_device_->SetMicrophoneMute(enable)); +} + +int32_t AudioDeviceModuleIOS::MicrophoneMute(bool* enabled) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool muted = false; + if (audio_device_->MicrophoneMute(muted) == -1) { + return -1; + } + *enabled = muted; + RTC_DLOG(LS_INFO) << "output: " << muted; + return 0; +} + +int32_t AudioDeviceModuleIOS::MicrophoneVolumeIsAvailable(bool* available) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->MicrophoneVolumeIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t AudioDeviceModuleIOS::SetMicrophoneVolume(uint32_t volume) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << volume << ")"; + CHECKinitialized_(); + return (audio_device_->SetMicrophoneVolume(volume)); +} + +int32_t AudioDeviceModuleIOS::MicrophoneVolume(uint32_t* volume) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + uint32_t level = 0; + if (audio_device_->MicrophoneVolume(level) == -1) { + return -1; + } + *volume = level; + RTC_DLOG(LS_INFO) << "output: " << *volume; + return 0; +} + +int32_t AudioDeviceModuleIOS::StereoRecordingIsAvailable( + bool* available) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->StereoRecordingIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t AudioDeviceModuleIOS::SetStereoRecording(bool enable) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + CHECKinitialized_(); + if (enable) { + RTC_LOG(LS_WARNING) << "recording in stereo is not supported"; + } + return -1; +} + +int32_t AudioDeviceModuleIOS::StereoRecording(bool* enabled) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool stereo = false; + if (audio_device_->StereoRecording(stereo) == -1) { + return -1; + } + *enabled = stereo; + RTC_DLOG(LS_INFO) << "output: " << stereo; + return 0; +} + +int32_t AudioDeviceModuleIOS::StereoPlayoutIsAvailable(bool* available) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->StereoPlayoutIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t AudioDeviceModuleIOS::SetStereoPlayout(bool enable) { + RTC_DLOG(LS_INFO) 
<< __FUNCTION__ << "(" << enable << ")"; + CHECKinitialized_(); + if (audio_device_->PlayoutIsInitialized()) { + RTC_LOG(LS_ERROR) + << "unable to set stereo mode while playing side is initialized"; + return -1; + } + if (audio_device_->SetStereoPlayout(enable)) { + RTC_LOG(LS_WARNING) << "stereo playout is not supported"; + return -1; + } + int8_t nChannels(1); + if (enable) { + nChannels = 2; + } + audio_device_buffer_.get()->SetPlayoutChannels(nChannels); + return 0; +} + +int32_t AudioDeviceModuleIOS::StereoPlayout(bool* enabled) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool stereo = false; + if (audio_device_->StereoPlayout(stereo) == -1) { + return -1; + } + *enabled = stereo; + RTC_DLOG(LS_INFO) << "output: " << stereo; + return 0; +} + +int32_t AudioDeviceModuleIOS::PlayoutIsAvailable(bool* available) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->PlayoutIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t AudioDeviceModuleIOS::RecordingIsAvailable(bool* available) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + bool isAvailable = false; + if (audio_device_->RecordingIsAvailable(isAvailable) == -1) { + return -1; + } + *available = isAvailable; + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return 0; +} + +int32_t AudioDeviceModuleIOS::MaxMicrophoneVolume(uint32_t* maxVolume) const { + CHECKinitialized_(); + uint32_t maxVol(0); + if (audio_device_->MaxMicrophoneVolume(maxVol) == -1) { + return -1; + } + *maxVolume = maxVol; + return 0; +} + +int32_t AudioDeviceModuleIOS::MinMicrophoneVolume(uint32_t* minVolume) const { + CHECKinitialized_(); + uint32_t minVol(0); + if (audio_device_->MinMicrophoneVolume(minVol) == -1) { + return -1; + } + *minVolume = minVol; + return 0; +} + +int16_t AudioDeviceModuleIOS::PlayoutDevices() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + uint16_t nPlayoutDevices = audio_device_->PlayoutDevices(); + RTC_DLOG(LS_INFO) << "output: " << nPlayoutDevices; + return (int16_t)(nPlayoutDevices); +} + +int32_t AudioDeviceModuleIOS::SetPlayoutDevice(uint16_t index) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")"; + CHECKinitialized_(); + return audio_device_->SetPlayoutDevice(index); +} + +int32_t AudioDeviceModuleIOS::SetPlayoutDevice(WindowsDeviceType device) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + return audio_device_->SetPlayoutDevice(device); +} + +int32_t AudioDeviceModuleIOS::PlayoutDeviceName( + uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)"; + CHECKinitialized_(); + if (name == NULL) { + return -1; + } + if (audio_device_->PlayoutDeviceName(index, name, guid) == -1) { + return -1; + } + if (name != NULL) { + RTC_DLOG(LS_INFO) << "output: name = " << name; + } + if (guid != NULL) { + RTC_DLOG(LS_INFO) << "output: guid = " << guid; + } + return 0; +} + +int32_t AudioDeviceModuleIOS::RecordingDeviceName( + uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)"; + CHECKinitialized_(); + if (name == NULL) { + return -1; + } + if (audio_device_->RecordingDeviceName(index, name, guid) == -1) { + return -1; + } + if (name != NULL) { + RTC_DLOG(LS_INFO) << "output: name = " << name; + } + if (guid != NULL) 
{ + RTC_DLOG(LS_INFO) << "output: guid = " << guid; + } + return 0; +} + +int16_t AudioDeviceModuleIOS::RecordingDevices() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + uint16_t nRecordingDevices = audio_device_->RecordingDevices(); + RTC_DLOG(LS_INFO) << "output: " << nRecordingDevices; + return (int16_t)nRecordingDevices; +} + +int32_t AudioDeviceModuleIOS::SetRecordingDevice(uint16_t index) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")"; + CHECKinitialized_(); + return audio_device_->SetRecordingDevice(index); +} + +int32_t AudioDeviceModuleIOS::SetRecordingDevice(WindowsDeviceType device) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + return audio_device_->SetRecordingDevice(device); +} + +int32_t AudioDeviceModuleIOS::InitPlayout() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + if (PlayoutIsInitialized()) { return 0; } + int32_t result = audio_device_->InitPlayout(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess", + static_cast(result == 0)); + return result; +} - int32_t AudioDeviceModuleIOS::StereoPlayoutIsAvailable(bool* available) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->StereoPlayoutIsAvailable(isAvailable) == -1) { - return -1; - } - *available = isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; +int32_t AudioDeviceModuleIOS::InitRecording() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + if (RecordingIsInitialized()) { return 0; } + int32_t result = audio_device_->InitRecording(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess", + static_cast(result == 0)); + return result; +} - int32_t AudioDeviceModuleIOS::SetStereoPlayout(bool enable) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - CHECKinitialized_(); - if (audio_device_->PlayoutIsInitialized()) { - RTC_LOG(LS_ERROR) << "unable to set stereo mode while playing side is initialized"; - return -1; - } - if (audio_device_->SetStereoPlayout(enable)) { - RTC_LOG(LS_WARNING) << "stereo playout is not supported"; - return -1; - } - int8_t nChannels(1); - if (enable) { - nChannels = 2; - } - audio_device_buffer_.get()->SetPlayoutChannels(nChannels); +bool AudioDeviceModuleIOS::PlayoutIsInitialized() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + return audio_device_->PlayoutIsInitialized(); +} + +bool AudioDeviceModuleIOS::RecordingIsInitialized() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + return audio_device_->RecordingIsInitialized(); +} + +int32_t AudioDeviceModuleIOS::StartPlayout() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + if (Playing()) { return 0; } + audio_device_buffer_.get()->StartPlayout(); + int32_t result = audio_device_->StartPlayout(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess", + static_cast(result == 0)); + return result; +} - int32_t AudioDeviceModuleIOS::StereoPlayout(bool* enabled) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool stereo = false; - if (audio_device_->StereoPlayout(stereo) == -1) { - return -1; - } - *enabled = stereo; - RTC_DLOG(LS_INFO) << "output: " << stereo; +int32_t AudioDeviceModuleIOS::StopPlayout() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + int32_t result = audio_device_->StopPlayout(); + 
audio_device_buffer_.get()->StopPlayout(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess", + static_cast(result == 0)); + return result; +} + +bool AudioDeviceModuleIOS::Playing() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + return audio_device_->Playing(); +} + +int32_t AudioDeviceModuleIOS::StartRecording() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + if (Recording()) { return 0; } + audio_device_buffer_.get()->StartRecording(); + int32_t result = audio_device_->StartRecording(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess", + static_cast(result == 0)); + return result; +} - int32_t AudioDeviceModuleIOS::PlayoutIsAvailable(bool* available) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->PlayoutIsAvailable(isAvailable) == -1) { - return -1; - } - *available = isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return 0; - } +int32_t AudioDeviceModuleIOS::StopRecording() { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized_(); + int32_t result = audio_device_->StopRecording(); + audio_device_buffer_.get()->StopRecording(); + RTC_DLOG(LS_INFO) << "output: " << result; + RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess", + static_cast(result == 0)); + return result; +} - int32_t AudioDeviceModuleIOS::RecordingIsAvailable(bool* available) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - bool isAvailable = false; - if (audio_device_->RecordingIsAvailable(isAvailable) == -1) { - return -1; - } - *available = isAvailable; - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return 0; - } +bool AudioDeviceModuleIOS::Recording() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + return audio_device_->Recording(); +} - int32_t AudioDeviceModuleIOS::MaxMicrophoneVolume(uint32_t* maxVolume) const { - CHECKinitialized_(); - uint32_t maxVol(0); - if (audio_device_->MaxMicrophoneVolume(maxVol) == -1) { - return -1; - } - *maxVolume = maxVol; - return 0; - } +int32_t AudioDeviceModuleIOS::RegisterAudioCallback( + AudioTransport* audioCallback) { + RTC_DLOG(LS_INFO) << __FUNCTION__; + return audio_device_buffer_.get()->RegisterAudioCallback(audioCallback); +} - int32_t AudioDeviceModuleIOS::MinMicrophoneVolume(uint32_t* minVolume) const { - CHECKinitialized_(); - uint32_t minVol(0); - if (audio_device_->MinMicrophoneVolume(minVol) == -1) { - return -1; - } - *minVolume = minVol; - return 0; +int32_t AudioDeviceModuleIOS::PlayoutDelay(uint16_t* delayMS) const { + CHECKinitialized_(); + uint16_t delay = 0; + if (audio_device_->PlayoutDelay(delay) == -1) { + RTC_LOG(LS_ERROR) << "failed to retrieve the playout delay"; + return -1; } + *delayMS = delay; + return 0; +} - int16_t AudioDeviceModuleIOS::PlayoutDevices() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - uint16_t nPlayoutDevices = audio_device_->PlayoutDevices(); - RTC_DLOG(LS_INFO) << "output: " << nPlayoutDevices; - return (int16_t)(nPlayoutDevices); - } +bool AudioDeviceModuleIOS::BuiltInAECIsAvailable() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + bool isAvailable = audio_device_->BuiltInAECIsAvailable(); + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return isAvailable; +} - int32_t AudioDeviceModuleIOS::SetPlayoutDevice(uint16_t index) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")"; - 
CHECKinitialized_(); - return audio_device_->SetPlayoutDevice(index); - } +int32_t AudioDeviceModuleIOS::EnableBuiltInAEC(bool enable) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + CHECKinitialized_(); + int32_t ok = audio_device_->EnableBuiltInAEC(enable); + RTC_DLOG(LS_INFO) << "output: " << ok; + return ok; +} - int32_t AudioDeviceModuleIOS::SetPlayoutDevice(WindowsDeviceType device) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - return audio_device_->SetPlayoutDevice(device); - } +bool AudioDeviceModuleIOS::BuiltInAGCIsAvailable() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + bool isAvailable = audio_device_->BuiltInAGCIsAvailable(); + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return isAvailable; +} - int32_t AudioDeviceModuleIOS::PlayoutDeviceName( - uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)"; - CHECKinitialized_(); - if (name == NULL) { - return -1; - } - if (audio_device_->PlayoutDeviceName(index, name, guid) == -1) { - return -1; - } - if (name != NULL) { - RTC_DLOG(LS_INFO) << "output: name = " << name; - } - if (guid != NULL) { - RTC_DLOG(LS_INFO) << "output: guid = " << guid; - } - return 0; - } +int32_t AudioDeviceModuleIOS::EnableBuiltInAGC(bool enable) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + CHECKinitialized_(); + int32_t ok = audio_device_->EnableBuiltInAGC(enable); + RTC_DLOG(LS_INFO) << "output: " << ok; + return ok; +} - int32_t AudioDeviceModuleIOS::RecordingDeviceName( - uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ", ...)"; - CHECKinitialized_(); - if (name == NULL) { - return -1; - } - if (audio_device_->RecordingDeviceName(index, name, guid) == -1) { - return -1; - } - if (name != NULL) { - RTC_DLOG(LS_INFO) << "output: name = " << name; - } - if (guid != NULL) { - RTC_DLOG(LS_INFO) << "output: guid = " << guid; - } - return 0; - } +bool AudioDeviceModuleIOS::BuiltInNSIsAvailable() const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + CHECKinitialized__BOOL(); + bool isAvailable = audio_device_->BuiltInNSIsAvailable(); + RTC_DLOG(LS_INFO) << "output: " << isAvailable; + return isAvailable; +} - int16_t AudioDeviceModuleIOS::RecordingDevices() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - uint16_t nRecordingDevices = audio_device_->RecordingDevices(); - RTC_DLOG(LS_INFO) << "output: " << nRecordingDevices; - return (int16_t)nRecordingDevices; - } +int32_t AudioDeviceModuleIOS::EnableBuiltInNS(bool enable) { + RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; + CHECKinitialized_(); + int32_t ok = audio_device_->EnableBuiltInNS(enable); + RTC_DLOG(LS_INFO) << "output: " << ok; + return ok; +} - int32_t AudioDeviceModuleIOS::SetRecordingDevice(uint16_t index) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << index << ")"; - CHECKinitialized_(); - return audio_device_->SetRecordingDevice(index); - } +int32_t AudioDeviceModuleIOS::GetPlayoutUnderrunCount() const { + // Don't log here, as this method can be called very often. 
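+  // For reference, a minimal sketch of the guard used below, assuming the
+  // CHECKinitialized_ helpers keep the shape they have in the generic ADM
+  // implementation (the macro bodies are an assumption, not part of this
+  // change):
+  //   #define CHECKinitialized_() \
+  //     if (!initialized_) {      \
+  //       return -1;              \
+  //     }
+  //   #define CHECKinitialized__BOOL() \
+  //     if (!initialized_) {           \
+  //       return false;                \
+  //     }
+  // The guard is a plain flag check, so it stays cheap on this hot path.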
+ CHECKinitialized_(); + int32_t ok = audio_device_->GetPlayoutUnderrunCount(); + return ok; +} - int32_t AudioDeviceModuleIOS::SetRecordingDevice(WindowsDeviceType device) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - return audio_device_->SetRecordingDevice(device); - } - - int32_t AudioDeviceModuleIOS::InitPlayout() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - if (PlayoutIsInitialized()) { - return 0; - } - int32_t result = audio_device_->InitPlayout(); - RTC_DLOG(LS_INFO) << "output: " << result; - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess", - static_cast(result == 0)); - return result; - } - - int32_t AudioDeviceModuleIOS::InitRecording() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - if (RecordingIsInitialized()) { - return 0; - } - int32_t result = audio_device_->InitRecording(); - RTC_DLOG(LS_INFO) << "output: " << result; - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess", - static_cast(result == 0)); - return result; - } - - bool AudioDeviceModuleIOS::PlayoutIsInitialized() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - return audio_device_->PlayoutIsInitialized(); - } - - bool AudioDeviceModuleIOS::RecordingIsInitialized() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - return audio_device_->RecordingIsInitialized(); - } - - int32_t AudioDeviceModuleIOS::StartPlayout() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - if (Playing()) { - return 0; - } - audio_device_buffer_.get()->StartPlayout(); - int32_t result = audio_device_->StartPlayout(); - RTC_DLOG(LS_INFO) << "output: " << result; - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess", - static_cast(result == 0)); - return result; - } - - int32_t AudioDeviceModuleIOS::StopPlayout() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - int32_t result = audio_device_->StopPlayout(); - audio_device_buffer_.get()->StopPlayout(); - RTC_DLOG(LS_INFO) << "output: " << result; - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess", - static_cast(result == 0)); - return result; - } - - bool AudioDeviceModuleIOS::Playing() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - return audio_device_->Playing(); - } - - int32_t AudioDeviceModuleIOS::StartRecording() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - if (Recording()) { - return 0; - } - audio_device_buffer_.get()->StartRecording(); - int32_t result = audio_device_->StartRecording(); - RTC_DLOG(LS_INFO) << "output: " << result; - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess", - static_cast(result == 0)); - return result; - } - - int32_t AudioDeviceModuleIOS::StopRecording() { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized_(); - int32_t result = audio_device_->StopRecording(); - audio_device_buffer_.get()->StopRecording(); - RTC_DLOG(LS_INFO) << "output: " << result; - RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess", - static_cast(result == 0)); - return result; - } - - bool AudioDeviceModuleIOS::Recording() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - return audio_device_->Recording(); - } - - int32_t AudioDeviceModuleIOS::RegisterAudioCallback( - AudioTransport* audioCallback) { - RTC_DLOG(LS_INFO) << __FUNCTION__; - return audio_device_buffer_.get()->RegisterAudioCallback(audioCallback); - } - - int32_t AudioDeviceModuleIOS::PlayoutDelay(uint16_t* delayMS) const { - CHECKinitialized_(); - uint16_t delay = 0; - if 
(audio_device_->PlayoutDelay(delay) == -1) { - RTC_LOG(LS_ERROR) << "failed to retrieve the playout delay"; - return -1; - } - *delayMS = delay; - return 0; - } - - bool AudioDeviceModuleIOS::BuiltInAECIsAvailable() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - bool isAvailable = audio_device_->BuiltInAECIsAvailable(); - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return isAvailable; - } - - int32_t AudioDeviceModuleIOS::EnableBuiltInAEC(bool enable) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - CHECKinitialized_(); - int32_t ok = audio_device_->EnableBuiltInAEC(enable); - RTC_DLOG(LS_INFO) << "output: " << ok; - return ok; - } - - bool AudioDeviceModuleIOS::BuiltInAGCIsAvailable() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - bool isAvailable = audio_device_->BuiltInAGCIsAvailable(); - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return isAvailable; - } - - int32_t AudioDeviceModuleIOS::EnableBuiltInAGC(bool enable) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - CHECKinitialized_(); - int32_t ok = audio_device_->EnableBuiltInAGC(enable); - RTC_DLOG(LS_INFO) << "output: " << ok; - return ok; - } - - bool AudioDeviceModuleIOS::BuiltInNSIsAvailable() const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - CHECKinitialized__BOOL(); - bool isAvailable = audio_device_->BuiltInNSIsAvailable(); - RTC_DLOG(LS_INFO) << "output: " << isAvailable; - return isAvailable; - } - - int32_t AudioDeviceModuleIOS::EnableBuiltInNS(bool enable) { - RTC_DLOG(LS_INFO) << __FUNCTION__ << "(" << enable << ")"; - CHECKinitialized_(); - int32_t ok = audio_device_->EnableBuiltInNS(enable); - RTC_DLOG(LS_INFO) << "output: " << ok; - return ok; - } - - int32_t AudioDeviceModuleIOS::GetPlayoutUnderrunCount() const { - // Don't log here, as this method can be called very often. 
- CHECKinitialized_(); - int32_t ok = audio_device_->GetPlayoutUnderrunCount(); - return ok; - } - - std::optional AudioDeviceModuleIOS::GetStats() const { - if (audio_device_ == nullptr) { - return std::nullopt; - }; - return audio_device_->GetStats(); - } +std::optional AudioDeviceModuleIOS::GetStats() const { + if (audio_device_ == nullptr) { + return std::nullopt; + }; + return audio_device_->GetStats(); +} #if defined(WEBRTC_IOS) - int AudioDeviceModuleIOS::GetPlayoutAudioParameters( - AudioParameters* params) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - int r = audio_device_->GetPlayoutAudioParameters(params); - RTC_DLOG(LS_INFO) << "output: " << r; - return r; - } +int AudioDeviceModuleIOS::GetPlayoutAudioParameters( + AudioParameters* params) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + int r = audio_device_->GetPlayoutAudioParameters(params); + RTC_DLOG(LS_INFO) << "output: " << r; + return r; +} - int AudioDeviceModuleIOS::GetRecordAudioParameters( - AudioParameters* params) const { - RTC_DLOG(LS_INFO) << __FUNCTION__; - int r = audio_device_->GetRecordAudioParameters(params); - RTC_DLOG(LS_INFO) << "output: " << r; - return r; - } +int AudioDeviceModuleIOS::GetRecordAudioParameters( + AudioParameters* params) const { + RTC_DLOG(LS_INFO) << __FUNCTION__; + int r = audio_device_->GetRecordAudioParameters(params); + RTC_DLOG(LS_INFO) << "output: " << r; + return r; +} #endif // WEBRTC_IOS -} -} +} // namespace ios_adm +} // namespace webrtc diff --git a/sdk/objc/native/src/audio/helpers.mm b/sdk/objc/native/src/audio/helpers.mm index cd0469656a..fdc9add6d5 100644 --- a/sdk/objc/native/src/audio/helpers.mm +++ b/sdk/objc/native/src/audio/helpers.mm @@ -39,10 +39,10 @@ std::string StdStringFromNSString(NSString* nsString) { bool CheckAndLogError(BOOL success, NSError* error) { if (!success) { - NSString* msg = - [NSString stringWithFormat:@"Error: %ld, %@, %@", (long)error.code, - error.localizedDescription, - error.localizedFailureReason]; + NSString* msg = [NSString stringWithFormat:@"Error: %ld, %@, %@", + (long)error.code, + error.localizedDescription, + error.localizedFailureReason]; RTC_LOG(LS_ERROR) << StdStringFromNSString(msg); return false; } diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index fe35ce3609..066f3b161c 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -32,7 +32,8 @@ static void LogStreamDescription(AudioStreamBasicDescription description) { " mChannelsPerFrame: %u\n" " mBitsPerChannel: %u\n" " mReserved: %u\n}", - description.mSampleRate, formatIdString, + description.mSampleRate, + formatIdString, static_cast(description.mFormatFlags), static_cast(description.mBytesPerPacket), static_cast(description.mFramesPerPacket), @@ -61,19 +62,21 @@ static const AudioUnitElement kOutputBus = 0; static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { RTC_DCHECK(audio_unit); UInt32 size = sizeof(*enabled); - OSStatus result = AudioUnitGetProperty(audio_unit, - kAUVoiceIOProperty_VoiceProcessingEnableAGC, - kAudioUnitScope_Global, - kInputBus, - enabled, - &size); + OSStatus result = + AudioUnitGetProperty(audio_unit, + kAUVoiceIOProperty_VoiceProcessingEnableAGC, + kAudioUnitScope_Global, + kInputBus, + enabled, + &size); RTCLog(@"VPIO unit AGC: %u", static_cast(*enabled)); return result; } -VoiceProcessingAudioUnit::VoiceProcessingAudioUnit(bool bypass_voice_processing, - bool 
detect_mute_speech, - VoiceProcessingAudioUnitObserver* observer) +VoiceProcessingAudioUnit::VoiceProcessingAudioUnit( + bool bypass_voice_processing, + bool detect_mute_speech, + VoiceProcessingAudioUnitObserver* observer) : bypass_voice_processing_(bypass_voice_processing), detect_mute_speech_(detect_mute_speech), observer_(observer), @@ -115,8 +118,11 @@ bool VoiceProcessingAudioUnit::Init() { // Enable input on the input scope of the input element. UInt32 enable_input = 1; - result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Input, kInputBus, &enable_input, + result = AudioUnitSetProperty(vpio_unit_, + kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, + kInputBus, + &enable_input, sizeof(enable_input)); if (result != noErr) { DisposeAudioUnit(); @@ -128,9 +134,12 @@ bool VoiceProcessingAudioUnit::Init() { // Enable output on the output scope of the output element. UInt32 enable_output = 1; - result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Output, kOutputBus, - &enable_output, sizeof(enable_output)); + result = AudioUnitSetProperty(vpio_unit_, + kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Output, + kOutputBus, + &enable_output, + sizeof(enable_output)); if (result != noErr) { DisposeAudioUnit(); RTCLogError(@"Failed to enable output on output scope of output element. " @@ -144,9 +153,12 @@ bool VoiceProcessingAudioUnit::Init() { AURenderCallbackStruct render_callback; render_callback.inputProc = OnGetPlayoutData; render_callback.inputProcRefCon = this; - result = AudioUnitSetProperty( - vpio_unit_, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, - kOutputBus, &render_callback, sizeof(render_callback)); + result = AudioUnitSetProperty(vpio_unit_, + kAudioUnitProperty_SetRenderCallback, + kAudioUnitScope_Input, + kOutputBus, + &render_callback, + sizeof(render_callback)); if (result != noErr) { DisposeAudioUnit(); RTCLogError(@"Failed to specify the render callback on the output bus. " @@ -158,9 +170,12 @@ bool VoiceProcessingAudioUnit::Init() { // Disable AU buffer allocation for the recorder, we allocate our own. // TODO(henrika): not sure that it actually saves resource to make this call. UInt32 flag = 0; - result = AudioUnitSetProperty( - vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer, - kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag)); + result = AudioUnitSetProperty(vpio_unit_, + kAudioUnitProperty_ShouldAllocateBuffer, + kAudioUnitScope_Output, + kInputBus, + &flag, + sizeof(flag)); if (result != noErr) { DisposeAudioUnit(); RTCLogError(@"Failed to disable buffer allocation on the input bus. " @@ -177,8 +192,10 @@ bool VoiceProcessingAudioUnit::Init() { input_callback.inputProcRefCon = this; result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_SetInputCallback, - kAudioUnitScope_Global, kInputBus, - &input_callback, sizeof(input_callback)); + kAudioUnitScope_Global, + kInputBus, + &input_callback, + sizeof(input_callback)); if (result != noErr) { DisposeAudioUnit(); RTCLogError(@"Failed to specify the input callback on the input bus. " @@ -207,9 +224,12 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { #endif // Set the format on the output scope of the input element/bus. 
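  // (Orientation note, assuming the usual VPIO element numbering where
  //  kInputBus is the microphone element and kOutputBus is the speaker
  //  element: the shared stream format is applied to the output scope of the
  //  input bus, the side the app reads captured audio from, and to the input
  //  scope of the output bus, the side the app feeds playout audio into, so
  //  both directions use the same 16-bit PCM description.)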
- result = - AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, - kAudioUnitScope_Output, kInputBus, &format, size); + result = AudioUnitSetProperty(vpio_unit_, + kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, + kInputBus, + &format, + size); if (result != noErr) { RTCLogError(@"Failed to set format on output scope of input bus. " "Error=%ld.", @@ -218,9 +238,12 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { } // Set the format on the input scope of the output element/bus. - result = - AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, - kAudioUnitScope_Input, kOutputBus, &format, size); + result = AudioUnitSetProperty(vpio_unit_, + kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, + kOutputBus, + &format, + size); if (result != noErr) { RTCLogError(@"Failed to set format on input scope of output bus. " "Error=%ld.", @@ -257,17 +280,21 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { if (detect_mute_speech_) { if (@available(iOS 15, *)) { // Set listener for muted speech event. - AUVoiceIOMutedSpeechActivityEventListener listener = ^(AUVoiceIOSpeechActivityEvent event) { - observer_->OnReceivedMutedSpeechActivity(event); - }; - result = AudioUnitSetProperty(vpio_unit_, - kAUVoiceIOProperty_MutedSpeechActivityEventListener, - kAudioUnitScope_Global, - 0, - &listener, - sizeof(AUVoiceIOMutedSpeechActivityEventListener)); + AUVoiceIOMutedSpeechActivityEventListener listener = + ^(AUVoiceIOSpeechActivityEvent event) { + observer_->OnReceivedMutedSpeechActivity(event); + }; + result = AudioUnitSetProperty( + vpio_unit_, + kAUVoiceIOProperty_MutedSpeechActivityEventListener, + kAudioUnitScope_Global, + 0, + &listener, + sizeof(AUVoiceIOMutedSpeechActivityEventListener)); if (result != noErr) { - RTCLog(@"Failed to set muted speech activity event listener. Error=%ld.", (long)result); + RTCLog( + @"Failed to set muted speech activity event listener. Error=%ld.", + (long)result); } } } @@ -284,7 +311,8 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { if (result == noErr) { RTCLog(@"Successfully bypassed voice processing."); } else { - RTCLogError(@"Failed to bypass voice processing. Error=%ld.", (long)result); + RTCLogError(@"Failed to bypass voice processing. Error=%ld.", + (long)result); } state_ = kInitialized; return true; @@ -305,33 +333,34 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { // Example of error code: kAudioUnitErr_NoConnection (-10876). // All error codes related to audio units are negative and are therefore // converted into a postive value to match the UMA APIs. - RTC_HISTOGRAM_COUNTS_SPARSE_100000( - "WebRTC.Audio.GetAGCStateErrorCode1", (-1) * result); + RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Audio.GetAGCStateErrorCode1", + (-1) * result); } else if (agc_is_enabled) { // Remember that the AGC was enabled by default. Will be used in UMA. agc_was_enabled_by_default = 1; } else { // AGC was initially disabled => try to enable it explicitly. UInt32 enable_agc = 1; - result = - AudioUnitSetProperty(vpio_unit_, - kAUVoiceIOProperty_VoiceProcessingEnableAGC, - kAudioUnitScope_Global, kInputBus, &enable_agc, - sizeof(enable_agc)); + result = AudioUnitSetProperty(vpio_unit_, + kAUVoiceIOProperty_VoiceProcessingEnableAGC, + kAudioUnitScope_Global, + kInputBus, + &enable_agc, + sizeof(enable_agc)); if (result != noErr) { RTCLogError(@"Failed to enable the built-in AGC. 
" "Error=%ld.", (long)result); - RTC_HISTOGRAM_COUNTS_SPARSE_100000( - "WebRTC.Audio.SetAGCStateErrorCode", (-1) * result); + RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Audio.SetAGCStateErrorCode", + (-1) * result); } result = GetAGCState(vpio_unit_, &agc_is_enabled); if (result != noErr) { RTCLogError(@"Failed to get AGC state (2nd attempt). " "Error=%ld.", (long)result); - RTC_HISTOGRAM_COUNTS_SPARSE_100000( - "WebRTC.Audio.GetAGCStateErrorCode2", (-1) * result); + RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Audio.GetAGCStateErrorCode2", + (-1) * result); } } @@ -423,7 +452,9 @@ bool VoiceProcessingAudioUnit::SetMicrophoneMute(bool enable) { } if (result != noErr) { - RTCLogError(@"Failed to %s microphone. Error=%ld", (enable ? "mute" : "unmute"), (long)result); + RTCLogError(@"Failed to %s microphone. Error=%ld", + (enable ? "mute" : "unmute"), + (long)result); return false; } @@ -438,8 +469,8 @@ OSStatus VoiceProcessingAudioUnit::Render(AudioUnitRenderActionFlags* flags, AudioBufferList* io_data) { RTC_DCHECK(vpio_unit_) << "Init() not called."; - OSStatus result = AudioUnitRender(vpio_unit_, flags, time_stamp, - output_bus_number, num_frames, io_data); + OSStatus result = AudioUnitRender( + vpio_unit_, flags, time_stamp, output_bus_number, num_frames, io_data); if (result != noErr) { RTCLogError(@"Failed to render audio unit. Error=%ld", (long)result); } @@ -455,8 +486,8 @@ OSStatus VoiceProcessingAudioUnit::OnGetPlayoutData( AudioBufferList* io_data) { VoiceProcessingAudioUnit* audio_unit = static_cast(in_ref_con); - return audio_unit->NotifyGetPlayoutData(flags, time_stamp, bus_number, - num_frames, io_data); + return audio_unit->NotifyGetPlayoutData( + flags, time_stamp, bus_number, num_frames, io_data); } OSStatus VoiceProcessingAudioUnit::OnDeliverRecordedData( @@ -468,8 +499,8 @@ OSStatus VoiceProcessingAudioUnit::OnDeliverRecordedData( AudioBufferList* io_data) { VoiceProcessingAudioUnit* audio_unit = static_cast(in_ref_con); - return audio_unit->NotifyDeliverRecordedData(flags, time_stamp, bus_number, - num_frames, io_data); + return audio_unit->NotifyDeliverRecordedData( + flags, time_stamp, bus_number, num_frames, io_data); } OSStatus VoiceProcessingAudioUnit::NotifyGetPlayoutData( @@ -478,8 +509,8 @@ OSStatus VoiceProcessingAudioUnit::NotifyGetPlayoutData( UInt32 bus_number, UInt32 num_frames, AudioBufferList* io_data) { - return observer_->OnGetPlayoutData(flags, time_stamp, bus_number, num_frames, - io_data); + return observer_->OnGetPlayoutData( + flags, time_stamp, bus_number, num_frames, io_data); } OSStatus VoiceProcessingAudioUnit::NotifyDeliverRecordedData( @@ -488,8 +519,8 @@ OSStatus VoiceProcessingAudioUnit::NotifyDeliverRecordedData( UInt32 bus_number, UInt32 num_frames, AudioBufferList* io_data) { - return observer_->OnDeliverRecordedData(flags, time_stamp, bus_number, - num_frames, io_data); + return observer_->OnDeliverRecordedData( + flags, time_stamp, bus_number, num_frames, io_data); } AudioStreamBasicDescription VoiceProcessingAudioUnit::GetFormat( diff --git a/sdk/objc/native/src/objc_audio_device.h b/sdk/objc/native/src/objc_audio_device.h index a9c1ae973e..23368c98ec 100644 --- a/sdk/objc/native/src/objc_audio_device.h +++ b/sdk/objc/native/src/objc_audio_device.h @@ -29,7 +29,8 @@ namespace objc_adm { class ObjCAudioDeviceModule : public AudioDeviceModule { public: - explicit ObjCAudioDeviceModule(id audio_device); + explicit ObjCAudioDeviceModule( + id audio_device); ~ObjCAudioDeviceModule() override; // Retrieve the currently utilized audio layer @@ 
-135,13 +136,14 @@ class ObjCAudioDeviceModule : public AudioDeviceModule {
 #endif  // WEBRTC_IOS

 public:
-  OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
-                                 const AudioTimeStamp* time_stamp,
-                                 NSInteger bus_number,
-                                 UInt32 num_frames,
-                                 const AudioBufferList* io_data,
-                                 void* render_context,
-                                 RTC_OBJC_TYPE(RTCAudioDeviceRenderRecordedDataBlock) render_block);
+  OSStatus OnDeliverRecordedData(
+      AudioUnitRenderActionFlags* flags,
+      const AudioTimeStamp* time_stamp,
+      NSInteger bus_number,
+      UInt32 num_frames,
+      const AudioBufferList* io_data,
+      void* render_context,
+      RTC_OBJC_TYPE(RTCAudioDeviceRenderRecordedDataBlock) render_block);

   OSStatus OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
                             const AudioTimeStamp* time_stamp,
@@ -150,20 +152,21 @@ class ObjCAudioDeviceModule : public AudioDeviceModule {
                             AudioBufferList* io_data);

   // Notifies `ObjCAudioDeviceModule` that at least one of the audio input
-  // parameters or audio input latency of `RTCAudioDevice` has changed. It necessary to
-  // update `record_parameters_` with current audio parameter of `RTCAudioDevice`
-  // via `UpdateAudioParameters` and if parameters are actually change then
-  // ADB parameters are updated with `UpdateInputAudioDeviceBuffer`. Audio input latency
-  // stored in `cached_recording_delay_ms_` is also updated with current latency
-  // of `RTCAudioDevice`.
+  // parameters or audio input latency of `RTCAudioDevice` has changed. It is
+  // necessary to update `record_parameters_` with the current audio parameters
+  // of `RTCAudioDevice` via `UpdateAudioParameters`, and if the parameters
+  // actually changed, the ADB parameters are updated with
+  // `UpdateInputAudioDeviceBuffer`. The audio input latency stored in
+  // `cached_recording_delay_ms_` is also updated with the current latency of
+  // `RTCAudioDevice`.
   void HandleAudioInputParametersChange();

-  // Same as `HandleAudioInputParametersChange` but should be called when audio output
-  // parameters of `RTCAudioDevice` has changed.
+  // Same as `HandleAudioInputParametersChange` but should be called when the
+  // audio output parameters of `RTCAudioDevice` have changed.
   void HandleAudioOutputParametersChange();

-  // Notifies `ObjCAudioDeviceModule` about audio input interruption happen due to
-  // any reason so `ObjCAudioDeviceModule` is can prepare to restart of audio IO.
+  // Notifies `ObjCAudioDeviceModule` about an audio input interruption,
+  // whatever its cause, so that `ObjCAudioDeviceModule` can prepare to restart
+  // audio IO.
   void HandleAudioInputInterrupted();

   // Same as `ObjCAudioDeviceModule` but should be called when audio output
@@ -171,33 +174,39 @@ class ObjCAudioDeviceModule : public AudioDeviceModule {
   void HandleAudioOutputInterrupted();

  private:
-  // Update our audio parameters if they are different from current device audio parameters
-  // Returns true when our parameters are update, false - otherwise.
-  // `ObjCAudioDeviceModule` has audio device buffer (ADB) which has audio parameters
-  // of playout & recording. The ADB is configured to work with specific sample rate & channel
-  // count. `ObjCAudioDeviceModule` stores audio parameters which were used to configure ADB in the
-  // fields `playout_parameters_` and `recording_parameters_`.
-  // `RTCAudioDevice` protocol has its own audio parameters exposed as individual properties.
-  // `RTCAudioDevice` audio parameters might change when playout/recording is already in progress,
-  // for example, when device is switched. `RTCAudioDevice` audio parameters must be kept in sync
-  // with ADB audio parameters. This method is invoked when `RTCAudioDevice` reports that it's audio
-  // parameters (`device_params`) are changed and it detects if there any difference with our
-  // current audio parameters (`params`). Our parameters are updated in case of actual change and
-  // method returns true. In case of actual change there is follow-up call to either
-  // `UpdateOutputAudioDeviceBuffer` or `UpdateInputAudioDeviceBuffer` to apply updated
-  // `playout_parameters_` or `recording_parameters_` to ADB.
+  // Update our audio parameters if they are different from the current device
+  // audio parameters. Returns true when our parameters are updated, false
+  // otherwise. `ObjCAudioDeviceModule` has an audio device buffer (ADB) which
+  // holds audio parameters for playout & recording. The ADB is configured to
+  // work with a specific sample rate & channel count. `ObjCAudioDeviceModule`
+  // stores the audio parameters which were used to configure the ADB in the
+  // fields `playout_parameters_` and `recording_parameters_`. The
+  // `RTCAudioDevice` protocol has its own audio parameters exposed as
+  // individual properties. `RTCAudioDevice` audio parameters might change when
+  // playout/recording is already in progress, for example, when the device is
+  // switched. `RTCAudioDevice` audio parameters must be kept in sync with the
+  // ADB audio parameters. This method is invoked when `RTCAudioDevice` reports
+  // that its audio parameters (`device_params`) have changed; it detects
+  // whether there is any difference from our current audio parameters
+  // (`params`). Our parameters are updated in case of an actual change and the
+  // method returns true. In that case there is a follow-up call to either
+  // `UpdateOutputAudioDeviceBuffer` or `UpdateInputAudioDeviceBuffer` to apply
+  // the updated `playout_parameters_` or `recording_parameters_` to the ADB.

-  bool UpdateAudioParameters(AudioParameters& params, const AudioParameters& device_params);
+  bool UpdateAudioParameters(AudioParameters& params,
+                             const AudioParameters& device_params);

-  // Update our cached audio latency with device latency. Device latency is reported by
-  // `RTCAudioDevice` object. Whenever latency is changed, `RTCAudioDevice` is obliged to notify ADM
-  // about the change via `HandleAudioInputParametersChange` or `HandleAudioOutputParametersChange`.
-  // Current device IO latency is cached in the atomic field and used from audio IO thread
-  // to be reported to audio device buffer. It is highly recommended by Apple not to call any
-  // ObjC methods from audio IO thread, that is why implementation relies on caching latency
-  // into a field and being notified when latency is changed, which is the case when device
-  // is switched.
-  void UpdateAudioDelay(std::atomic& delay_ms, const NSTimeInterval device_latency);
+  // Update our cached audio latency with device latency. Device latency is
+  // reported by `RTCAudioDevice` object. Whenever latency is changed,
+  // `RTCAudioDevice` is obliged to notify ADM about the change via
+  // `HandleAudioInputParametersChange` or `HandleAudioOutputParametersChange`.
+  // Current device IO latency is cached in the atomic field and used from audio
+  // IO thread to be reported to audio device buffer.
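+  // (Illustrative flow for the declaration below: a parameters-change handler
+  //  calls UpdateAudioDelay(cached_playout_delay_ms_, latency) on the module
+  //  thread, which essentially performs delay_ms.store(latency_ms) when the
+  //  value changed, while the render path only reads
+  //  cached_playout_delay_ms_.load() as in PlayoutDelay(), so no Objective-C
+  //  call happens on the audio IO thread.)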
It is highly recommended + // by Apple not to call any ObjC methods from audio IO thread, that is why + // implementation relies on caching latency into a field and being notified + // when latency is changed, which is the case when device is switched. + void UpdateAudioDelay(std::atomic& delay_ms, + const NSTimeInterval device_latency); // Uses current `playout_parameters_` to inform the audio device buffer (ADB) // about our internal audio parameters. @@ -214,11 +223,12 @@ class ObjCAudioDeviceModule : public AudioDeviceModule { // AudioDeviceBuffer is a buffer to consume audio recorded by `RTCAudioDevice` // and provide audio to be played via `RTCAudioDevice`. - // Audio PCMs could have different sample rate and channels count, but expected - // to be in 16-bit integer interleaved linear PCM format. - // The current parameters ADB configured to work with is stored in field + // Audio PCMs could have different sample rate and channels count, but + // expected to be in 16-bit integer interleaved linear PCM format. The current + // parameters ADB configured to work with is stored in field // `playout_parameters_` for playout and `record_parameters_` for recording. - // These parameters and ADB must kept in sync with `RTCAudioDevice` audio parameters. + // These parameters and ADB must kept in sync with `RTCAudioDevice` audio + // parameters. std::unique_ptr audio_device_buffer_; // Set to 1 when recording is active and 0 otherwise. diff --git a/sdk/objc/native/src/objc_audio_device.mm b/sdk/objc/native/src/objc_audio_device.mm index d629fae20f..5b51095494 100644 --- a/sdk/objc/native/src/objc_audio_device.mm +++ b/sdk/objc/native/src/objc_audio_device.mm @@ -21,19 +21,25 @@ namespace { -webrtc::AudioParameters RecordParameters(id audio_device) { - const double sample_rate = static_cast([audio_device deviceInputSampleRate]); - const size_t channels = static_cast([audio_device inputNumberOfChannels]); - const size_t frames_per_buffer = - static_cast(sample_rate * [audio_device inputIOBufferDuration] + .5); +webrtc::AudioParameters RecordParameters( + id audio_device) { + const double sample_rate = + static_cast([audio_device deviceInputSampleRate]); + const size_t channels = + static_cast([audio_device inputNumberOfChannels]); + const size_t frames_per_buffer = static_cast( + sample_rate * [audio_device inputIOBufferDuration] + .5); return webrtc::AudioParameters(sample_rate, channels, frames_per_buffer); } -webrtc::AudioParameters PlayoutParameters(id audio_device) { - const double sample_rate = static_cast([audio_device deviceOutputSampleRate]); - const size_t channels = static_cast([audio_device outputNumberOfChannels]); - const size_t frames_per_buffer = - static_cast(sample_rate * [audio_device outputIOBufferDuration] + .5); +webrtc::AudioParameters PlayoutParameters( + id audio_device) { + const double sample_rate = + static_cast([audio_device deviceOutputSampleRate]); + const size_t channels = + static_cast([audio_device outputNumberOfChannels]); + const size_t frames_per_buffer = static_cast( + sample_rate * [audio_device outputIOBufferDuration] + .5); return webrtc::AudioParameters(sample_rate, channels, frames_per_buffer); } @@ -42,8 +48,10 @@ webrtc::AudioParameters PlayoutParameters(id audi namespace webrtc { namespace objc_adm { -ObjCAudioDeviceModule::ObjCAudioDeviceModule(id audio_device) - : audio_device_(audio_device), task_queue_factory_(CreateDefaultTaskQueueFactory()) { +ObjCAudioDeviceModule::ObjCAudioDeviceModule( + id audio_device) + : audio_device_(audio_device), + 
task_queue_factory_(CreateDefaultTaskQueueFactory()) { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK(audio_device_); thread_checker_.Detach(); @@ -55,7 +63,8 @@ ObjCAudioDeviceModule::~ObjCAudioDeviceModule() { RTC_DLOG_F(LS_VERBOSE) << ""; } -int32_t ObjCAudioDeviceModule::RegisterAudioCallback(AudioTransport* audioCallback) { +int32_t ObjCAudioDeviceModule::RegisterAudioCallback( + AudioTransport* audioCallback) { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK(audio_device_buffer_); return audio_device_buffer_->RegisterAudioCallback(audioCallback); @@ -73,12 +82,14 @@ int32_t ObjCAudioDeviceModule::Init() { io_record_thread_checker_.Detach(); thread_ = rtc::Thread::Current(); - audio_device_buffer_.reset(new webrtc::AudioDeviceBuffer(task_queue_factory_.get())); + audio_device_buffer_.reset( + new webrtc::AudioDeviceBuffer(task_queue_factory_.get())); if (![audio_device_ isInitialized]) { if (audio_device_delegate_ == nil) { audio_device_delegate_ = [[ObjCAudioDeviceDelegate alloc] - initWithAudioDeviceModule:rtc::scoped_refptr(this) + initWithAudioDeviceModule:rtc::scoped_refptr( + this) audioDeviceThread:thread_]; } @@ -90,10 +101,12 @@ int32_t ObjCAudioDeviceModule::Init() { } } - playout_parameters_.reset([audio_device_delegate_ preferredOutputSampleRate], 1); + playout_parameters_.reset([audio_device_delegate_ preferredOutputSampleRate], + 1); UpdateOutputAudioDeviceBuffer(); - record_parameters_.reset([audio_device_delegate_ preferredInputSampleRate], 1); + record_parameters_.reset([audio_device_delegate_ preferredInputSampleRate], + 1); UpdateInputAudioDeviceBuffer(); is_initialized_ = true; @@ -148,7 +161,8 @@ int32_t ObjCAudioDeviceModule::PlayoutIsAvailable(bool* available) { bool ObjCAudioDeviceModule::PlayoutIsInitialized() const { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK_RUN_ON(&thread_checker_); - return Initialized() && is_playout_initialized_ && [audio_device_ isPlayoutInitialized]; + return Initialized() && is_playout_initialized_ && + [audio_device_ isPlayoutInitialized]; } int32_t ObjCAudioDeviceModule::InitPlayout() { @@ -169,7 +183,8 @@ int32_t ObjCAudioDeviceModule::InitPlayout() { } } - if (UpdateAudioParameters(playout_parameters_, PlayoutParameters(audio_device_))) { + if (UpdateAudioParameters(playout_parameters_, + PlayoutParameters(audio_device_))) { UpdateOutputAudioDeviceBuffer(); } @@ -224,8 +239,10 @@ int32_t ObjCAudioDeviceModule::StopPlayout() { int32_t ObjCAudioDeviceModule::PlayoutDelay(uint16_t* delayMS) const { RTC_DCHECK_RUN_ON(&thread_checker_); - *delayMS = static_cast(rtc::SafeClamp( - cached_playout_delay_ms_.load(), 0, std::numeric_limits::max())); + *delayMS = static_cast( + rtc::SafeClamp(cached_playout_delay_ms_.load(), + 0, + std::numeric_limits::max())); return 0; } @@ -239,7 +256,8 @@ int32_t ObjCAudioDeviceModule::RecordingIsAvailable(bool* available) { bool ObjCAudioDeviceModule::RecordingIsInitialized() const { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK_RUN_ON(&thread_checker_); - return Initialized() && is_recording_initialized_ && [audio_device_ isRecordingInitialized]; + return Initialized() && is_recording_initialized_ && + [audio_device_ isRecordingInitialized]; } int32_t ObjCAudioDeviceModule::InitRecording() { @@ -260,7 +278,8 @@ int32_t ObjCAudioDeviceModule::InitRecording() { } } - if (UpdateAudioParameters(record_parameters_, RecordParameters(audio_device_))) { + if (UpdateAudioParameters(record_parameters_, + RecordParameters(audio_device_))) { UpdateInputAudioDeviceBuffer(); } @@ -315,7 +334,8 @@ int32_t 
ObjCAudioDeviceModule::StopRecording() { #if defined(WEBRTC_IOS) -int ObjCAudioDeviceModule::GetPlayoutAudioParameters(AudioParameters* params) const { +int ObjCAudioDeviceModule::GetPlayoutAudioParameters( + AudioParameters* params) const { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK(playout_parameters_.is_valid()); RTC_DCHECK_RUN_ON(&thread_checker_); @@ -323,7 +343,8 @@ int ObjCAudioDeviceModule::GetPlayoutAudioParameters(AudioParameters* params) co return 0; } -int ObjCAudioDeviceModule::GetRecordAudioParameters(AudioParameters* params) const { +int ObjCAudioDeviceModule::GetRecordAudioParameters( + AudioParameters* params) const { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK(record_parameters_.is_valid()); RTC_DCHECK_RUN_ON(&thread_checker_); @@ -339,11 +360,13 @@ void ObjCAudioDeviceModule::UpdateOutputAudioDeviceBuffer() { RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first"; RTC_DCHECK_GT(playout_parameters_.sample_rate(), 0); - RTC_DCHECK(playout_parameters_.channels() == 1 || playout_parameters_.channels() == 2); + RTC_DCHECK(playout_parameters_.channels() == 1 || + playout_parameters_.channels() == 2); audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate()); audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels()); - playout_fine_audio_buffer_.reset(new FineAudioBuffer(audio_device_buffer_.get())); + playout_fine_audio_buffer_.reset( + new FineAudioBuffer(audio_device_buffer_.get())); } void ObjCAudioDeviceModule::UpdateInputAudioDeviceBuffer() { @@ -352,15 +375,18 @@ void ObjCAudioDeviceModule::UpdateInputAudioDeviceBuffer() { RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first"; RTC_DCHECK_GT(record_parameters_.sample_rate(), 0); - RTC_DCHECK(record_parameters_.channels() == 1 || record_parameters_.channels() == 2); + RTC_DCHECK(record_parameters_.channels() == 1 || + record_parameters_.channels() == 2); - audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate()); + audio_device_buffer_->SetRecordingSampleRate( + record_parameters_.sample_rate()); audio_device_buffer_->SetRecordingChannels(record_parameters_.channels()); - record_fine_audio_buffer_.reset(new FineAudioBuffer(audio_device_buffer_.get())); + record_fine_audio_buffer_.reset( + new FineAudioBuffer(audio_device_buffer_.get())); } -void ObjCAudioDeviceModule::UpdateAudioDelay(std::atomic& delay_ms, - const NSTimeInterval device_latency) { +void ObjCAudioDeviceModule::UpdateAudioDelay( + std::atomic& delay_ms, const NSTimeInterval device_latency) { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK_RUN_ON(&thread_checker_); int latency_ms = static_cast(rtc::kNumMillisecsPerSec * device_latency); @@ -374,12 +400,13 @@ void ObjCAudioDeviceModule::UpdateAudioDelay(std::atomic& delay_ms, } } -bool ObjCAudioDeviceModule::UpdateAudioParameters(AudioParameters& params, - const AudioParameters& device_params) { +bool ObjCAudioDeviceModule::UpdateAudioParameters( + AudioParameters& params, const AudioParameters& device_params) { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK_RUN_ON(&thread_checker_); if (!device_params.is_complete()) { - RTC_LOG_F(LS_INFO) << "Device params are incomplete: " << device_params.ToString(); + RTC_LOG_F(LS_INFO) << "Device params are incomplete: " + << device_params.ToString(); return false; } if (params.channels() == device_params.channels() && @@ -390,10 +417,12 @@ bool ObjCAudioDeviceModule::UpdateAudioParameters(AudioParameters& params, return false; } - RTC_LOG_F(LS_INFO) << "Audio params will be changed from: 
" << params.ToString() + RTC_LOG_F(LS_INFO) << "Audio params will be changed from: " + << params.ToString() << " to: " << device_params.ToString(); - params.reset( - device_params.sample_rate(), device_params.channels(), device_params.frames_per_buffer()); + params.reset(device_params.sample_rate(), + device_params.channels(), + device_params.frames_per_buffer()); return true; } @@ -414,14 +443,17 @@ OSStatus ObjCAudioDeviceModule::OnDeliverRecordedData( // AudioBuffer already fullfilled with audio data RTC_DCHECK_EQ(1, io_data->mNumberBuffers); const AudioBuffer* audio_buffer = &io_data->mBuffers[0]; - RTC_DCHECK(audio_buffer->mNumberChannels == 1 || audio_buffer->mNumberChannels == 2); + RTC_DCHECK(audio_buffer->mNumberChannels == 1 || + audio_buffer->mNumberChannels == 2); record_fine_audio_buffer_->DeliverRecordedData( - rtc::ArrayView(static_cast(audio_buffer->mData), num_frames), + rtc::ArrayView( + static_cast(audio_buffer->mData), num_frames), cached_recording_delay_ms_.load()); return noErr; } - RTC_DCHECK(render_block != nullptr) << "Either io_data or render_block must be provided"; + RTC_DCHECK(render_block != nullptr) + << "Either io_data or render_block must be provided"; // Set the size of our own audio buffer and clear it first to avoid copying // in combination with potential reallocations. @@ -439,13 +471,18 @@ OSStatus ObjCAudioDeviceModule::OnDeliverRecordedData( audio_buffer_list.mNumberBuffers = 1; AudioBuffer* audio_buffer = &audio_buffer_list.mBuffers[0]; audio_buffer->mNumberChannels = channels_count; - audio_buffer->mDataByteSize = - record_audio_buffer_.size() * sizeof(decltype(record_audio_buffer_)::value_type); + audio_buffer->mDataByteSize = record_audio_buffer_.size() * + sizeof(decltype(record_audio_buffer_)::value_type); audio_buffer->mData = reinterpret_cast(record_audio_buffer_.data()); - // Obtain the recorded audio samples by initiating a rendering cycle into own buffer. - result = - render_block(flags, time_stamp, bus_number, num_frames, &audio_buffer_list, render_context); + // Obtain the recorded audio samples by initiating a rendering cycle into own + // buffer. + result = render_block(flags, + time_stamp, + bus_number, + num_frames, + &audio_buffer_list, + render_context); if (result != noErr) { RTC_LOG_F(LS_ERROR) << "Failed to render audio: " << result; return result; @@ -454,21 +491,23 @@ OSStatus ObjCAudioDeviceModule::OnDeliverRecordedData( // Get a pointer to the recorded audio and send it to the WebRTC ADB. // Use the FineAudioBuffer instance to convert between native buffer size // and the 10ms buffer size used by WebRTC. - record_fine_audio_buffer_->DeliverRecordedData(record_audio_buffer_, - cached_recording_delay_ms_.load()); + record_fine_audio_buffer_->DeliverRecordedData( + record_audio_buffer_, cached_recording_delay_ms_.load()); return noErr; } -OSStatus ObjCAudioDeviceModule::OnGetPlayoutData(AudioUnitRenderActionFlags* flags, - const AudioTimeStamp* time_stamp, - NSInteger bus_number, - UInt32 num_frames, - AudioBufferList* io_data) { +OSStatus ObjCAudioDeviceModule::OnGetPlayoutData( + AudioUnitRenderActionFlags* flags, + const AudioTimeStamp* time_stamp, + NSInteger bus_number, + UInt32 num_frames, + AudioBufferList* io_data) { RTC_DCHECK_RUN_ON(&io_playout_thread_checker_); // Verify 16-bit, noninterleaved mono or stereo PCM signal format. 
RTC_DCHECK_EQ(1, io_data->mNumberBuffers); AudioBuffer* audio_buffer = &io_data->mBuffers[0]; - RTC_DCHECK(audio_buffer->mNumberChannels == 1 || audio_buffer->mNumberChannels == 2); + RTC_DCHECK(audio_buffer->mNumberChannels == 1 || + audio_buffer->mNumberChannels == 2); RTC_DCHECK_EQ(audio_buffer->mDataByteSize, sizeof(int16_t) * num_frames * audio_buffer->mNumberChannels); @@ -476,7 +515,9 @@ OSStatus ObjCAudioDeviceModule::OnGetPlayoutData(AudioUnitRenderActionFlags* fla // activated. if (!playing_.load()) { *flags |= kAudioUnitRenderAction_OutputIsSilence; - memset(static_cast(audio_buffer->mData), 0, audio_buffer->mDataByteSize); + memset(static_cast(audio_buffer->mData), + 0, + audio_buffer->mDataByteSize); return noErr; } @@ -506,7 +547,8 @@ void ObjCAudioDeviceModule::HandleAudioInputParametersChange() { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK_RUN_ON(&thread_checker_); - if (UpdateAudioParameters(record_parameters_, RecordParameters(audio_device_))) { + if (UpdateAudioParameters(record_parameters_, + RecordParameters(audio_device_))) { UpdateInputAudioDeviceBuffer(); } @@ -517,7 +559,8 @@ void ObjCAudioDeviceModule::HandleAudioOutputParametersChange() { RTC_DLOG_F(LS_VERBOSE) << ""; RTC_DCHECK_RUN_ON(&thread_checker_); - if (UpdateAudioParameters(playout_parameters_, PlayoutParameters(audio_device_))) { + if (UpdateAudioParameters(playout_parameters_, + PlayoutParameters(audio_device_))) { UpdateOutputAudioDeviceBuffer(); } @@ -538,15 +581,17 @@ int16_t ObjCAudioDeviceModule::RecordingDevices() { return 0; } -int32_t ObjCAudioDeviceModule::PlayoutDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { +int32_t ObjCAudioDeviceModule::PlayoutDeviceName( + uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { return -1; } -int32_t ObjCAudioDeviceModule::RecordingDeviceName(uint16_t index, - char name[kAdmMaxDeviceNameSize], - char guid[kAdmMaxGuidSize]) { +int32_t ObjCAudioDeviceModule::RecordingDeviceName( + uint16_t index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { return -1; } @@ -664,7 +709,8 @@ int32_t ObjCAudioDeviceModule::StereoPlayout(bool* enabled) const { return 0; } -int32_t ObjCAudioDeviceModule::StereoRecordingIsAvailable(bool* available) const { +int32_t ObjCAudioDeviceModule::StereoRecordingIsAvailable( + bool* available) const { *available = false; return 0; } diff --git a/sdk/objc/native/src/objc_audio_device_delegate.h b/sdk/objc/native/src/objc_audio_device_delegate.h index 3af079dad9..38c9374d2e 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.h +++ b/sdk/objc/native/src/objc_audio_device_delegate.h @@ -22,11 +22,14 @@ class ObjCAudioDeviceModule; } // namespace objc_adm } // namespace webrtc -@interface ObjCAudioDeviceDelegate : NSObject +@interface ObjCAudioDeviceDelegate + : NSObject -- (instancetype)initWithAudioDeviceModule: - (rtc::scoped_refptr)audioDeviceModule - audioDeviceThread:(rtc::Thread*)thread; +- (instancetype) + initWithAudioDeviceModule: + (rtc::scoped_refptr) + audioDeviceModule + audioDeviceThread:(rtc::Thread*)thread; - (void)resetAudioDeviceModule; diff --git a/sdk/objc/native/src/objc_audio_device_delegate.mm b/sdk/objc/native/src/objc_audio_device_delegate.mm index d78d20e609..da26acdb53 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.mm +++ b/sdk/objc/native/src/objc_audio_device_delegate.mm @@ -30,10 +30,12 @@ constexpr double kPreferredOutputSampleRate = 48000.0; constexpr NSTimeInterval kPeferredInputIOBufferDuration = 
0.02; constexpr NSTimeInterval kPeferredOutputIOBufferDuration = 0.02; -class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual { +class AudioDeviceDelegateImpl final + : public rtc::RefCountedNonVirtual { public: AudioDeviceDelegateImpl( - rtc::scoped_refptr audio_device_module, + rtc::scoped_refptr + audio_device_module, rtc::Thread* thread) : audio_device_module_(audio_device_module), thread_(thread) { RTC_DCHECK(audio_device_module_); @@ -49,7 +51,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual audio_device_module_; + rtc::scoped_refptr + audio_device_module_; rtc::Thread* thread_; }; @@ -71,67 +74,73 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual)audioDeviceModule - audioDeviceThread:(rtc::Thread*)thread { +- (instancetype) + initWithAudioDeviceModule: + (rtc::scoped_refptr) + audioDeviceModule + audioDeviceThread:(rtc::Thread*)thread { RTC_DCHECK_RUN_ON(thread); self = [super init]; if (self) { - impl_ = rtc::make_ref_counted(audioDeviceModule, thread); + impl_ = rtc::make_ref_counted(audioDeviceModule, + thread); preferredInputSampleRate_ = kPreferredInputSampleRate; preferredInputIOBufferDuration_ = kPeferredInputIOBufferDuration; preferredOutputSampleRate_ = kPreferredOutputSampleRate; preferredOutputIOBufferDuration_ = kPeferredOutputIOBufferDuration; rtc::scoped_refptr playout_delegate = impl_; - getPlayoutData_ = ^OSStatus(AudioUnitRenderActionFlags* _Nonnull actionFlags, - const AudioTimeStamp* _Nonnull timestamp, - NSInteger inputBusNumber, - UInt32 frameCount, - AudioBufferList* _Nonnull outputData) { - webrtc::objc_adm::ObjCAudioDeviceModule* audio_device = - playout_delegate->audio_device_module(); - if (audio_device) { - return audio_device->OnGetPlayoutData( - actionFlags, timestamp, inputBusNumber, frameCount, outputData); - } else { - *actionFlags |= kAudioUnitRenderAction_OutputIsSilence; - RTC_LOG(LS_VERBOSE) << "No alive audio device"; - return noErr; - } - }; - - rtc::scoped_refptr record_delegate = impl_; - deliverRecordedData_ = + getPlayoutData_ = ^OSStatus(AudioUnitRenderActionFlags* _Nonnull actionFlags, const AudioTimeStamp* _Nonnull timestamp, NSInteger inputBusNumber, UInt32 frameCount, - const AudioBufferList* _Nullable inputData, - void* renderContext, - RTC_OBJC_TYPE(RTCAudioDeviceRenderRecordedDataBlock) _Nullable renderBlock) { + AudioBufferList* _Nonnull outputData) { webrtc::objc_adm::ObjCAudioDeviceModule* audio_device = - record_delegate->audio_device_module(); + playout_delegate->audio_device_module(); if (audio_device) { - return audio_device->OnDeliverRecordedData(actionFlags, - timestamp, - inputBusNumber, - frameCount, - inputData, - renderContext, - renderBlock); + return audio_device->OnGetPlayoutData( + actionFlags, timestamp, inputBusNumber, frameCount, outputData); } else { + *actionFlags |= kAudioUnitRenderAction_OutputIsSilence; RTC_LOG(LS_VERBOSE) << "No alive audio device"; return noErr; } }; + + rtc::scoped_refptr record_delegate = impl_; + deliverRecordedData_ = ^OSStatus( + AudioUnitRenderActionFlags* _Nonnull actionFlags, + const AudioTimeStamp* _Nonnull timestamp, + NSInteger inputBusNumber, + UInt32 frameCount, + const AudioBufferList* _Nullable inputData, + void* renderContext, + RTC_OBJC_TYPE( + RTCAudioDeviceRenderRecordedDataBlock) _Nullable renderBlock) { + webrtc::objc_adm::ObjCAudioDeviceModule* audio_device = + record_delegate->audio_device_module(); + if (audio_device) { + return audio_device->OnDeliverRecordedData(actionFlags, + timestamp, + 
inputBusNumber, + frameCount, + inputData, + renderContext, + renderBlock); + } else { + RTC_LOG(LS_VERBOSE) << "No alive audio device"; + return noErr; + } + }; } return self; } - (void)notifyAudioInputParametersChange { RTC_DCHECK_RUN_ON(impl_->thread()); - webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module(); + webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = + impl_->audio_device_module(); if (audio_device_module) { audio_device_module->HandleAudioInputParametersChange(); } @@ -139,7 +148,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtualthread()); - webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module(); + webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = + impl_->audio_device_module(); if (audio_device_module) { audio_device_module->HandleAudioOutputParametersChange(); } @@ -147,7 +157,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtualthread()); - webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module(); + webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = + impl_->audio_device_module(); if (audio_device_module) { audio_device_module->HandleAudioInputInterrupted(); } @@ -155,7 +166,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtualthread()); - webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module(); + webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = + impl_->audio_device_module(); if (audio_device_module) { audio_device_module->HandleAudioOutputInterrupted(); } diff --git a/sdk/objc/native/src/objc_frame_buffer.mm b/sdk/objc/native/src/objc_frame_buffer.mm index 00e4b4be85..b3d89e058b 100644 --- a/sdk/objc/native/src/objc_frame_buffer.mm +++ b/sdk/objc/native/src/objc_frame_buffer.mm @@ -18,11 +18,14 @@ namespace webrtc { namespace { -/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */ +/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping + * RTC_OBJC_TYPE(RTCI420Buffer) */ class ObjCI420FrameBuffer : public I420BufferInterface { public: explicit ObjCI420FrameBuffer(id frame_buffer) - : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {} + : frame_buffer_(frame_buffer), + width_(frame_buffer.width), + height_(frame_buffer.height) {} ~ObjCI420FrameBuffer() override {} int width() const override { return width_; } @@ -49,8 +52,11 @@ class ObjCI420FrameBuffer : public I420BufferInterface { } // namespace -ObjCFrameBuffer::ObjCFrameBuffer(id frame_buffer) - : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {} +ObjCFrameBuffer::ObjCFrameBuffer( + id frame_buffer) + : frame_buffer_(frame_buffer), + width_(frame_buffer.width), + height_(frame_buffer.height) {} ObjCFrameBuffer::~ObjCFrameBuffer() {} @@ -70,20 +76,24 @@ rtc::scoped_refptr ObjCFrameBuffer::ToI420() { return rtc::make_ref_counted([frame_buffer_ toI420]); } -rtc::scoped_refptr ObjCFrameBuffer::CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) { - if ([frame_buffer_ respondsToSelector:@selector - (cropAndScaleWith:offsetY:cropWidth:cropHeight:scaleWidth:scaleHeight:)]) { - return rtc::make_ref_counted([frame_buffer_ cropAndScaleWith:offset_x - offsetY:offset_y - cropWidth:crop_width - cropHeight:crop_height - scaleWidth:scaled_width - 
scaleHeight:scaled_height]); +rtc::scoped_refptr ObjCFrameBuffer::CropAndScale( + int offset_x, + int offset_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) { + if ([frame_buffer_ + respondsToSelector:@selector + (cropAndScaleWith: + offsetY:cropWidth:cropHeight:scaleWidth:scaleHeight:)]) { + return rtc::make_ref_counted([frame_buffer_ + cropAndScaleWith:offset_x + offsetY:offset_y + cropWidth:crop_width + cropHeight:crop_height + scaleWidth:scaled_width + scaleHeight:scaled_height]); } // Use the default implementation. @@ -91,7 +101,8 @@ rtc::scoped_refptr ObjCFrameBuffer::CropAndScale(int offset_x, offset_x, offset_y, crop_width, crop_height, scaled_width, scaled_height); } -id ObjCFrameBuffer::wrapped_frame_buffer() const { +id ObjCFrameBuffer::wrapped_frame_buffer() + const { return frame_buffer_; } @@ -100,7 +111,8 @@ id ToObjCVideoFrameBuffer( if (buffer->type() == VideoFrameBuffer::Type::kNative) { return static_cast(buffer.get())->wrapped_frame_buffer(); } else { - return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()]; + return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] + initWithFrameBuffer:buffer->ToI420()]; } } diff --git a/sdk/objc/native/src/objc_network_monitor.mm b/sdk/objc/native/src/objc_network_monitor.mm index 1a1f445e1d..bbe6e18546 100644 --- a/sdk/objc/native/src/objc_network_monitor.mm +++ b/sdk/objc/native/src/objc_network_monitor.mm @@ -41,7 +41,8 @@ void ObjCNetworkMonitor::Start() { safety_flag_->SetAlive(); network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this]; if (network_monitor_ == nil) { - RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?"; + RTC_LOG(LS_WARNING) + << "Failed to create RTCNetworkMonitor; not available on this OS?"; } started_ = true; } @@ -57,8 +58,8 @@ void ObjCNetworkMonitor::Stop() { started_ = false; } -rtc::NetworkMonitorInterface::InterfaceInfo ObjCNetworkMonitor::GetInterfaceInfo( - absl::string_view interface_name) { +rtc::NetworkMonitorInterface::InterfaceInfo + ObjCNetworkMonitor::GetInterfaceInfo(absl::string_view interface_name) { RTC_DCHECK_RUN_ON(thread_); if (adapter_type_by_name_.empty()) { // If we have no path update, assume everything's available, because it's @@ -83,7 +84,8 @@ rtc::NetworkMonitorInterface::InterfaceInfo ObjCNetworkMonitor::GetInterfaceInfo } void ObjCNetworkMonitor::OnPathUpdate( - std::map adapter_type_by_name) { + std::map + adapter_type_by_name) { thread_->PostTask(SafeTask(safety_flag_, [this, adapter_type_by_name] { RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(network_monitor_ != nil); diff --git a/sdk/objc/native/src/objc_video_decoder_factory.mm b/sdk/objc/native/src/objc_video_decoder_factory.mm index 84f97d6539..d30dc456ae 100644 --- a/sdk/objc/native/src/objc_video_decoder_factory.mm +++ b/sdk/objc/native/src/objc_video_decoder_factory.mm @@ -35,16 +35,19 @@ namespace { class ObjCVideoDecoder : public VideoDecoder { public: ObjCVideoDecoder(id decoder) - : decoder_(decoder), implementation_name_([decoder implementationName].stdString) {} + : decoder_(decoder), + implementation_name_([decoder implementationName].stdString) {} bool Configure(const Settings &settings) override { - return - [decoder_ startDecodeWithNumberOfCores:settings.number_of_cores()] == WEBRTC_VIDEO_CODEC_OK; + return [decoder_ startDecodeWithNumberOfCores:settings.number_of_cores()] == + WEBRTC_VIDEO_CODEC_OK; } - int32_t Decode(const EncodedImage &input_image, int64_t render_time_ms = -1) override { + int32_t Decode(const 
EncodedImage &input_image, + int64_t render_time_ms = -1) override { RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = - [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image]; + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] + initWithNativeEncodedImage:input_image]; return [decoder_ decode:encodedImage missingFrames:false @@ -52,7 +55,8 @@ class ObjCVideoDecoder : public VideoDecoder { renderTimeMs:render_time_ms]; } - int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override { + int32_t RegisterDecodeCompleteCallback( + DecodedImageCallback *callback) override { [decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) { const auto buffer = rtc::make_ref_counted(frame.buffer); VideoFrame videoFrame = VideoFrame::Builder() @@ -69,7 +73,9 @@ class ObjCVideoDecoder : public VideoDecoder { int32_t Release() override { return [decoder_ releaseDecoder]; } - const char *ImplementationName() const override { return implementation_name_.c_str(); } + const char *ImplementationName() const override { + return implementation_name_.c_str(); + } private: id decoder_; @@ -83,19 +89,24 @@ ObjCVideoDecoderFactory::ObjCVideoDecoderFactory( ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {} -id ObjCVideoDecoderFactory::wrapped_decoder_factory() const { +id + ObjCVideoDecoderFactory::wrapped_decoder_factory() const { return decoder_factory_; } -std::unique_ptr ObjCVideoDecoderFactory::Create(const Environment &env, - const SdpVideoFormat &format) { +std::unique_ptr ObjCVideoDecoderFactory::Create( + const Environment &env, const SdpVideoFormat &format) { NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()]; - for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * + codecInfo in decoder_factory_.supportedCodecs) { if ([codecName isEqualToString:codecInfo.name]) { - id decoder = [decoder_factory_ createDecoder:codecInfo]; + id decoder = + [decoder_factory_ createDecoder:codecInfo]; - if ([decoder conformsToProtocol:@protocol(RTC_OBJC_TYPE(RTCNativeVideoDecoderBuilder))]) { - return [((id)decoder) build:env]; + if ([decoder conformsToProtocol:@protocol(RTC_OBJC_TYPE( + RTCNativeVideoDecoderBuilder))]) { + return [(( + id)decoder) build:env]; } else { return std::unique_ptr(new ObjCVideoDecoder(decoder)); } @@ -105,9 +116,11 @@ std::unique_ptr ObjCVideoDecoderFactory::Create(const Environment return nullptr; } -std::vector ObjCVideoDecoderFactory::GetSupportedFormats() const { +std::vector ObjCVideoDecoderFactory::GetSupportedFormats() + const { std::vector supported_formats; - for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * + supportedCodec in decoder_factory_.supportedCodecs) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } diff --git a/sdk/objc/native/src/objc_video_encoder_factory.h b/sdk/objc/native/src/objc_video_encoder_factory.h index ba355f48eb..a42f75878a 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.h +++ b/sdk/objc/native/src/objc_video_encoder_factory.h @@ -35,8 +35,9 @@ class ObjCVideoEncoderFactory : public VideoEncoderFactory { std::vector GetSupportedFormats() const override; std::vector GetImplementations() const override; - CodecSupport QueryCodecSupport(const SdpVideoFormat& format, - std::optional scalability_mode) const override; + CodecSupport QueryCodecSupport( + const SdpVideoFormat& format, 
+ std::optional scalability_mode) const override; std::unique_ptr Create(const Environment& env, const SdpVideoFormat& format) override; std::unique_ptr GetEncoderSelector() const override; diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm index 919848a161..5d46915b22 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.mm +++ b/sdk/objc/native/src/objc_video_encoder_factory.mm @@ -40,29 +40,37 @@ namespace { class ObjCVideoEncoder : public VideoEncoder { public: ObjCVideoEncoder(id encoder) - : encoder_(encoder), implementation_name_([encoder implementationName].stdString) {} + : encoder_(encoder), + implementation_name_([encoder implementationName].stdString) {} - int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override { + int32_t InitEncode(const VideoCodec *codec_settings, + const Settings &encoder_settings) override { RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings = - [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings]; + [[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] + initWithNativeVideoCodec:codec_settings]; return [encoder_ startEncodeWithSettings:settings numberOfCores:encoder_settings.number_of_cores]; } - int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override { + int32_t RegisterEncodeCompleteCallback( + EncodedImageCallback *callback) override { if (callback) { - [encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame, - id _Nonnull info) { + [encoder_ setCallback:^BOOL( + RTC_OBJC_TYPE(RTCEncodedImage) *_Nonnull frame, + id _Nonnull info) { EncodedImage encodedImage = [frame nativeEncodedImage]; - // Handle types that can be converted into one of CodecSpecificInfo's hard coded cases. + // Handle types that can be converted into one of CodecSpecificInfo's + // hard coded cases. CodecSpecificInfo codecSpecificInfo; - if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) { - codecSpecificInfo = - [(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo]; + if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) + class]]) { + codecSpecificInfo = [(RTC_OBJC_TYPE( + RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo]; } - EncodedImageCallback::Result res = callback->OnEncodedImage(encodedImage, &codecSpecificInfo); + EncodedImageCallback::Result res = + callback->OnEncodedImage(encodedImage, &codecSpecificInfo); return res.error == EncodedImageCallback::Result::OK; }]; } else { @@ -87,7 +95,8 @@ class ObjCVideoEncoder : public VideoEncoder { void SetRates(const RateControlParameters ¶meters) override { const uint32_t bitrate = parameters.bitrate.get_sum_kbps(); - const uint32_t framerate = static_cast(parameters.framerate_fps + 0.5); + const uint32_t framerate = + static_cast(parameters.framerate_fps + 0.5); [encoder_ setBitrate:bitrate framerate:framerate]; } @@ -95,12 +104,15 @@ class ObjCVideoEncoder : public VideoEncoder { EncoderInfo info; info.implementation_name = implementation_name_; - RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings]; - info.scaling_settings = qp_thresholds ? ScalingSettings(qp_thresholds.low, qp_thresholds.high) : - ScalingSettings::kOff; + RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = + [encoder_ scalingSettings]; + info.scaling_settings = qp_thresholds ? 
+ ScalingSettings(qp_thresholds.low, qp_thresholds.high) : + ScalingSettings::kOff; info.requested_resolution_alignment = encoder_.resolutionAlignment > 0 ?: 1; - info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers; + info.apply_alignment_to_all_simulcast_layers = + encoder_.applyAlignmentToAllSimulcastLayers; info.supports_native_handle = encoder_.supportsNativeHandle; info.is_hardware_accelerated = true; return info; @@ -111,35 +123,43 @@ class ObjCVideoEncoder : public VideoEncoder { const std::string implementation_name_; }; -class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface { +class ObjcVideoEncoderSelector + : public VideoEncoderFactory::EncoderSelectorInterface { public: - ObjcVideoEncoderSelector(id selector) { + ObjcVideoEncoderSelector( + id selector) { selector_ = selector; } void OnCurrentEncoder(const SdpVideoFormat &format) override { - RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) + alloc] initWithNativeSdpVideoFormat:format]; [selector_ registerCurrentEncoderInfo:info]; } std::optional OnEncoderBroken() override { - RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = + [selector_ encoderForBrokenEncoder]; if (info) { return [info nativeSdpVideoFormat]; } return std::nullopt; } - std::optional OnAvailableBitrate(const DataRate &rate) override { - RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps()]; + std::optional OnAvailableBitrate( + const DataRate &rate) override { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = + [selector_ encoderForBitrate:rate.kbps()]; if (info) { return [info nativeSdpVideoFormat]; } return std::nullopt; } - std::optional OnResolutionChange(const RenderResolution &resolution) override { - if ([selector_ respondsToSelector:@selector(encoderForResolutionChangeBySize:)]) { + std::optional OnResolutionChange( + const RenderResolution &resolution) override { + if ([selector_ + respondsToSelector:@selector(encoderForResolutionChangeBySize:)]) { RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ - encoderForResolutionChangeBySize:CGSizeMake(resolution.Width(), resolution.Height())]; + encoderForResolutionChangeBySize:CGSizeMake(resolution.Width(), + resolution.Height())]; if (info) { return [info nativeSdpVideoFormat]; } @@ -159,13 +179,16 @@ ObjCVideoEncoderFactory::ObjCVideoEncoderFactory( ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {} -id ObjCVideoEncoderFactory::wrapped_encoder_factory() const { +id + ObjCVideoEncoderFactory::wrapped_encoder_factory() const { return encoder_factory_; } -std::vector ObjCVideoEncoderFactory::GetSupportedFormats() const { +std::vector ObjCVideoEncoderFactory::GetSupportedFormats() + const { std::vector supported_formats; - for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * + supportedCodec in [encoder_factory_ supportedCodecs]) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } @@ -173,10 +196,12 @@ std::vector ObjCVideoEncoderFactory::GetSupportedFormats() const return supported_formats; } -std::vector ObjCVideoEncoderFactory::GetImplementations() const { +std::vector ObjCVideoEncoderFactory::GetImplementations() + const { if ([encoder_factory_ 
respondsToSelector:@selector(implementations)]) { std::vector supported_formats; - for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) { + for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * + supportedCodec in [encoder_factory_ implementations]) { SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat]; supported_formats.push_back(format); } @@ -186,31 +211,37 @@ std::vector ObjCVideoEncoderFactory::GetImplementations() const } VideoEncoderFactory::CodecSupport ObjCVideoEncoderFactory::QueryCodecSupport( - const SdpVideoFormat &format, std::optional scalability_mode) const { - if ([encoder_factory_ respondsToSelector:@selector(queryCodecSupport:scalabilityMode:)]) { - RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format]; + const SdpVideoFormat &format, + std::optional scalability_mode) const { + if ([encoder_factory_ respondsToSelector:@selector(queryCodecSupport: + scalabilityMode:)]) { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) + alloc] initWithNativeSdpVideoFormat:format]; NSString *mode; if (scalability_mode.has_value()) { mode = [NSString stringForAbslStringView:*scalability_mode]; } - RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) *result = [encoder_factory_ queryCodecSupport:info - scalabilityMode:mode]; - return {.is_supported = result.isSupported, .is_power_efficient = result.isPowerEfficient}; + RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) *result = + [encoder_factory_ queryCodecSupport:info scalabilityMode:mode]; + return {.is_supported = result.isSupported, + .is_power_efficient = result.isPowerEfficient}; } // Use default implementation. return VideoEncoderFactory::QueryCodecSupport(format, scalability_mode); } -std::unique_ptr ObjCVideoEncoderFactory::Create(const Environment &env, - const SdpVideoFormat &format) { - RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format]; - id encoder = [encoder_factory_ createEncoder:info]; - if ([encoder conformsToProtocol:@protocol(RTC_OBJC_TYPE(RTCNativeVideoEncoderBuilder))]) { - return [((id)encoder) build:env]; +std::unique_ptr ObjCVideoEncoderFactory::Create( + const Environment &env, const SdpVideoFormat &format) { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) + alloc] initWithNativeSdpVideoFormat:format]; + id encoder = + [encoder_factory_ createEncoder:info]; + if ([encoder conformsToProtocol:@protocol(RTC_OBJC_TYPE( + RTCNativeVideoEncoderBuilder))]) { + return + [((id)encoder) build:env]; } else { return std::make_unique(encoder); } @@ -219,7 +250,8 @@ std::unique_ptr ObjCVideoEncoderFactory::Create(const Environment std::unique_ptr ObjCVideoEncoderFactory::GetEncoderSelector() const { if ([encoder_factory_ respondsToSelector:@selector(encoderSelector)]) { - id selector = [encoder_factory_ encoderSelector]; + id selector = + [encoder_factory_ encoderSelector]; if (selector) { return absl::make_unique(selector); } diff --git a/sdk/objc/native/src/objc_video_frame.mm b/sdk/objc/native/src/objc_video_frame.mm index ff07dc8552..82f35ef104 100644 --- a/sdk/objc/native/src/objc_video_frame.mm +++ b/sdk/objc/native/src/objc_video_frame.mm @@ -16,10 +16,11 @@ namespace webrtc { RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) { - RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] - initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) - 
rotation:RTCVideoRotation(frame.rotation()) - timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] + initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) + rotation:RTCVideoRotation(frame.rotation()) + timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; videoFrame.timeStamp = frame.rtp_timestamp(); return videoFrame; diff --git a/sdk/objc/native/src/objc_video_renderer.mm b/sdk/objc/native/src/objc_video_renderer.mm index c519fd9f5e..a93690e1bd 100644 --- a/sdk/objc/native/src/objc_video_renderer.mm +++ b/sdk/objc/native/src/objc_video_renderer.mm @@ -18,7 +18,8 @@ namespace webrtc { -ObjCVideoRenderer::ObjCVideoRenderer(id renderer) +ObjCVideoRenderer::ObjCVideoRenderer( + id renderer) : renderer_(renderer), size_(CGSizeZero) {} void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) { diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 02ab0ebba4..6ef6e18f21 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -19,7 +19,8 @@ RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter : NSObject +@interface RTCObjCVideoSourceAdapter + : NSObject @end namespace webrtc { diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 9da0d9a4e3..e6289a98e8 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -40,7 +40,8 @@ ObjCVideoTrackSource::ObjCVideoTrackSource(bool is_screencast) : AdaptedVideoTrackSource(/* required resolution alignment */ 2), is_screencast_(is_screencast) {} -ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) { +ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) + : adapter_(adapter) { adapter_.objCVideoTrackSource = this; } @@ -60,12 +61,16 @@ bool ObjCVideoTrackSource::remote() const { return false; } -void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) { - cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0); +void ObjCVideoTrackSource::OnOutputFormatRequest(int width, + int height, + int fps) { + cricket::VideoFormat format( + width, height, cricket::VideoFormat::FpsToInterval(fps), 0); video_adapter()->OnOutputFormatRequest(format); } -void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) { +void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * + frame) { const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec; const int64_t translated_timestamp_us = timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros()); @@ -92,24 +97,28 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) if (adapted_width == frame.width && adapted_height == frame.height) { // No adaption - optimized path. buffer = rtc::make_ref_counted(frame.buffer); - } else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + } else if ([frame.buffer + isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { // Adapted CVPixelBuffer frame. 
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; - buffer = rtc::make_ref_counted([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] - initWithPixelBuffer:rtcPixelBuffer.pixelBuffer - adaptedWidth:adapted_width - adaptedHeight:adapted_height - cropWidth:crop_width - cropHeight:crop_height - cropX:crop_x + rtcPixelBuffer.cropX - cropY:crop_y + rtcPixelBuffer.cropY]); + buffer = + rtc::make_ref_counted([[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:rtcPixelBuffer.pixelBuffer + adaptedWidth:adapted_width + adaptedHeight:adapted_height + cropWidth:crop_width + cropHeight:crop_height + cropX:crop_x + rtcPixelBuffer.cropX + cropY:crop_y + rtcPixelBuffer.cropY]); } else { // Adapted I420 frame. // TODO(magjed): Optimize this I420 path. - rtc::scoped_refptr i420_buffer = I420Buffer::Create(adapted_width, adapted_height); + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(adapted_width, adapted_height); buffer = rtc::make_ref_counted(frame.buffer); - i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height); + i420_buffer->CropAndScaleFrom( + *buffer->ToI420(), crop_x, crop_y, crop_width, crop_height); buffer = i420_buffer; } diff --git a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm index 4c8bf348f4..804d434e24 100644 --- a/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm +++ b/sdk/objc/unittests/ObjCVideoTrackSource_xctest.mm @@ -28,7 +28,8 @@ typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *); namespace { -class ObjCCallbackVideoSink : public rtc::VideoSinkInterface { +class ObjCCallbackVideoSink + : public rtc::VideoSinkInterface { public: ObjCCallbackVideoSink(VideoSinkCallback callback) : callback_(callback) {} @@ -59,11 +60,15 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFrameAdaptsFrame { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer @@ -72,7 +77,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); @@ -86,16 +92,20 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface } - (void)testOnCapturedFrameAdaptsFrameWithAlignment { - // Requesting to adapt 1280x720 to 912x514 gives 639x360 without alignment. The 639 causes issues - // with some hardware encoders (e.g. HEVC) so in this test we verify that the alignment is set and - // respected. + // Requesting to adapt 1280x720 to 912x514 gives 639x360 without alignment. + // The 639 causes issues with some hardware encoders (e.g. 
HEVC) so in this + // test we verify that the alignment is set and respected. CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer @@ -104,7 +114,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants); _video_source->OnOutputFormatRequest(912, 514, 30); @@ -118,14 +129,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface } - (void)testOnCapturedFrameAdaptationResultsInCommonResolutions { - // Some of the most common resolutions used in the wild are 640x360, 480x270 and 320x180. - // Make sure that we properly scale down to exactly these resolutions. + // Some of the most common resolutions used in the wild are 640x360, 480x270 + // and 320x180. Make sure that we properly scale down to exactly these + // resolutions. CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer @@ -134,7 +150,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer(); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); @@ -163,32 +180,40 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFrameWithoutAdaptation { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 360, + 640, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 
timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { - XCTAssertEqual(frame.width, outputFrame.width); - XCTAssertEqual(frame.height, outputFrame.height); + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink( + ^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTAssertEqual(frame.width, outputFrame.width); + XCTAssertEqual(frame.height, outputFrame.height); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; - XCTAssertEqual(buffer.cropX, outputBuffer.cropX); - XCTAssertEqual(buffer.cropY, outputBuffer.cropY); - XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; + XCTAssertEqual(buffer.cropX, outputBuffer.cropX); + XCTAssertEqual(buffer.cropY, outputBuffer.cropY); + XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); - [callbackExpectation fulfill]; - }); + [callbackExpectation fulfill]; + }); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); _video_source->OnCapturedFrame(frame); @@ -199,32 +224,40 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFrameCVPixelBufferNeedsAdaptation { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { - XCTAssertEqual(outputFrame.width, 360); - XCTAssertEqual(outputFrame.height, 640); + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink( + ^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTAssertEqual(outputFrame.width, 360); + XCTAssertEqual(outputFrame.height, 640); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; - XCTAssertEqual(outputBuffer.cropX, 0); - XCTAssertEqual(outputBuffer.cropY, 0); - XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; + XCTAssertEqual(outputBuffer.cropX, 0); + XCTAssertEqual(outputBuffer.cropY, 0); + XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); - [callbackExpectation fulfill]; - }); + [callbackExpectation fulfill]; + }); const rtc::VideoSinkWants video_sink_wants; - 
rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); _video_source->OnCapturedFrame(frame); @@ -235,32 +268,40 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFrameCVPixelBufferNeedsCropping { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 380, + 640, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { - XCTAssertEqual(outputFrame.width, 360); - XCTAssertEqual(outputFrame.height, 640); + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink( + ^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTAssertEqual(outputFrame.width, 360); + XCTAssertEqual(outputFrame.height, 640); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; - XCTAssertEqual(outputBuffer.cropX, 10); - XCTAssertEqual(outputBuffer.cropY, 0); - XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; + XCTAssertEqual(outputBuffer.cropX, 10); + XCTAssertEqual(outputBuffer.cropY, 0); + XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); - [callbackExpectation fulfill]; - }); + [callbackExpectation fulfill]; + }); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); _video_source->OnCapturedFrame(frame); @@ -271,41 +312,49 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFramePreAdaptedCVPixelBufferNeedsAdaptation { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); // Create a frame that's already adapted down. 
- RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:640 - adaptedHeight:360 - cropWidth:720 - cropHeight:1280 - cropX:0 - cropY:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:640 + adaptedHeight:360 + cropWidth:720 + cropHeight:1280 + cropX:0 + cropY:0]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { - XCTAssertEqual(outputFrame.width, 480); - XCTAssertEqual(outputFrame.height, 270); + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink( + ^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTAssertEqual(outputFrame.width, 480); + XCTAssertEqual(outputFrame.height, 270); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; - XCTAssertEqual(outputBuffer.cropX, 0); - XCTAssertEqual(outputBuffer.cropY, 0); - XCTAssertEqual(outputBuffer.cropWidth, 640); - XCTAssertEqual(outputBuffer.cropHeight, 360); - XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; + XCTAssertEqual(outputBuffer.cropX, 0); + XCTAssertEqual(outputBuffer.cropY, 0); + XCTAssertEqual(outputBuffer.cropWidth, 640); + XCTAssertEqual(outputBuffer.cropHeight, 360); + XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); - [callbackExpectation fulfill]; - }); + [callbackExpectation fulfill]; + }); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(480, 270, 30); _video_source->OnCapturedFrame(frame); @@ -316,40 +365,48 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFramePreCroppedCVPixelBufferNeedsCropping { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 380, + 640, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:370 - adaptedHeight:640 - cropWidth:370 - cropHeight:640 - cropX:10 - cropY:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:370 + adaptedHeight:640 + cropWidth:370 + cropHeight:640 + cropX:10 + cropY:0]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { - XCTAssertEqual(outputFrame.width, 360); - 
XCTAssertEqual(outputFrame.height, 640); + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink( + ^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTAssertEqual(outputFrame.width, 360); + XCTAssertEqual(outputFrame.height, 640); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; - XCTAssertEqual(outputBuffer.cropX, 14); - XCTAssertEqual(outputBuffer.cropY, 0); - XCTAssertEqual(outputBuffer.cropWidth, 360); - XCTAssertEqual(outputBuffer.cropHeight, 640); - XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; + XCTAssertEqual(outputBuffer.cropX, 14); + XCTAssertEqual(outputBuffer.cropY, 0); + XCTAssertEqual(outputBuffer.cropWidth, 360); + XCTAssertEqual(outputBuffer.cropHeight, 640); + XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); - [callbackExpectation fulfill]; - }); + [callbackExpectation fulfill]; + }); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); _video_source->OnCapturedFrame(frame); @@ -360,40 +417,48 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface - (void)testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 380, + 640, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:300 - adaptedHeight:640 - cropWidth:300 - cropHeight:640 - cropX:40 - cropY:0]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:300 + adaptedHeight:640 + cropWidth:300 + cropHeight:640 + cropX:40 + cropY:0]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer rotation:RTCVideoRotation_0 timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { - XCTAssertEqual(outputFrame.width, 300); - XCTAssertEqual(outputFrame.height, 534); + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink( + ^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTAssertEqual(outputFrame.width, 300); + XCTAssertEqual(outputFrame.height, 534); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; - XCTAssertEqual(outputBuffer.cropX, 40); - XCTAssertEqual(outputBuffer.cropY, 52); - XCTAssertEqual(outputBuffer.cropWidth, 300); - XCTAssertEqual(outputBuffer.cropHeight, 534); - XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer; + XCTAssertEqual(outputBuffer.cropX, 40); + XCTAssertEqual(outputBuffer.cropY, 52); + 
XCTAssertEqual(outputBuffer.cropWidth, 300); + XCTAssertEqual(outputBuffer.cropHeight, 534); + XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer); - [callbackExpectation fulfill]; - }); + [callbackExpectation fulfill]; + }); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); _video_source->OnCapturedFrame(frame); @@ -403,7 +468,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface } - (void)testOnCapturedFrameI420BufferNeedsAdaptation { - rtc::scoped_refptr i420Buffer = CreateI420Gradient(720, 1280); + rtc::scoped_refptr i420Buffer = + CreateI420Gradient(720, 1280); RTC_OBJC_TYPE(RTCI420Buffer) *buffer = [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = @@ -411,22 +477,28 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface rotation:RTCVideoRotation_0 timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * + outputFrame) { XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; + RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = + (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; - double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); + double psnr = + I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); [callbackExpectation fulfill]; }); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); _video_source->OnCapturedFrame(frame); @@ -435,7 +507,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface } - (void)testOnCapturedFrameI420BufferNeedsCropping { - rtc::scoped_refptr i420Buffer = CreateI420Gradient(380, 640); + rtc::scoped_refptr i420Buffer = + CreateI420Gradient(380, 640); RTC_OBJC_TYPE(RTCI420Buffer) *buffer = [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer]; RTC_OBJC_TYPE(RTCVideoFrame) *frame = @@ -443,22 +516,28 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface rotation:RTCVideoRotation_0 timeStampNs:0]; - XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"]; - ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) { + XCTestExpectation *callbackExpectation = + [self expectationWithDescription:@"videoSinkCallback"]; + ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * + outputFrame) 
{ XCTAssertEqual(outputFrame.width, 360); XCTAssertEqual(outputFrame.height, 640); - RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; + RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = + (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer; - double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); + double psnr = + I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]); XCTAssertGreaterThanOrEqual(psnr, 40); [callbackExpectation fulfill]; }); const rtc::VideoSinkWants video_sink_wants; - rtc::VideoSourceInterface *video_source_interface = _video_source.get(); - video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants); + rtc::VideoSourceInterface *video_source_interface = + _video_source.get(); + video_source_interface->AddOrUpdateSink(&callback_video_sink, + video_sink_wants); _video_source->OnOutputFormatRequest(640, 360, 30); _video_source->OnCapturedFrame(frame); diff --git a/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm b/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm index f7439f39f9..f032214b28 100644 --- a/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm +++ b/sdk/objc/unittests/RTCAudioDeviceModule_xctest.mm @@ -18,30 +18,30 @@ #include "api/scoped_refptr.h" -typedef int32_t(^NeedMorePlayDataBlock)(const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, - void* audioSamples, - size_t& nSamplesOut, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms); +typedef int32_t (^NeedMorePlayDataBlock)(const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, + void *audioSamples, + size_t &nSamplesOut, + int64_t *elapsed_time_ms, + int64_t *ntp_time_ms); -typedef int32_t(^RecordedDataIsAvailableBlock)(const void* audioSamples, - const size_t nSamples, - const size_t nBytesPerSample, - const size_t nChannels, - const uint32_t samplesPerSec, - const uint32_t totalDelayMS, - const int32_t clockDrift, - const uint32_t currentMicLevel, - const bool keyPressed, - uint32_t& newMicLevel); +typedef int32_t (^RecordedDataIsAvailableBlock)(const void *audioSamples, + const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, + const uint32_t totalDelayMS, + const int32_t clockDrift, + const uint32_t currentMicLevel, + const bool keyPressed, + uint32_t &newMicLevel); - -// This class implements the AudioTransport API and forwards all methods to the appropriate blocks. +// This class implements the AudioTransport API and forwards all methods to the +// appropriate blocks. 
class MockAudioTransport : public webrtc::AudioTransport { -public: + public: MockAudioTransport() {} ~MockAudioTransport() override {} @@ -57,10 +57,10 @@ public: const size_t nBytesPerSample, const size_t nChannels, const uint32_t samplesPerSec, - void* audioSamples, - size_t& nSamplesOut, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) override { + void *audioSamples, + size_t &nSamplesOut, + int64_t *elapsed_time_ms, + int64_t *ntp_time_ms) override { return needMorePlayDataBlock(nSamples, nBytesPerSample, nChannels, @@ -71,7 +71,7 @@ public: ntp_time_ms); } - int32_t RecordedDataIsAvailable(const void* audioSamples, + int32_t RecordedDataIsAvailable(const void *audioSamples, const size_t nSamples, const size_t nBytesPerSample, const size_t nChannels, @@ -80,7 +80,7 @@ public: const int32_t clockDrift, const uint32_t currentMicLevel, const bool keyPressed, - uint32_t& newMicLevel) override { + uint32_t &newMicLevel) override { return recordedDataIsAvailableBlock(audioSamples, nSamples, nBytesPerSample, @@ -97,9 +97,9 @@ public: int sample_rate, size_t number_of_channels, size_t number_of_frames, - void* audio_data, - int64_t* elapsed_time_ms, - int64_t* ntp_time_ms) override {} + void *audio_data, + int64_t *elapsed_time_ms, + int64_t *ntp_time_ms) override {} private: NeedMorePlayDataBlock needMorePlayDataBlock; @@ -158,8 +158,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; audioDeviceModule = webrtc::CreateAudioDeviceModule(); XCTAssertEqual(0, audioDeviceModule->Init()); - XCTAssertEqual(0, audioDeviceModule->GetPlayoutAudioParameters(&playoutParameters)); - XCTAssertEqual(0, audioDeviceModule->GetRecordAudioParameters(&recordParameters)); + XCTAssertEqual( + 0, audioDeviceModule->GetPlayoutAudioParameters(&playoutParameters)); + XCTAssertEqual( + 0, audioDeviceModule->GetRecordAudioParameters(&recordParameters)); } - (void)tearDown { @@ -181,7 +183,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; XCTAssertFalse(audioDeviceModule->Playing()); } -- (void)startRecording{ +- (void)startRecording { XCTAssertFalse(audioDeviceModule->Recording()); XCTAssertEqual(0, audioDeviceModule->InitRecording()); XCTAssertTrue(audioDeviceModule->RecordingIsInitialized()); @@ -189,15 +191,18 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; XCTAssertTrue(audioDeviceModule->Recording()); } -- (void)stopRecording{ +- (void)stopRecording { XCTAssertEqual(0, audioDeviceModule->StopRecording()); XCTAssertFalse(audioDeviceModule->Recording()); } -- (NSURL*)fileURLForSampleRate:(int)sampleRate { - XCTAssertTrue(sampleRate == 48000 || sampleRate == 44100 || sampleRate == 16000); - NSString *filename = [NSString stringWithFormat:@"audio_short%d", sampleRate / 1000]; - NSURL *url = [[NSBundle mainBundle] URLForResource:filename withExtension:@"pcm"]; +- (NSURL *)fileURLForSampleRate:(int)sampleRate { + XCTAssertTrue(sampleRate == 48000 || sampleRate == 44100 || + sampleRate == 16000); + NSString *filename = + [NSString stringWithFormat:@"audio_short%d", sampleRate / 1000]; + NSURL *url = [[NSBundle mainBundle] URLForResource:filename + withExtension:@"pcm"]; XCTAssertNotNil(url); return url; @@ -279,7 +284,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; XCTAssertEqual(nBytesPerSample, kBytesPerSample); XCTAssertEqual(nChannels, self.playoutParameters.channels()); XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate()); - XCTAssertNotEqual((void*)NULL, audioSamples); + XCTAssertNotEqual((void *)NULL, audioSamples); return 0; }); @@ -299,7 +304,8 @@ 
static const NSUInteger kNumIgnoreFirstCallbacks = 50; // Passing this test ensures that initialization of the second audio unit // has been done successfully and that there is no conflict with the already // playing first ADM. - XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"]; + XCTestExpectation *playoutExpectation = + [self expectationWithDescription:@"NeedMorePlayoutData"]; __block int num_callbacks = 0; MockAudioTransport mock2; @@ -316,7 +322,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; XCTAssertEqual(nBytesPerSample, kBytesPerSample); XCTAssertEqual(nChannels, self.playoutParameters.channels()); XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate()); - XCTAssertNotEqual((void*)NULL, audioSamples); + XCTAssertNotEqual((void *)NULL, audioSamples); if (++num_callbacks == kNumCallbacks) { [playoutExpectation fulfill]; } @@ -340,7 +346,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; // audio samples to play out using the NeedMorePlayData callback. - (void)testStartPlayoutVerifyCallbacks { XCTSkipIf(!_testEnabled); - XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"]; + XCTestExpectation *playoutExpectation = + [self expectationWithDescription:@"NeedMorePlayoutData"]; __block int num_callbacks = 0; mock.expectNeedMorePlayData(^int32_t(const size_t nSamples, const size_t nBytesPerSample, @@ -355,7 +362,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; XCTAssertEqual(nBytesPerSample, kBytesPerSample); XCTAssertEqual(nChannels, self.playoutParameters.channels()); XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate()); - XCTAssertNotEqual((void*)NULL, audioSamples); + XCTAssertNotEqual((void *)NULL, audioSamples); if (++num_callbacks == kNumCallbacks) { [playoutExpectation fulfill]; } @@ -374,10 +381,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; - (void)testStartRecordingVerifyCallbacks { XCTSkipIf(!_testEnabled); XCTestExpectation *recordExpectation = - [self expectationWithDescription:@"RecordedDataIsAvailable"]; + [self expectationWithDescription:@"RecordedDataIsAvailable"]; __block int num_callbacks = 0; - mock.expectRecordedDataIsAvailable(^(const void* audioSamples, + mock.expectRecordedDataIsAvailable(^(const void *audioSamples, const size_t nSamples, const size_t nBytesPerSample, const size_t nChannels, @@ -386,8 +393,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; const int32_t clockDrift, const uint32_t currentMicLevel, const bool keyPressed, - uint32_t& newMicLevel) { - XCTAssertNotEqual((void*)NULL, audioSamples); + uint32_t &newMicLevel) { + XCTAssertNotEqual((void *)NULL, audioSamples); XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer()); XCTAssertEqual(nBytesPerSample, kBytesPerSample); XCTAssertEqual(nChannels, self.recordParameters.channels()); @@ -412,11 +419,12 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; // active in both directions. 
- (void)testStartPlayoutAndRecordingVerifyCallbacks { XCTSkipIf(!_testEnabled); - XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"]; + XCTestExpectation *playoutExpectation = + [self expectationWithDescription:@"NeedMorePlayoutData"]; __block NSUInteger callbackCount = 0; XCTestExpectation *recordExpectation = - [self expectationWithDescription:@"RecordedDataIsAvailable"]; + [self expectationWithDescription:@"RecordedDataIsAvailable"]; recordExpectation.expectedFulfillmentCount = kNumCallbacks; mock.expectNeedMorePlayData(^int32_t(const size_t nSamples, @@ -432,7 +440,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; XCTAssertEqual(nBytesPerSample, kBytesPerSample); XCTAssertEqual(nChannels, self.playoutParameters.channels()); XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate()); - XCTAssertNotEqual((void*)NULL, audioSamples); + XCTAssertNotEqual((void *)NULL, audioSamples); if (callbackCount++ >= kNumCallbacks) { [playoutExpectation fulfill]; } @@ -440,7 +448,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; return 0; }); - mock.expectRecordedDataIsAvailable(^(const void* audioSamples, + mock.expectRecordedDataIsAvailable(^(const void *audioSamples, const size_t nSamples, const size_t nBytesPerSample, const size_t nChannels, @@ -449,8 +457,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; const int32_t clockDrift, const uint32_t currentMicLevel, const bool keyPressed, - uint32_t& newMicLevel) { - XCTAssertNotEqual((void*)NULL, audioSamples); + uint32_t &newMicLevel) { + XCTAssertNotEqual((void *)NULL, audioSamples); XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer()); XCTAssertEqual(nBytesPerSample, kBytesPerSample); XCTAssertEqual(nChannels, self.recordParameters.channels()); @@ -479,7 +487,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; XCTAssertEqual(1u, playoutParameters.channels()); // Using XCTestExpectation to count callbacks is very slow. 
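For reference, XCTest supports two ways of waiting for N occurrences of a high-frequency callback, and both appear in this file: setting expectedFulfillmentCount and fulfilling on every callback, or keeping a __block counter and fulfilling once when the target is reached (the approach the comment above prefers as the cheaper one). A minimal sketch of the two patterns; kNumCallbacks and the callback body are placeholders, not code from this CL:

  // Option A: fulfill on every callback and let XCTest do the counting.
  XCTestExpectation *recordExpectation =
      [self expectationWithDescription:@"RecordedDataIsAvailable"];
  recordExpectation.expectedFulfillmentCount = kNumCallbacks;
  // ... call [recordExpectation fulfill] from the audio callback ...

  // Option B: count in a __block variable and fulfill exactly once.
  XCTestExpectation *playoutExpectation =
      [self expectationWithDescription:@"NeedMorePlayoutData"];
  __block int numCallbacks = 0;
  // ... inside the audio callback:
  //     if (++numCallbacks == kNumCallbacks) { [playoutExpectation fulfill]; }

  [self waitForExpectationsWithTimeout:10.0 handler:nil];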
- XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"]; + XCTestExpectation *playoutExpectation = + [self expectationWithDescription:@"NeedMorePlayoutData"]; const int expectedCallbackCount = kFilePlayTimeInSec * kNumCallbacksPerSecond; __block int callbackCount = 0; @@ -494,7 +503,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; size_t &nSamplesOut, int64_t *elapsed_time_ms, int64_t *ntp_time_ms) { - [inputStream read:(uint8_t *)audioSamples maxLength:nSamples*nBytesPerSample*nChannels]; + [inputStream read:(uint8_t *)audioSamples + maxLength:nSamples * nBytesPerSample * nChannels]; nSamplesOut = nSamples; if (callbackCount++ == expectedCallbackCount) { [playoutExpectation fulfill]; @@ -533,11 +543,14 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; - (void)testRunPlayoutAndRecordingInFullDuplex { XCTSkipIf(!_testEnabled); XCTAssertEqual(recordParameters.channels(), playoutParameters.channels()); - XCTAssertEqual(recordParameters.sample_rate(), playoutParameters.sample_rate()); + XCTAssertEqual(recordParameters.sample_rate(), + playoutParameters.sample_rate()); - XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"]; + XCTestExpectation *playoutExpectation = + [self expectationWithDescription:@"NeedMorePlayoutData"]; __block NSUInteger playoutCallbacks = 0; - NSUInteger expectedPlayoutCallbacks = kFullDuplexTimeInSec * kNumCallbacksPerSecond; + NSUInteger expectedPlayoutCallbacks = + kFullDuplexTimeInSec * kNumCallbacksPerSecond; // FIFO queue and measurements NSMutableArray *fifoBuffer = [NSMutableArray arrayWithCapacity:20]; @@ -545,7 +558,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; __block NSUInteger fifoTotalWrittenElements = 0; __block NSUInteger fifoWriteCount = 0; - mock.expectRecordedDataIsAvailable(^(const void* audioSamples, + mock.expectRecordedDataIsAvailable(^(const void *audioSamples, const size_t nSamples, const size_t nBytesPerSample, const size_t nChannels, @@ -554,12 +567,14 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; const int32_t clockDrift, const uint32_t currentMicLevel, const bool keyPressed, - uint32_t& newMicLevel) { + uint32_t &newMicLevel) { if (fifoWriteCount++ < kNumIgnoreFirstCallbacks) { return 0; } - NSData *data = [NSData dataWithBytes:audioSamples length:nSamples*nBytesPerSample*nChannels]; + NSData *data = + [NSData dataWithBytes:audioSamples + length:nSamples * nBytesPerSample * nChannels]; @synchronized(fifoBuffer) { [fifoBuffer addObject:data]; fifoMaxSize = MAX(fifoMaxSize, fifoBuffer.count); @@ -587,9 +602,9 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; } if (data) { - memcpy(audioSamples, (char*) data.bytes, data.length); + memcpy(audioSamples, (char *)data.bytes, data.length); } else { - memset(audioSamples, 0, nSamples*nBytesPerSample*nChannels); + memset(audioSamples, 0, nSamples * nBytesPerSample * nChannels); } if (playoutCallbacks++ == expectedPlayoutCallbacks) { @@ -604,10 +619,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50; NSTimeInterval waitTimeout = kFullDuplexTimeInSec * 2.0; [self waitForExpectationsWithTimeout:waitTimeout handler:nil]; - size_t fifoAverageSize = - (fifoTotalWrittenElements == 0) - ? 0.0 - : 0.5 + (double)fifoTotalWrittenElements / (fifoWriteCount - kNumIgnoreFirstCallbacks); + size_t fifoAverageSize = (fifoTotalWrittenElements == 0) ? 
0.0 : + 0.5 + + (double)fifoTotalWrittenElements / + (fifoWriteCount - kNumIgnoreFirstCallbacks); [self stopPlayout]; [self stopRecording]; diff --git a/sdk/objc/unittests/RTCAudioDevice_xctest.mm b/sdk/objc/unittests/RTCAudioDevice_xctest.mm index 25ce8f34a8..299e5cb5d9 100644 --- a/sdk/objc/unittests/RTCAudioDevice_xctest.mm +++ b/sdk/objc/unittests/RTCAudioDevice_xctest.mm @@ -47,12 +47,15 @@ _audioDeviceModule = webrtc::CreateAudioDeviceModule(); _audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS( - /*bypass_voice_processing=*/false, /*muted_speech_event_handler=*/nullptr)); + /*bypass_voice_processing=*/false, + /*muted_speech_event_handler=*/nullptr)); self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSError *error = nil; [self.audioSession lockForConfiguration]; - [self.audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error]; + [self.audioSession setCategory:AVAudioSessionCategoryPlayAndRecord + withOptions:0 + error:&error]; XCTAssertNil(error); [self.audioSession setMode:AVAudioSessionModeVoiceChat error:&error]; @@ -75,27 +78,33 @@ // Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly // after an iOS AVAudioSessionInterruptionTypeEnded notification event. -// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted notifications by: +// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted +// notifications by: // - In AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_ // callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list. -// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted notification, it +// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted +// notification, it // passes the notification to callbacks in its delegate list which sets // AudioDeviceIOS's is_interrupted_ flag to true. // - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its // audio_session_observer_ callback is removed from RTCAudioSessions's // delegate list. -// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption notification, -// AudioDeviceIOS is not notified as its callback is not in RTC_OBJC_TYPE(RTCAudioSession)'s -// delegate list. This causes AudioDeviceIOS's is_interrupted_ flag to be in -// the wrong (true) state and the audio session will ignore audio changes. -// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to initialize -// AudioDeviceIOS's is_interrupted_ flag to RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted -// flag in AudioDeviceIOS.InitPlayOrRecord. +// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption +// notification, AudioDeviceIOS is not notified as its callback is not in +// RTC_OBJC_TYPE(RTCAudioSession)'s delegate list. This causes +// AudioDeviceIOS's is_interrupted_ flag to be in the wrong (true) state and +// the audio session will ignore audio changes. +// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is +// to initialize AudioDeviceIOS's is_interrupted_ flag to +// RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted flag in +// AudioDeviceIOS.InitPlayOrRecord. 
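Purely as an illustration of the fix the comment above describes (this is not the actual AudioDeviceIOS source, and the surrounding code is elided): the flag is seeded from RTC_OBJC_TYPE(RTCAudioSession)'s own interrupted state when playout or recording is initialized, so a missed end-of-interruption notification cannot leave it stuck at true.

  int32_t AudioDeviceIOS::InitPlayOrRecord() {
    RTC_OBJC_TYPE(RTCAudioSession) *session =
        [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
    // Seed from the session rather than assuming "not interrupted"; the
    // session keeps receiving interruption notifications even while this
    // object's observer is not registered.
    is_interrupted_ = session.isInterrupted;
    // ... register audio_session_observer_ with the session's delegate list ...
    return 0;
  }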
- (void)testInterruptedAudioSession { XCTSkipIf(!_testEnabled); XCTAssertTrue(self.audioSession.isActive); - XCTAssertTrue([self.audioSession.category isEqual:AVAudioSessionCategoryPlayAndRecord] || - [self.audioSession.category isEqual:AVAudioSessionCategoryPlayback]); + XCTAssertTrue( + [self.audioSession.category + isEqual:AVAudioSessionCategoryPlayAndRecord] || + [self.audioSession.category isEqual:AVAudioSessionCategoryPlayback]); XCTAssertEqual(AVAudioSessionModeVoiceChat, self.audioSession.mode); std::unique_ptr task_queue_factory = @@ -103,7 +112,8 @@ std::unique_ptr audio_buffer; audio_buffer.reset(new webrtc::AudioDeviceBuffer(task_queue_factory.get())); _audio_device->AttachAudioBuffer(audio_buffer.get()); - XCTAssertEqual(webrtc::AudioDeviceGeneric::InitStatus::OK, _audio_device->Init()); + XCTAssertEqual(webrtc::AudioDeviceGeneric::InitStatus::OK, + _audio_device->Init()); XCTAssertEqual(0, _audio_device->InitPlayout()); XCTAssertEqual(0, _audio_device->StartPlayout()); @@ -128,9 +138,11 @@ } - (void)testMuteSpeechHandlerCalledWithStartedWhenSpeechActivityHasStarted { - XCTestExpectation *handlerExpectation = [self expectationWithDescription:@"mutedSpeechHandler"]; - webrtc::AudioDeviceModule::MutedSpeechEventHandler muted_speech_event_handler = - ^void(webrtc::AudioDeviceModule::MutedSpeechEvent event) { + XCTestExpectation *handlerExpectation = + [self expectationWithDescription:@"mutedSpeechHandler"]; + webrtc::AudioDeviceModule::MutedSpeechEventHandler + muted_speech_event_handler = ^void( + webrtc::AudioDeviceModule::MutedSpeechEvent event) { XCTAssertEqual(event, webrtc::AudioDeviceModule::kMutedSpeechStarted); [handlerExpectation fulfill]; }; @@ -139,22 +151,26 @@ /*bypass_voice_processing=*/false, /*muted_speech_event_handler=*/muted_speech_event_handler)); - _audio_device->OnReceivedMutedSpeechActivity(kAUVoiceIOSpeechActivityHasStarted); + _audio_device->OnReceivedMutedSpeechActivity( + kAUVoiceIOSpeechActivityHasStarted); [self waitForExpectations:@[ handlerExpectation ] timeout:10.0]; } - (void)testMuteSpeechHandlerCalledWithEndedWhenSpeechActivityHasEnded { - XCTestExpectation *handlerExpectation = [self expectationWithDescription:@"mutedSpeechHandler"]; - webrtc::AudioDeviceModule::MutedSpeechEventHandler muted_speech_event_handler = - ^void(webrtc::AudioDeviceModule::MutedSpeechEvent event) { - XCTAssertEqual(event, webrtc::AudioDeviceModule::kMutedSpeechEnded); - [handlerExpectation fulfill]; - }; + XCTestExpectation *handlerExpectation = + [self expectationWithDescription:@"mutedSpeechHandler"]; + webrtc::AudioDeviceModule::MutedSpeechEventHandler + muted_speech_event_handler = + ^void(webrtc::AudioDeviceModule::MutedSpeechEvent event) { + XCTAssertEqual(event, webrtc::AudioDeviceModule::kMutedSpeechEnded); + [handlerExpectation fulfill]; + }; _audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS( /*bypass_voice_processing=*/false, /*muted_speech_event_handler=*/muted_speech_event_handler)); - _audio_device->OnReceivedMutedSpeechActivity(kAUVoiceIOSpeechActivityHasEnded); + _audio_device->OnReceivedMutedSpeechActivity( + kAUVoiceIOSpeechActivityHasEnded); [self waitForExpectations:@[ handlerExpectation ] timeout:10.0]; } diff --git a/sdk/objc/unittests/RTCAudioSessionTest.mm b/sdk/objc/unittests/RTCAudioSessionTest.mm index e0ba52bcca..aa448281b1 100644 --- a/sdk/objc/unittests/RTCAudioSessionTest.mm +++ b/sdk/objc/unittests/RTCAudioSessionTest.mm @@ -25,8 +25,8 @@ @interface RTC_OBJC_TYPE (RTCAudioSession) (UnitTesting) - @property(nonatomic, - readonly) 
std::vector<__weak id > delegates; + @property(nonatomic, readonly) std::vector< + __weak id > delegates; - (instancetype)initWithAudioSession:(id)audioSession; @@ -34,7 +34,7 @@ @interface MockAVAudioSession : NSObject -@property (nonatomic, readwrite, assign) float outputVolume; +@property(nonatomic, readwrite, assign) float outputVolume; @end @@ -42,9 +42,10 @@ @synthesize outputVolume = _outputVolume; @end -@interface RTCAudioSessionTestDelegate : NSObject +@interface RTCAudioSessionTestDelegate + : NSObject -@property (nonatomic, readonly) float outputVolume; +@property(nonatomic, readonly) float outputVolume; @end @@ -60,7 +61,8 @@ return self; } -- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session { +- (void)audioSessionDidBeginInterruption: + (RTC_OBJC_TYPE(RTCAudioSession) *)session { } - (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session @@ -69,10 +71,12 @@ - (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session reason:(AVAudioSessionRouteChangeReason)reason - previousRoute:(AVAudioSessionRouteDescription *)previousRoute { + previousRoute: + (AVAudioSessionRouteDescription *)previousRoute { } -- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session { +- (void)audioSessionMediaServerTerminated: + (RTC_OBJC_TYPE(RTCAudioSession) *)session { } - (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session { @@ -81,7 +85,8 @@ - (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session { } -- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session { +- (void)audioSessionShouldUnconfigure: + (RTC_OBJC_TYPE(RTCAudioSession) *)session { } - (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession @@ -101,14 +106,16 @@ - (instancetype)init { self = [super init]; if (self) { - RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session addDelegate:self]; } return self; } - (void)dealloc { - RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; [session removeDelegate:self]; } @@ -121,7 +128,8 @@ @implementation RTCAudioSessionTest - (void)testAddAndRemoveDelegates { - RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSMutableArray *delegates = [NSMutableArray array]; const size_t count = 5; for (size_t i = 0; i < count; ++i) { @@ -131,16 +139,16 @@ [delegates addObject:delegate]; EXPECT_EQ(i + 1, session.delegates.size()); } - [delegates enumerateObjectsUsingBlock:^(RTCAudioSessionTestDelegate *obj, - NSUInteger idx, - BOOL *stop) { + [delegates enumerateObjectsUsingBlock:^( + RTCAudioSessionTestDelegate *obj, NSUInteger idx, BOOL *stop) { [session removeDelegate:obj]; }]; EXPECT_EQ(0u, session.delegates.size()); } - (void)testPushDelegate { - RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; NSMutableArray *delegates = [NSMutableArray array]; const size_t count = 2; for (size_t i = 0; i < count; ++i) { @@ -173,7 +181,8 @@ // Tests that delegates added to the audio session properly zero 
out. This is // checking an implementation detail (that vectors of __weak work as expected). - (void)testZeroingWeakDelegate { - RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; @autoreleasepool { // Add a delegate to the session. There should be one delegate at this // point. @@ -201,12 +210,14 @@ [[RTCTestRemoveOnDeallocDelegate alloc] init]; EXPECT_TRUE(delegate); } - RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *session = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; EXPECT_EQ(0u, session.delegates.size()); } - (void)testAudioSessionActivation { - RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; + RTC_OBJC_TYPE(RTCAudioSession) *audioSession = + [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]; EXPECT_EQ(0, audioSession.activationCount); [audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]]; EXPECT_EQ(1, audioSession.activationCount); @@ -218,23 +229,27 @@ - (void)DISABLED_testConfigureWebRTCSession { NSError *error = nil; - void (^setActiveBlock)(NSInvocation *invocation) = ^(NSInvocation *invocation) { - __autoreleasing NSError **retError; - [invocation getArgument:&retError atIndex:4]; - *retError = [NSError errorWithDomain:@"AVAudioSession" - code:AVAudioSessionErrorCodeCannotInterruptOthers - userInfo:nil]; - BOOL failure = NO; - [invocation setReturnValue:&failure]; - }; + void (^setActiveBlock)(NSInvocation *invocation) = + ^(NSInvocation *invocation) { + __autoreleasing NSError **retError; + [invocation getArgument:&retError atIndex:4]; + *retError = [NSError + errorWithDomain:@"AVAudioSession" + code:AVAudioSessionErrorCodeCannotInterruptOthers + userInfo:nil]; + BOOL failure = NO; + [invocation setReturnValue:&failure]; + }; id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]); - OCMStub([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES - withOptions:0 - error:([OCMArg anyObjectRef])]) + OCMStub([[mockAVAudioSession ignoringNonObjectArgs] + setActive:YES + withOptions:0 + error:([OCMArg anyObjectRef])]) .andDo(setActiveBlock); - id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]); + id mockAudioSession = + OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]); OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession); RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession; @@ -242,9 +257,10 @@ [audioSession lockForConfiguration]; // configureWebRTCSession is forced to fail in the above mock interface, // so activationCount should remain 0 - OCMExpect([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES - withOptions:0 - error:([OCMArg anyObjectRef])]) + OCMExpect([[mockAVAudioSession ignoringNonObjectArgs] + setActive:YES + withOptions:0 + error:([OCMArg anyObjectRef])]) .andDo(setActiveBlock); OCMExpect([mockAudioSession session]).andReturn(mockAVAudioSession); EXPECT_FALSE([audioSession configureWebRTCSession:&error]); @@ -255,11 +271,17 @@ EXPECT_EQ(NO, [mockAVAudioSession setActive:YES withOptions:0 error:&error]); [audioSession unlockForConfiguration]; - // The -Wunused-value is a workaround for https://bugs.llvm.org/show_bug.cgi?id=45245 - _Pragma("clang diagnostic push") _Pragma("clang diagnostic ignored \"-Wunused-value\""); + // The -Wunused-value is a workaround for + // 
https://bugs.llvm.org/show_bug.cgi?id=45245 + _Pragma("clang diagnostic push") + _Pragma("clang diagnostic ignored \"-Wunused-value\""); OCMVerify([mockAudioSession session]); - OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES withOptions:0 error:&error]); - OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:NO withOptions:0 error:&error]); + OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES + withOptions:0 + error:&error]); + OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:NO + withOptions:0 + error:&error]); _Pragma("clang diagnostic pop"); [mockAVAudioSession stopMocking]; @@ -271,7 +293,8 @@ NSError *error = nil; id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]); - id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]); + id mockAudioSession = + OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]); OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession); RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession; @@ -291,7 +314,9 @@ }); waitLock.Wait(timeout); - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error]; + [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord + withOptions:0 + error:&error]; EXPECT_TRUE(error != nil); EXPECT_EQ(error.domain, kRTCAudioSessionErrorDomain); EXPECT_EQ(error.code, kRTCAudioSessionErrorLockRequired); @@ -304,8 +329,8 @@ - (void)testAudioVolumeDidNotify { MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init]; - RTC_OBJC_TYPE(RTCAudioSession) *session = - [[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession]; + RTC_OBJC_TYPE(RTCAudioSession) *session = [[RTC_OBJC_TYPE(RTCAudioSession) + alloc] initWithAudioSession:mockAVAudioSession]; RTCAudioSessionTestDelegate *delegate = [[RTCAudioSessionTestDelegate alloc] init]; [session addDelegate:delegate]; diff --git a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm index cf759c5243..0c74c5c533 100644 --- a/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm +++ b/sdk/objc/unittests/RTCCVPixelBuffer_xctest.mm @@ -34,27 +34,28 @@ struct ToI420WithCropAndScaleSetting { int scaleHeight; }; -constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] = { - ToI420WithCropAndScaleSetting{ - .inputWidth = 640, - .inputHeight = 360, - .offsetX = 0, - .offsetY = 0, - .cropWidth = 640, - .cropHeight = 360, - .scaleWidth = 320, - .scaleHeight = 180, - }, - ToI420WithCropAndScaleSetting{ - .inputWidth = 640, - .inputHeight = 360, - .offsetX = 160, - .offsetY = 90, - .cropWidth = 160, - .cropHeight = 90, - .scaleWidth = 320, - .scaleHeight = 180, - }, +constexpr const ToI420WithCropAndScaleSetting + kToI420WithCropAndScaleSettings[] = { + ToI420WithCropAndScaleSetting{ + .inputWidth = 640, + .inputHeight = 360, + .offsetX = 0, + .offsetY = 0, + .cropWidth = 640, + .cropHeight = 360, + .scaleWidth = 320, + .scaleHeight = 180, + }, + ToI420WithCropAndScaleSetting{ + .inputWidth = 640, + .inputHeight = 360, + .offsetX = 160, + .offsetY = 90, + .cropWidth = 160, + .cropHeight = 90, + .scaleWidth = 320, + .scaleHeight = 180, + }, }; } // namespace @@ -67,10 +68,14 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] - (void)testRequiresCroppingNoCrop { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, 
&pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertFalse([buffer requiresCropping]); @@ -79,16 +84,21 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] - (void)testRequiresCroppingWithCrop { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:720 - adaptedHeight:1280 - cropWidth:360 - cropHeight:640 - cropX:100 - cropY:100]; + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] + initWithPixelBuffer:pixelBufferRef + adaptedWidth:720 + adaptedHeight:1280 + cropWidth:360 + cropHeight:640 + cropX:100 + cropY:100]; XCTAssertTrue([croppedBuffer requiresCropping]); @@ -97,11 +107,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] - (void)testRequiresScalingNoScale { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]); CVBufferRelease(pixelBufferRef); @@ -109,11 +123,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] - (void)testRequiresScalingWithScale { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]); CVBufferRelease(pixelBufferRef); @@ -121,17 +139,21 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] - (void)testRequiresScalingWithScaleAndMatchingCrop { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef - adaptedWidth:720 - adaptedHeight:1280 - cropWidth:360 - cropHeight:640 - cropX:100 - cropY:100]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = 
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:720 + adaptedHeight:1280 + cropWidth:360 + cropHeight:640 + cropX:100 + cropY:100]; XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]); CVBufferRelease(pixelBufferRef); @@ -139,23 +161,30 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] - (void)testBufferSize_NV12 { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 720, + 1280, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; + XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], + 576000); CVBufferRelease(pixelBufferRef); } - (void)testBufferSize_RGB { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef); + CVPixelBufferCreate( + NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; - XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0); + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; + XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], + 0); CVBufferRelease(pixelBufferRef); } @@ -165,32 +194,47 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] } - (void)testCropAndScaleNoOp_NV12 { - [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputSize:CGSizeMake(720, 1280)]; + [self + cropAndScaleTestWithNV12InputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputSize:CGSizeMake(720, 1280)]; } - (void)testCropAndScale_NV12FullToVideo { - [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange - outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]; + [self + cropAndScaleTestWithNV12InputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarFullRange + outputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]; } - (void)testCropAndScaleZeroSizeFrame_NV12 { - [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputSize:CGSizeMake(0, 0)]; + [self + cropAndScaleTestWithNV12InputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputSize:CGSizeMake(0, 0)]; } - (void)testCropAndScaleToSmallFormat_NV12 { - [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputSize:CGSizeMake(148, 320)]; + [self + cropAndScaleTestWithNV12InputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputFormat: + 
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputSize:CGSizeMake(148, 320)]; } - (void)testCropAndScaleToOddFormat_NV12 { - [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputSize:CGSizeMake(361, 640)]; + [self + cropAndScaleTestWithNV12InputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputSize:CGSizeMake(361, 640)]; } - (void)testCropAndScale_32BGRA { @@ -202,11 +246,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] } - (void)testCropAndScaleWithSmallCropInfo_32ARGB { - [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:2 cropY:3]; + [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB + cropX:2 + cropY:3]; } - (void)testCropAndScaleWithLargeCropInfo_32ARGB { - [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:200 cropY:300]; + [self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB + cropX:200 + cropY:300]; } - (void)testToI420_NV12 { @@ -223,33 +271,41 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] - (void)testToI420WithCropAndScale_NV12 { for (const auto &setting : kToI420WithCropAndScaleSettings) { - [self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + [self toI420WithCropAndScaleWithPixelFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange setting:setting]; } } - (void)testToI420WithCropAndScale_32BGRA { for (const auto &setting : kToI420WithCropAndScaleSettings) { - [self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32BGRA setting:setting]; + [self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32BGRA + setting:setting]; } } - (void)testToI420WithCropAndScale_32ARGB { for (const auto &setting : kToI420WithCropAndScaleSettings) { - [self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32ARGB setting:setting]; + [self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32ARGB + setting:setting]; } } - (void)testScaleBufferTest { CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, 1920, 1080, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + 1920, + 1080, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + NULL, + &pixelBufferRef); - rtc::scoped_refptr i420Buffer = CreateI420Gradient(1920, 1080); + rtc::scoped_refptr i420Buffer = + CreateI420Gradient(1920, 1080); CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual(buffer.width, 1920); XCTAssertEqual(buffer.height, 1080); @@ -294,11 +350,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] #pragma mark - Shared test code - (void)cropAndScaleTestWithNV12 { - [self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange - outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]; + [self + cropAndScaleTestWithNV12InputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange + outputFormat: + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]; } -- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat 
outputFormat:(OSType)outputFormat { +- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat + outputFormat:(OSType)outputFormat { [self cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat outputFormat:(OSType)outputFormat outputSize:CGSizeMake(360, 640)]; @@ -310,40 +370,49 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate(NULL, 720, 1280, inputFormat, NULL, &pixelBufferRef); - rtc::scoped_refptr i420Buffer = CreateI420Gradient(720, 1280); + rtc::scoped_refptr i420Buffer = + CreateI420Gradient(720, 1280); CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; XCTAssertEqual(buffer.width, 720); XCTAssertEqual(buffer.height, 1280); CVPixelBufferRef outputPixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, outputSize.width, outputSize.height, outputFormat, NULL, &outputPixelBufferRef); + CVPixelBufferCreate(NULL, + outputSize.width, + outputSize.height, + outputFormat, + NULL, + &outputPixelBufferRef); std::vector frameScaleBuffer; - if ([buffer requiresScalingToWidth:outputSize.width height:outputSize.height]) { + if ([buffer requiresScalingToWidth:outputSize.width + height:outputSize.height]) { int size = - [buffer bufferSizeForCroppingAndScalingToWidth:outputSize.width height:outputSize.height]; + [buffer bufferSizeForCroppingAndScalingToWidth:outputSize.width + height:outputSize.height]; frameScaleBuffer.resize(size); } else { frameScaleBuffer.clear(); } frameScaleBuffer.shrink_to_fit(); - [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()]; + [buffer cropAndScaleTo:outputPixelBufferRef + withTempBuffer:frameScaleBuffer.data()]; RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef]; + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] + initWithPixelBuffer:outputPixelBufferRef]; XCTAssertEqual(scaledBuffer.width, outputSize.width); XCTAssertEqual(scaledBuffer.height, outputSize.height); if (outputSize.width > 0 && outputSize.height > 0) { RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420]; RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420]; - double psnr = - I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]); + double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer], + *[scaledBufferI420 nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); } @@ -354,20 +423,22 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] [self cropAndScaleTestWithRGBPixelFormat:pixelFormat cropX:0 cropY:0]; } -- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat cropX:(int)cropX cropY:(int)cropY { +- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat + cropX:(int)cropX + cropY:(int)cropY { CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate(NULL, 720, 1280, pixelFormat, NULL, &pixelBufferRef); DrawGradientInRGBPixelBuffer(pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] - initWithPixelBuffer:pixelBufferRef - adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef) - adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - 
cropX - cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY - cropX:cropX - cropY:cropY]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef + adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef) + adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef) + cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX + cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY + cropX:cropX + cropY:cropY]; XCTAssertEqual(buffer.width, 720); XCTAssertEqual(buffer.height, 1280); @@ -377,29 +448,31 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] [buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL]; RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef]; + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] + initWithPixelBuffer:outputPixelBufferRef]; XCTAssertEqual(scaledBuffer.width, 360); XCTAssertEqual(scaledBuffer.height, 640); RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420]; RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420]; - double psnr = - I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]); + double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer], + *[scaledBufferI420 nativeI420Buffer]); XCTAssertEqual(psnr, webrtc::kPerfectPSNR); CVBufferRelease(pixelBufferRef); } - (void)toI420WithPixelFormat:(OSType)pixelFormat { - rtc::scoped_refptr i420Buffer = CreateI420Gradient(360, 640); + rtc::scoped_refptr i420Buffer = + CreateI420Gradient(360, 640); CVPixelBufferRef pixelBufferRef = NULL; CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &pixelBufferRef); CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420]; double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]); @@ -413,28 +486,36 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] CVBufferRelease(pixelBufferRef); } -- (void)toI420WithCropAndScaleWithPixelFormat:(OSType)pixelFormat - setting:(const ToI420WithCropAndScaleSetting &)setting { +- (void) + toI420WithCropAndScaleWithPixelFormat:(OSType)pixelFormat + setting: + (const ToI420WithCropAndScaleSetting &) + setting { rtc::scoped_refptr i420Buffer = CreateI420Gradient(setting.inputWidth, setting.inputHeight); CVPixelBufferRef pixelBufferRef = NULL; - CVPixelBufferCreate( - NULL, setting.inputWidth, setting.inputHeight, pixelFormat, NULL, &pixelBufferRef); + CVPixelBufferCreate(NULL, + setting.inputWidth, + setting.inputHeight, + pixelFormat, + NULL, + &pixelBufferRef); CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef); RTC_OBJC_TYPE(RTCI420Buffer) *objcI420Buffer = [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer]; RTC_OBJC_TYPE(RTCI420Buffer) *scaledObjcI420Buffer = - (RTC_OBJC_TYPE(RTCI420Buffer) *)[objcI420Buffer cropAndScaleWith:setting.offsetX - offsetY:setting.offsetY - cropWidth:setting.cropWidth - cropHeight:setting.cropHeight - scaleWidth:setting.scaleWidth - scaleHeight:setting.scaleHeight]; - RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = - [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef]; + 
(RTC_OBJC_TYPE(RTCI420Buffer) *)[objcI420Buffer + cropAndScaleWith:setting.offsetX + offsetY:setting.offsetY + cropWidth:setting.cropWidth + cropHeight:setting.cropHeight + scaleWidth:setting.scaleWidth + scaleHeight:setting.scaleHeight]; + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) + alloc] initWithPixelBuffer:pixelBufferRef]; id scaledBuffer = [buffer cropAndScaleWith:setting.offsetX offsetY:setting.offsetY @@ -442,12 +523,13 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] cropHeight:setting.cropHeight scaleWidth:setting.scaleWidth scaleHeight:setting.scaleHeight]; - XCTAssertTrue([scaledBuffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); + XCTAssertTrue( + [scaledBuffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]); RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [scaledBuffer toI420]; - double psnr = - I420PSNR(*[scaledObjcI420Buffer nativeI420Buffer], *[fromCVPixelBuffer nativeI420Buffer]); + double psnr = I420PSNR(*[scaledObjcI420Buffer nativeI420Buffer], + *[fromCVPixelBuffer nativeI420Buffer]); double target = webrtc::kPerfectPSNR; if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) { // libyuv's I420ToRGB functions seem to lose some quality. diff --git a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm index 6a117a3546..6ae8db383d 100644 --- a/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm +++ b/sdk/objc/unittests/RTCCameraVideoCapturerTests.mm @@ -32,23 +32,34 @@ #if TARGET_OS_IPHONE // Helper method. CMSampleBufferRef createTestSampleBufferRef() { - // This image is already in the testing bundle. UIImage *image = [UIImage imageNamed:@"Default.png"]; CGSize size = image.size; CGImageRef imageRef = [image CGImage]; CVPixelBufferRef pixelBuffer = nullptr; - CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil, + CVPixelBufferCreate(kCFAllocatorDefault, + size.width, + size.height, + kCVPixelFormatType_32ARGB, + nil, &pixelBuffer); CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); - // We don't care about bitsPerComponent and bytesPerRow so arbitrary value of 8 for both. - CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width, - rgbColorSpace, kCGImageAlphaPremultipliedFirst); + // We don't care about bitsPerComponent and bytesPerRow so arbitrary value of + // 8 for both. 
+ CGContextRef context = CGBitmapContextCreate(nil, + size.width, + size.height, + 8, + 8 * size.width, + rgbColorSpace, + kCGImageAlphaPremultipliedFirst); CGContextDrawImage( - context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef); + context, + CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), + imageRef); CGColorSpaceRelease(rgbColorSpace); CGContextRelease(context); @@ -59,19 +70,24 @@ CMSampleBufferRef createTestSampleBufferRef() { CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description); CMSampleBufferRef sampleBuffer = nullptr; - CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description, - &timing, &sampleBuffer); + CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, + pixelBuffer, + YES, + NULL, + NULL, + description, + &timing, + &sampleBuffer); CFRelease(pixelBuffer); return sampleBuffer; - } #endif @interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) (Tests) - (instancetype)initWithDelegate - : (__weak id)delegate captureSession - : (AVCaptureSession *)captureSession; + : (__weak id) + delegate captureSession : (AVCaptureSession *)captureSession; @end @interface RTCCameraVideoCapturerTests : XCTestCase @@ -88,10 +104,11 @@ CMSampleBufferRef createTestSampleBufferRef() { @synthesize capturer = _capturer; - (void)setUp { - self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); + self.delegateMock = + OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]); - self.capturer = - [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock]; + self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] + initWithDelegate:self.delegateMock]; self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock]; } @@ -123,7 +140,8 @@ CMSampleBufferRef createTestSampleBufferRef() { } - (void)testSetupSessionOutput { - AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0]; + AVCaptureVideoDataOutput *videoOutput = + self.capturer.captureSession.outputs[0]; XCTAssertEqual(videoOutput.alwaysDiscardsLateVideoFrames, NO); XCTAssertEqual(videoOutput.sampleBufferDelegate, self.capturer); } @@ -136,26 +154,35 @@ CMSampleBufferRef createTestSampleBufferRef() { // We don't care about width and heigth so arbitrary 123 and 456 values. 
int width = 123; int height = 456; - CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height, - nil, &format); + CMVideoFormatDescriptionCreate(nil, + kCVPixelFormatType_420YpCbCr8PlanarFullRange, + width, + height, + nil, + &format); OCMStub([validFormat1 formatDescription]).andReturn(format); id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]); - CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width, - height, nil, &format); + CMVideoFormatDescriptionCreate( + nil, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + width, + height, + nil, + &format); OCMStub([validFormat2 formatDescription]).andReturn(format); id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]); - CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil, - &format); + CMVideoFormatDescriptionCreate( + nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil, &format); OCMStub([invalidFormat formatDescription]).andReturn(format); NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ]; OCMStub([self.deviceMock formats]).andReturn(formats); // when - NSArray *supportedFormats = - [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock]; + NSArray *supportedFormats = [RTC_OBJC_TYPE(RTCCameraVideoCapturer) + supportedFormatsForDevice:self.deviceMock]; // then XCTAssertEqual(supportedFormats.count, 3u); @@ -175,7 +202,8 @@ CMSampleBufferRef createTestSampleBufferRef() { - (void)testDelegateCallbackNotCalledWhenInvalidBuffer { // given CMSampleBufferRef sampleBuffer = nullptr; - [[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]]; + [[self.delegateMock reject] capturer:[OCMArg any] + didCaptureVideoFrame:[OCMArg any]]; // when [self.capturer captureOutput:self.capturer.captureSession.outputs[0] @@ -189,20 +217,24 @@ CMSampleBufferRef createTestSampleBufferRef() { - (void)testDelegateCallbackWithValidBufferAndOrientationUpdate { #if TARGET_OS_IPHONE XCTExpectFailure(@"Setting orientation on UIDevice is not supported"); - [UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown) forKey:@"orientation"]; + [UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown) + forKey:@"orientation"]; CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); // then - [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * - expectedFrame) { - XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_270); - return YES; - }]]; + [[self.delegateMock expect] + capturer:self.capturer + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL( + RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { + XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_270); + return YES; + }]]; // when NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; - [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil]; + [center postNotificationName:UIDeviceOrientationDidChangeNotification + object:nil]; // We need to wait for the dispatch to finish. WAIT(1000); @@ -216,18 +248,21 @@ CMSampleBufferRef createTestSampleBufferRef() { #endif } -// The XCTest framework considers functions that don't take arguments tests. This is a helper. +// The XCTest framework considers functions that don't take arguments tests. +// This is a helper. 
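Background for the helper below: XCTest only auto-discovers instance methods whose selector starts with "test", returns void, and takes no arguments, so giving shared logic a parameter keeps it out of the test run. A hedged illustration with hypothetical method names:

  - (void)testRunsAutomatically {
    // Zero-argument, void, name starts with "test": XCTest runs this.
    XCTAssertTrue(YES);
  }

  - (void)testRotationHelperForCamera:(AVCaptureDevicePosition)camera {
    // Takes an argument, so XCTest skips it; real tests call it explicitly,
    // e.g. [self testRotationHelperForCamera:AVCaptureDevicePositionFront];
  }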
- (void)testRotationCamera:(AVCaptureDevicePosition)camera withOrientation:(UIDeviceOrientation)deviceOrientation { #if TARGET_OS_IPHONE - // Mock the AVCaptureConnection as we will get the camera position from the connection's - // input ports. - AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]); - AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]); - NSArray *inputPortsArrayMock = @[captureInputPort]; + // Mock the AVCaptureConnection as we will get the camera position from the + // connection's input ports. + AVCaptureDeviceInput *inputPortMock = + OCMClassMock([AVCaptureDeviceInput class]); + AVCaptureInputPort *captureInputPort = + OCMClassMock([AVCaptureInputPort class]); + NSArray *inputPortsArrayMock = @[ captureInputPort ]; AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts). - andReturn(inputPortsArrayMock); + OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts) + .andReturn(inputPortsArrayMock); OCMStub(captureInputPort.input).andReturn(inputPortMock); OCMStub(inputPortMock.device).andReturn(captureDeviceMock); OCMStub(captureDeviceMock.position).andReturn(camera); @@ -237,27 +272,30 @@ CMSampleBufferRef createTestSampleBufferRef() { CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); - [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * - expectedFrame) { - if (camera == AVCaptureDevicePositionFront) { - if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180); - } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { - XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0); - } - } else if (camera == AVCaptureDevicePositionBack) { - if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0); - } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { - XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180); - } - } - return YES; - }]]; + [[self.delegateMock expect] + capturer:self.capturer + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL( + RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { + if (camera == AVCaptureDevicePositionFront) { + if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { + XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180); + } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { + XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0); + } + } else if (camera == AVCaptureDevicePositionBack) { + if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { + XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0); + } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { + XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180); + } + } + return YES; + }]]; NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; - [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil]; + [center postNotificationName:UIDeviceOrientationDidChangeNotification + object:nil]; // We need to wait for the dispatch to finish. 
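The fixed-length WAIT(1000) on the next line gives the capturer's dispatch queue time to handle the orientation notification before the OCMock expectation is checked. Where only mock expectations need to be verified, OCMock's delayed verification (already used elsewhere in this file) can replace the fixed sleep; a small sketch, assuming the expectation was set on self.delegateMock as above:

  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification
                        object:nil];
  // Re-checks the expectations on the mock for up to 15 s instead of
  // sleeping for a fixed interval.
  OCMVerifyAllWithDelay(self.delegateMock, 15);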
WAIT(1000); @@ -293,42 +331,57 @@ CMSampleBufferRef createTestSampleBufferRef() { } - (void)setExif:(CMSampleBufferRef)sampleBuffer { - rtc::ScopedCFTypeRef exif(CFDictionaryCreateMutable( - kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks)); - CFDictionarySetValue(exif.get(), CFSTR("LensModel"), CFSTR("iPhone SE back camera 4.15mm f/2.2")); - CMSetAttachment(sampleBuffer, CFSTR("{Exif}"), exif.get(), kCMAttachmentMode_ShouldPropagate); + rtc::ScopedCFTypeRef exif( + CFDictionaryCreateMutable(kCFAllocatorDefault, + 0, + &kCFTypeDictionaryKeyCallBacks, + &kCFTypeDictionaryValueCallBacks)); + CFDictionarySetValue(exif.get(), + CFSTR("LensModel"), + CFSTR("iPhone SE back camera 4.15mm f/2.2")); + CMSetAttachment(sampleBuffer, + CFSTR("{Exif}"), + exif.get(), + kCMAttachmentMode_ShouldPropagate); } - (void)testRotationFrame { #if TARGET_OS_IPHONE - // Mock the AVCaptureConnection as we will get the camera position from the connection's - // input ports. - AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]); - AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]); - NSArray *inputPortsArrayMock = @[captureInputPort]; + // Mock the AVCaptureConnection as we will get the camera position from the + // connection's input ports. + AVCaptureDeviceInput *inputPortMock = + OCMClassMock([AVCaptureDeviceInput class]); + AVCaptureInputPort *captureInputPort = + OCMClassMock([AVCaptureInputPort class]); + NSArray *inputPortsArrayMock = @[ captureInputPort ]; AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts). - andReturn(inputPortsArrayMock); + OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts) + .andReturn(inputPortsArrayMock); OCMStub(captureInputPort.input).andReturn(inputPortMock); OCMStub(inputPortMock.device).andReturn(captureDeviceMock); OCMStub(captureDeviceMock.position).andReturn(AVCaptureDevicePositionFront); XCTExpectFailure(@"Setting orientation on UIDevice is not supported"); - [UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft) forKey:@"orientation"]; + [UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft) + forKey:@"orientation"]; CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); - [[self.delegateMock expect] capturer:self.capturer - didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) * - expectedFrame) { - // Front camera and landscape left should return 180. But the frame's exif - // we add below says its from the back camera, so rotation should be 0. - XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0); - return YES; - }]]; + [[self.delegateMock expect] + capturer:self.capturer + didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL( + RTC_OBJC_TYPE(RTCVideoFrame) * + expectedFrame) { + // Front camera and landscape left should return 180. But the frame's + // exif we add below says its from the back camera, so rotation should + // be 0. + XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0); + return YES; + }]]; NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; - [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil]; + [center postNotificationName:UIDeviceOrientationDidChangeNotification + object:nil]; // We need to wait for the dispatch to finish. 
WAIT(1000); @@ -349,8 +402,8 @@ CMSampleBufferRef createTestSampleBufferRef() { CMSampleBufferRef sampleBuffer = createTestSampleBufferRef(); [self setExif:sampleBuffer]; - AVCaptureDevicePosition cameraPosition = [AVCaptureSession - devicePositionForSampleBuffer:sampleBuffer]; + AVCaptureDevicePosition cameraPosition = + [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer]; XCTAssertEqual(cameraPosition, AVCaptureDevicePositionBack); #endif } @@ -378,10 +431,11 @@ CMSampleBufferRef createTestSampleBufferRef() { OCMStub([self.captureSessionMock addOutput:[OCMArg any]]); OCMStub([self.captureSessionMock beginConfiguration]); OCMStub([self.captureSessionMock commitConfiguration]); - self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); - self.capturer = - [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock - captureSession:self.captureSessionMock]; + self.delegateMock = + OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate))); + self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] + initWithDelegate:self.delegateMock + captureSession:self.captureSessionMock]; self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock]; } @@ -399,16 +453,20 @@ CMSampleBufferRef createTestSampleBufferRef() { - (void)testStartingAndStoppingCapture { id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]]) + OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock + error:[OCMArg setTo:nil]]) .andReturn(expectedDeviceInputMock); - OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES); + OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]) + .andReturn(YES); OCMStub([self.deviceMock unlockForConfiguration]); - OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES); + OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]) + .andReturn(YES); OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]); OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]); - // Set expectation that the capture session should be started with correct device. + // Set expectation that the capture session should be started with correct + // device. OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]); OCMExpect([_captureSessionMock startRunning]); OCMExpect([_captureSessionMock stopRunning]); @@ -422,9 +480,10 @@ CMSampleBufferRef createTestSampleBufferRef() { } - (void)testStartCaptureFailingToLockForConfiguration { - // The captureSessionMock is a strict mock, so this test will crash if the startCapture - // method does not return when failing to lock for configuration. - OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(NO); + // The captureSessionMock is a strict mock, so this test will crash if the + // startCapture method does not return when failing to lock for configuration. 
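As background for the comment above: OCMock's strict mocks raise on any message that was not explicitly stubbed or expected, while the plain class mocks used elsewhere silently ignore unexpected calls; that is what makes this test fail loudly if startCapture keeps talking to the session after lockForConfiguration: fails. A hedged sketch of the difference (local variable names are hypothetical):

  // "Nice" mock: unexpected messages are ignored and return nil/zero.
  id niceSession = OCMClassMock([AVCaptureSession class]);
  [niceSession startRunning];  // fine, nothing was stubbed

  // Strict mock: only stubbed or expected messages are accepted.
  id strictSession = OCMStrictClassMock([AVCaptureSession class]);
  OCMExpect([strictSession startRunning]);
  [strictSession startRunning];  // matches the expectation
  // [strictSession stopRunning];  // would raise on a strict mock
  OCMVerifyAll(strictSession);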
+ OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]) + .andReturn(NO); id format = OCMClassMock([AVCaptureDeviceFormat class]); [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30]; @@ -436,16 +495,20 @@ CMSampleBufferRef createTestSampleBufferRef() { - (void)testStartingAndStoppingCaptureWithCallbacks { id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]]) + OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock + error:[OCMArg setTo:nil]]) .andReturn(expectedDeviceInputMock); - OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES); + OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]) + .andReturn(YES); OCMStub([self.deviceMock unlockForConfiguration]); - OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES); + OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]) + .andReturn(YES); OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]); OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]); - // Set expectation that the capture session should be started with correct device. + // Set expectation that the capture session should be started with correct + // device. OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]); OCMExpect([_captureSessionMock startRunning]); OCMExpect([_captureSessionMock stopRunning]); @@ -468,8 +531,9 @@ CMSampleBufferRef createTestSampleBufferRef() { dispatch_semaphore_signal(completedStopSemaphore); }]; - dispatch_semaphore_wait(completedStopSemaphore, - dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC)); + dispatch_semaphore_wait( + completedStopSemaphore, + dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC)); OCMVerifyAllWithDelay(_captureSessionMock, 15); XCTAssertTrue(completedStart); XCTAssertTrue(completedStop); @@ -478,13 +542,16 @@ CMSampleBufferRef createTestSampleBufferRef() { - (void)testStartCaptureFailingToLockForConfigurationWithCallback { id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]]) + OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock + error:[OCMArg setTo:nil]]) .andReturn(expectedDeviceInputMock); id errorMock = OCMClassMock([NSError class]); - OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]]).andReturn(NO); - OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES); + OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]]) + .andReturn(NO); + OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]) + .andReturn(YES); OCMStub([self.deviceMock unlockForConfiguration]); OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]); @@ -501,22 +568,26 @@ CMSampleBufferRef createTestSampleBufferRef() { dispatch_semaphore_signal(completedStartSemaphore); }]; - long ret = dispatch_semaphore_wait(completedStartSemaphore, - dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC)); + long ret = dispatch_semaphore_wait( + completedStartSemaphore, + dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC)); XCTAssertEqual(ret, 0); XCTAssertEqual(callbackError, errorMock); } -// 
TODO(crbug.com/webrtc/14829): Test is disabled on iOS < 16 and broken on iOS 16. +// TODO(crbug.com/webrtc/14829): Test is disabled on iOS < 16 and broken on +// iOS 16. - (void)DISABLED_testStartCaptureSetsOutputDimensionsInvalidPixelFormat { id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([captureDeviceInputMock deviceInputWithDevice:_deviceMock error:[OCMArg setTo:nil]]) + OCMStub([captureDeviceInputMock deviceInputWithDevice:_deviceMock + error:[OCMArg setTo:nil]]) .andReturn(expectedDeviceInputMock); OCMStub([_deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES); OCMStub([_deviceMock unlockForConfiguration]); - OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES); + OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]) + .andReturn(YES); OCMStub([_captureSessionMock addInput:expectedDeviceInputMock]); OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]); OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]); @@ -529,12 +600,14 @@ CMSampleBufferRef createTestSampleBufferRef() { int width = 110; int height = 220; FourCharCode pixelFormat = 0x18000000; - CMVideoFormatDescriptionCreate(nil, pixelFormat, width, height, nil, &formatDescription); + CMVideoFormatDescriptionCreate( + nil, pixelFormat, width, height, nil, &formatDescription); OCMStub([deviceFormatMock formatDescription]).andReturn(formatDescription); [_capturer startCaptureWithDevice:_deviceMock format:deviceFormatMock fps:30]; - XCTestExpectation *expectation = [self expectationWithDescription:@"StopCompletion"]; + XCTestExpectation *expectation = + [self expectationWithDescription:@"StopCompletion"]; [_capturer stopCaptureWithCompletionHandler:^(void) { [expectation fulfill]; }]; @@ -544,15 +617,22 @@ CMSampleBufferRef createTestSampleBufferRef() { OCMVerify([_captureSessionMock addOutput:[OCMArg checkWithBlock:^BOOL(AVCaptureVideoDataOutput *output) { if (@available(iOS 16, *)) { - XCTAssertEqual(width, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]); - XCTAssertEqual(height, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]); + XCTAssertEqual( + width, + [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]); + XCTAssertEqual( + height, + [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]); } else { - XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]); - XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]); + XCTAssertEqual( + 0, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]); + XCTAssertEqual( + 0, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]); } XCTAssertEqual( (FourCharCode)kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, - [output.videoSettings[(id)kCVPixelBufferPixelFormatTypeKey] unsignedIntValue]); + [output.videoSettings[(id)kCVPixelBufferPixelFormatTypeKey] + unsignedIntValue]); return YES; }]]); } diff --git a/sdk/objc/unittests/RTCCertificateTest.mm b/sdk/objc/unittests/RTCCertificateTest.mm index bc1347336c..64c68e0e45 100644 --- a/sdk/objc/unittests/RTCCertificateTest.mm +++ b/sdk/objc/unittests/RTCCertificateTest.mm @@ -29,7 +29,8 @@ @implementation RTCCertificateTest - (void)testCertificateIsUsedInConfig { - RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = + 
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; NSArray *urlStrings = @[ @"stun:stun1.example.net" ]; RTC_OBJC_TYPE(RTCIceServer) *server = @@ -37,33 +38,45 @@ originalConfig.iceServers = @[ server ]; // Generate a new certificate. - RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate) - generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}]; + RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = + [RTC_OBJC_TYPE(RTCCertificate) generateCertificateWithParams:@{ + @"expires" : @100000, + @"name" : @"RSASSA-PKCS1-v1_5" + }]; // Store certificate in configuration. originalConfig.certificate = originalCertificate; RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:@{} + optionalConstraints:nil]; RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; // Create PeerConnection with this certificate. RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = - [factory peerConnectionWithConfiguration:originalConfig constraints:contraints delegate:nil]; + [factory peerConnectionWithConfiguration:originalConfig + constraints:contraints + delegate:nil]; // Retrieve certificate from the configuration. - RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration; + RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = + peerConnection.configuration; // Extract PEM strings from original certificate. - std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String]; - std::string originalCertificateField = [[originalCertificate certificate] UTF8String]; + std::string originalPrivateKeyField = + [[originalCertificate private_key] UTF8String]; + std::string originalCertificateField = + [[originalCertificate certificate] UTF8String]; // Extract PEM strings from certificate retrieved from configuration. - RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate; - std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String]; - std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String]; + RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = + retrievedConfig.certificate; + std::string retrievedPrivateKeyField = + [[retrievedCertificate private_key] UTF8String]; + std::string retrievedCertificateField = + [[retrievedCertificate certificate] UTF8String]; // Check that the original certificate and retrieved certificate match. 
EXPECT_EQ(originalPrivateKeyField, retrievedPrivateKeyField); diff --git a/sdk/objc/unittests/RTCConfigurationTest.mm b/sdk/objc/unittests/RTCConfigurationTest.mm index 18cc97191e..d4973bdde0 100644 --- a/sdk/objc/unittests/RTCConfigurationTest.mm +++ b/sdk/objc/unittests/RTCConfigurationTest.mm @@ -30,7 +30,8 @@ RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -47,11 +48,11 @@ config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; - config.cryptoOptions = - [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:YES - srtpEnableEncryptedRtpHeaderExtensions:YES - sframeRequireFrameEncryption:YES]; + config.cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] + initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:YES + srtpEnableEncryptedRtpHeaderExtensions:YES + sframeRequireFrameEncryption:YES]; config.rtcpAudioReportIntervalMs = 2500; config.rtcpVideoReportIntervalMs = 3750; @@ -81,9 +82,14 @@ nativeConfig->continual_gathering_policy); EXPECT_EQ(true, nativeConfig->prune_turn_ports); EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_gcm_crypto_suites); - EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_aes128_sha1_32_crypto_cipher); - EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_encrypted_rtp_header_extensions); - EXPECT_EQ(true, nativeConfig->crypto_options->sframe.require_frame_encryption); + EXPECT_EQ( + true, + nativeConfig->crypto_options->srtp.enable_aes128_sha1_32_crypto_cipher); + EXPECT_EQ(true, + nativeConfig->crypto_options->srtp + .enable_encrypted_rtp_header_extensions); + EXPECT_EQ(true, + nativeConfig->crypto_options->sframe.require_frame_encryption); EXPECT_EQ(2500, nativeConfig->audio_rtcp_report_interval_ms()); EXPECT_EQ(3750, nativeConfig->video_rtcp_report_interval_ms()); } @@ -93,7 +99,8 @@ RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; config.bundlePolicy = RTCBundlePolicyMaxBundle; @@ -110,18 +117,18 @@ config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; - config.cryptoOptions = - [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:NO - srtpEnableEncryptedRtpHeaderExtensions:NO - sframeRequireFrameEncryption:NO]; + config.cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] + initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:NO + srtpEnableEncryptedRtpHeaderExtensions:NO + sframeRequireFrameEncryption:NO]; config.rtcpAudioReportIntervalMs = 1500; config.rtcpVideoReportIntervalMs = 2150; webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig = [config createNativeConfiguration]; - RTC_OBJC_TYPE(RTCConfiguration) *newConfig 
= - [[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig]; + RTC_OBJC_TYPE(RTCConfiguration) *newConfig = [[RTC_OBJC_TYPE(RTCConfiguration) + alloc] initWithNativeConfiguration:*nativeConfig]; EXPECT_EQ([config.iceServers count], newConfig.iceServers.count); RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0]; RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0]; @@ -135,12 +142,16 @@ EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy); EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy); EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy); - EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets); - EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate); - EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout); + EXPECT_EQ(config.audioJitterBufferMaxPackets, + newConfig.audioJitterBufferMaxPackets); + EXPECT_EQ(config.audioJitterBufferFastAccelerate, + newConfig.audioJitterBufferFastAccelerate); + EXPECT_EQ(config.iceConnectionReceivingTimeout, + newConfig.iceConnectionReceivingTimeout); EXPECT_EQ(config.iceBackupCandidatePairPingInterval, newConfig.iceBackupCandidatePairPingInterval); - EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy); + EXPECT_EQ(config.continualGatheringPolicy, + newConfig.continualGatheringPolicy); EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts); EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites, newConfig.cryptoOptions.srtpEnableGcmCryptoSuites); @@ -150,12 +161,15 @@ newConfig.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions); EXPECT_EQ(config.cryptoOptions.sframeRequireFrameEncryption, newConfig.cryptoOptions.sframeRequireFrameEncryption); - EXPECT_EQ(config.rtcpAudioReportIntervalMs, newConfig.rtcpAudioReportIntervalMs); - EXPECT_EQ(config.rtcpVideoReportIntervalMs, newConfig.rtcpVideoReportIntervalMs); + EXPECT_EQ(config.rtcpAudioReportIntervalMs, + newConfig.rtcpAudioReportIntervalMs); + EXPECT_EQ(config.rtcpVideoReportIntervalMs, + newConfig.rtcpVideoReportIntervalMs); } - (void)testDefaultValues { - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; EXPECT_EQ(config.cryptoOptions, nil); } diff --git a/sdk/objc/unittests/RTCEncodedImage_xctest.mm b/sdk/objc/unittests/RTCEncodedImage_xctest.mm index 389e765e83..22a1c61331 100644 --- a/sdk/objc/unittests/RTCEncodedImage_xctest.mm +++ b/sdk/objc/unittests/RTCEncodedImage_xctest.mm @@ -23,19 +23,23 @@ encoded_image.SetEncodedData(encoded_data); RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = - [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image]; + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] + initWithNativeEncodedImage:encoded_image]; - XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data); + XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), + encoded_data); } - (void)testInitWithNSData { NSData *bufferData = [NSData data]; - RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init]; + RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = + [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init]; encodedImage.buffer = bufferData; webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage]; 
XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr); - XCTAssertEqual(result_encoded_image.GetEncodedData()->data(), bufferData.bytes); + XCTAssertEqual(result_encoded_image.GetEncodedData()->data(), + bufferData.bytes); } - (void)testRetainsNativeEncodedImage { @@ -44,8 +48,8 @@ const auto encoded_data = webrtc::EncodedImageBuffer::Create(1); webrtc::EncodedImage encoded_image; encoded_image.SetEncodedData(encoded_data); - encodedImage = - [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image]; + encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] + initWithNativeEncodedImage:encoded_image]; } webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage]; XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr); diff --git a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm index 2407c88c1a..7eb9efd0f5 100644 --- a/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm +++ b/sdk/objc/unittests/RTCFileVideoCapturer_xctest.mm @@ -17,7 +17,8 @@ NSString *const kTestFileName = @"foreman.mp4"; static const int kTestTimeoutMs = 5 * 1000; // 5secs. -@interface MockCapturerDelegate : NSObject +@interface MockCapturerDelegate + : NSObject @property(nonatomic, assign) NSInteger capturedFramesCount; @@ -47,7 +48,8 @@ NS_CLASS_AVAILABLE_IOS(10) - (void)setUp { self.mockDelegate = [[MockCapturerDelegate alloc] init]; - self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate]; + self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] + initWithDelegate:self.mockDelegate]; } - (void)tearDown { @@ -62,7 +64,8 @@ NS_CLASS_AVAILABLE_IOS(10) errorOccured = YES; }; - [self.capturer startCapturingFromFileNamed:@"not_in_bundle.mov" onError:errorBlock]; + [self.capturer startCapturingFromFileNamed:@"not_in_bundle.mov" + onError:errorBlock]; ASSERT_TRUE_WAIT(errorOccured, kTestTimeoutMs); } @@ -79,8 +82,10 @@ NS_CLASS_AVAILABLE_IOS(10) secondError = YES; }; - [self.capturer startCapturingFromFileNamed:kTestFileName onError:firstErrorBlock]; - [self.capturer startCapturingFromFileNamed:kTestFileName onError:secondErrorBlock]; + [self.capturer startCapturingFromFileNamed:kTestFileName + onError:firstErrorBlock]; + [self.capturer startCapturingFromFileNamed:kTestFileName + onError:secondErrorBlock]; ASSERT_TRUE_WAIT(secondError, kTestTimeoutMs); } @@ -96,12 +101,16 @@ NS_CLASS_AVAILABLE_IOS(10) // We're dispatching the `stopCapture` with delay to ensure the capturer has // had the chance to capture several frames. - dispatch_time_t captureDelay = dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC); // 2secs. - dispatch_after(captureDelay, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ - capturedFrames = self.mockDelegate.capturedFramesCount; - [self.capturer stopCapture]; - done = YES; - }); + dispatch_time_t captureDelay = + dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC); // 2secs. 
+ dispatch_after( + captureDelay, + dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), + ^{ + capturedFrames = self.mockDelegate.capturedFramesCount; + [self.capturer stopCapture]; + done = YES; + }); WAIT(done, kTestTimeoutMs); capturedFramesAfterStop = self.mockDelegate.capturedFramesCount; diff --git a/sdk/objc/unittests/RTCIceCandidateTest.mm b/sdk/objc/unittests/RTCIceCandidateTest.mm index d781488286..4cdd317a3c 100644 --- a/sdk/objc/unittests/RTCIceCandidateTest.mm +++ b/sdk/objc/unittests/RTCIceCandidateTest.mm @@ -30,7 +30,9 @@ "59052 typ host generation 0"; RTC_OBJC_TYPE(RTCIceCandidate) *candidate = - [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"]; + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp + sdpMLineIndex:0 + sdpMid:@"audio"]; std::unique_ptr nativeCandidate = candidate.nativeCandidate; @@ -50,7 +52,8 @@ webrtc::CreateIceCandidate("audio", 0, sdp, nullptr)); RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate = - [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate.get()]; + [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] + initWithNativeCandidate:nativeCandidate.get()]; EXPECT_NE(nativeCandidate.get(), iceCandidate.nativeCandidate.get()); EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]); EXPECT_EQ(0, iceCandidate.sdpMLineIndex); diff --git a/sdk/objc/unittests/RTCIceServerTest.mm b/sdk/objc/unittests/RTCIceServerTest.mm index 772653c4dc..29163961da 100644 --- a/sdk/objc/unittests/RTCIceServerTest.mm +++ b/sdk/objc/unittests/RTCIceServerTest.mm @@ -25,8 +25,8 @@ @implementation RTCIceServerTest - (void)testOneURLServer { - RTC_OBJC_TYPE(RTCIceServer) *server = - [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] + initWithURLStrings:@[ @"stun:stun1.example.net" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); @@ -36,8 +36,11 @@ } - (void)testTwoURLServer { - RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] - initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = + [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ + @"turn1:turn1.example.net", + @"turn2:turn2.example.net" + ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(2u, iceStruct.urls.size()); @@ -48,10 +51,10 @@ } - (void)testPasswordCredential { - RTC_OBJC_TYPE(RTCIceServer) *server = - [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential"]; + RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] + initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential"]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -60,12 +63,12 @@ } - (void)testHostname { - RTC_OBJC_TYPE(RTCIceServer) *server = - [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname"]; + RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] + initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + 
credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname"]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -75,13 +78,13 @@ } - (void)testTlsAlpnProtocols { - RTC_OBJC_TYPE(RTCIceServer) *server = - [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname" - tlsAlpnProtocols:@[ @"proto1", @"proto2" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] + initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname" + tlsAlpnProtocols:@[ @"proto1", @"proto2" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -92,14 +95,14 @@ } - (void)testTlsEllipticCurves { - RTC_OBJC_TYPE(RTCIceServer) *server = - [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ] - username:@"username" - credential:@"credential" - tlsCertPolicy:RTCTlsCertPolicySecure - hostname:@"hostname" - tlsAlpnProtocols:@[ @"proto1", @"proto2" ] - tlsEllipticCurves:@[ @"curve1", @"curve2" ]]; + RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] + initWithURLStrings:@[ @"turn1:turn1.example.net" ] + username:@"username" + credential:@"credential" + tlsCertPolicy:RTCTlsCertPolicySecure + hostname:@"hostname" + tlsAlpnProtocols:@[ @"proto1", @"proto2" ] + tlsEllipticCurves:@[ @"curve1", @"curve2" ]]; webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer; EXPECT_EQ(1u, iceStruct.urls.size()); EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front()); @@ -125,7 +128,7 @@ [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer]; EXPECT_EQ(1u, iceServer.urlStrings.count); EXPECT_EQ("stun:stun.example.net", - [NSString stdStringForString:iceServer.urlStrings.firstObject]); + [NSString stdStringForString:iceServer.urlStrings.firstObject]); EXPECT_EQ("username", [NSString stdStringForString:iceServer.username]); EXPECT_EQ("password", [NSString stdStringForString:iceServer.credential]); EXPECT_EQ("hostname", [NSString stdStringForString:iceServer.hostname]); diff --git a/sdk/objc/unittests/RTCMediaConstraintsTest.mm b/sdk/objc/unittests/RTCMediaConstraintsTest.mm index 6ed7859ba1..60d68f3e31 100644 --- a/sdk/objc/unittests/RTCMediaConstraintsTest.mm +++ b/sdk/objc/unittests/RTCMediaConstraintsTest.mm @@ -25,24 +25,28 @@ @implementation RTCMediaConstraintsTests - (void)testMediaConstraints { - NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"}; - NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"}; + NSDictionary *mandatory = @{@"key1" : @"value1", @"key2" : @"value2"}; + NSDictionary *optional = @{@"key3" : @"value3", @"key4" : @"value4"}; RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatory - optionalConstraints:optional]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:mandatory + optionalConstraints:optional]; std::unique_ptr nativeConstraints = [constraints nativeConstraints]; - webrtc::MediaConstraints::Constraints nativeMandatory = 
nativeConstraints->GetMandatory(); + webrtc::MediaConstraints::Constraints nativeMandatory = + nativeConstraints->GetMandatory(); [self expectConstraints:mandatory inNativeConstraints:nativeMandatory]; - webrtc::MediaConstraints::Constraints nativeOptional = nativeConstraints->GetOptional(); + webrtc::MediaConstraints::Constraints nativeOptional = + nativeConstraints->GetOptional(); [self expectConstraints:optional inNativeConstraints:nativeOptional]; } - (void)expectConstraints:(NSDictionary *)constraints - inNativeConstraints:(webrtc::MediaConstraints::Constraints)nativeConstraints { + inNativeConstraints: + (webrtc::MediaConstraints::Constraints)nativeConstraints { EXPECT_EQ(constraints.count, nativeConstraints.size()); for (NSString *key in constraints) { diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm index 9accfd855f..95d8b058f9 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm @@ -37,11 +37,14 @@ extern "C" { @implementation RTCPeerConnectionFactoryBuilderTests - (void)testBuilder { - id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); + id factoryMock = + OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); OCMExpect([factoryMock alloc]).andReturn(factoryMock); - RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs] - initWithMediaAndDependencies:webrtc::PeerConnectionFactoryDependencies()]); - RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; + RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] + ignoringNonObjectArgs] initWithMediaAndDependencies: + webrtc::PeerConnectionFactoryDependencies()]); + RTCPeerConnectionFactoryBuilder* builder = + [[RTCPeerConnectionFactoryBuilder alloc] init]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); @@ -49,11 +52,14 @@ extern "C" { } - (void)testDefaultComponentsBuilder { - id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); + id factoryMock = + OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]); OCMExpect([factoryMock alloc]).andReturn(factoryMock); - RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs] - initWithMediaAndDependencies:webrtc::PeerConnectionFactoryDependencies()]); - RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder]; + RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] + ignoringNonObjectArgs] initWithMediaAndDependencies: + webrtc::PeerConnectionFactoryDependencies()]); + RTCPeerConnectionFactoryBuilder* builder = + [RTCPeerConnectionFactoryBuilder defaultBuilder]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); diff --git a/sdk/objc/unittests/RTCPeerConnectionTest.mm b/sdk/objc/unittests/RTCPeerConnectionTest.mm index 9ca8403559..e2c05f90f7 100644 --- a/sdk/objc/unittests/RTCPeerConnectionTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionTest.mm @@ -38,7 +38,8 @@ RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + 
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; config.iceServers = @[ server ]; config.iceTransportPolicy = RTCIceTransportPolicyRelay; @@ -57,27 +58,31 @@ RTCContinualGatheringPolicyGatherContinually; config.shouldPruneTurnPorts = YES; config.activeResetSrtpParams = YES; - config.cryptoOptions = - [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES - srtpEnableAes128Sha1_32CryptoCipher:YES - srtpEnableEncryptedRtpHeaderExtensions:NO - sframeRequireFrameEncryption:NO]; + config.cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] + initWithSrtpEnableGcmCryptoSuites:YES + srtpEnableAes128Sha1_32CryptoCipher:YES + srtpEnableEncryptedRtpHeaderExtensions:NO + sframeRequireFrameEncryption:NO]; RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:@{} + optionalConstraints:nil]; RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; RTC_OBJC_TYPE(RTCConfiguration) * newConfig; @autoreleasepool { RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = - [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil]; + [factory peerConnectionWithConfiguration:config + constraints:contraints + delegate:nil]; newConfig = peerConnection.configuration; - EXPECT_TRUE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:100000] - currentBitrateBps:[NSNumber numberWithInt:5000000] - maxBitrateBps:[NSNumber numberWithInt:500000000]]); + EXPECT_TRUE([peerConnection + setBweMinBitrateBps:[NSNumber numberWithInt:100000] + currentBitrateBps:[NSNumber numberWithInt:5000000] + maxBitrateBps:[NSNumber numberWithInt:500000000]]); EXPECT_FALSE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:2] currentBitrateBps:[NSNumber numberWithInt:1] maxBitrateBps:nil]); @@ -95,12 +100,16 @@ EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy); EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy); EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy); - EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets); - EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate); - EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout); + EXPECT_EQ(config.audioJitterBufferMaxPackets, + newConfig.audioJitterBufferMaxPackets); + EXPECT_EQ(config.audioJitterBufferFastAccelerate, + newConfig.audioJitterBufferFastAccelerate); + EXPECT_EQ(config.iceConnectionReceivingTimeout, + newConfig.iceConnectionReceivingTimeout); EXPECT_EQ(config.iceBackupCandidatePairPingInterval, newConfig.iceBackupCandidatePairPingInterval); - EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy); + EXPECT_EQ(config.continualGatheringPolicy, + newConfig.continualGatheringPolicy); EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts); EXPECT_EQ(config.activeResetSrtpParams, newConfig.activeResetSrtpParams); EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites, @@ -118,12 +127,14 @@ RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings]; - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + 
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; config.iceServers = @[ server ]; RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:@{} + optionalConstraints:nil]; RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; @@ -143,18 +154,22 @@ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:@{} + optionalConstraints:nil]; RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = - [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil]; + [factory peerConnectionWithConfiguration:config + constraints:contraints + delegate:nil]; dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0); - [peerConnection setRemoteDescription:[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] - initWithType:RTCSdpTypeOffer - sdp:@"invalid"] + [peerConnection setRemoteDescription:[[RTC_OBJC_TYPE(RTCSessionDescription) + alloc] initWithType:RTCSdpTypeOffer + sdp:@"invalid"] completionHandler:^(NSError *error) { ASSERT_NE(error, nil); if (error != nil) { @@ -165,8 +180,9 @@ NSTimeInterval timeout = 5; ASSERT_EQ( 0, - dispatch_semaphore_wait(negotiatedSem, - dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)))); + dispatch_semaphore_wait( + negotiatedSem, + dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)))); [peerConnection close]; } @@ -174,18 +190,23 @@ RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; - RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; + RTC_OBJC_TYPE(RTCConfiguration) *config = + [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; RTC_OBJC_TYPE(RTCMediaConstraints) *contraints = - [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{} - optionalConstraints:nil]; + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] + initWithMandatoryConstraints:@{} + optionalConstraints:nil]; RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection = - [factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil]; + [factory peerConnectionWithConfiguration:config + constraints:contraints + delegate:nil]; dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0); - [peerConnection addIceCandidate:[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:@"invalid" - sdpMLineIndex:-1 - sdpMid:nil] + [peerConnection addIceCandidate:[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] + initWithSdp:@"invalid" + sdpMLineIndex:-1 + sdpMid:nil] completionHandler:^(NSError *error) { ASSERT_NE(error, nil); if (error != nil) { @@ -196,8 +217,9 @@ NSTimeInterval timeout = 5; ASSERT_EQ( 0, - dispatch_semaphore_wait(negotiatedSem, - dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)))); + 
dispatch_semaphore_wait( + negotiatedSem, + dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC)))); [peerConnection close]; } diff --git a/sdk/objc/unittests/RTCSessionDescriptionTest.mm b/sdk/objc/unittests/RTCSessionDescriptionTest.mm index a22c96b935..0927da534f 100644 --- a/sdk/objc/unittests/RTCSessionDescriptionTest.mm +++ b/sdk/objc/unittests/RTCSessionDescriptionTest.mm @@ -24,18 +24,22 @@ @implementation RTCSessionDescriptionTests /** - * Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native - * SessionDescriptionInterface (based on the types and SDP strings being equal). + * Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a + * native SessionDescriptionInterface (based on the types and SDP strings being + * equal). */ - (void)testSessionDescriptionConversion { RTC_OBJC_TYPE(RTCSessionDescription) *description = - [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]]; + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] + initWithType:RTCSdpTypeAnswer + sdp:[self sdp]]; std::unique_ptr nativeDescription = description.nativeDescription; EXPECT_EQ(RTCSdpTypeAnswer, - [RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]); + [RTC_OBJC_TYPE(RTCSessionDescription) + typeForStdString:nativeDescription->type()]); std::string sdp; nativeDescription->ToString(&sdp); @@ -43,13 +47,15 @@ } - (void)testInitFromNativeSessionDescription { - const auto nativeDescription = - webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, [self sdp].stdString, nullptr); + const auto nativeDescription = webrtc::CreateSessionDescription( + webrtc::SdpType::kAnswer, [self sdp].stdString, nullptr); - RTC_OBJC_TYPE(RTCSessionDescription) *description = [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] - initWithNativeDescription:nativeDescription.get()]; - EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer, - [RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]); + RTC_OBJC_TYPE(RTCSessionDescription) *description = + [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] + initWithNativeDescription:nativeDescription.get()]; + EXPECT_EQ( + webrtc::SessionDescriptionInterface::kAnswer, + [RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]); EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]); } diff --git a/sdk/objc/unittests/RTCTracingTest.mm b/sdk/objc/unittests/RTCTracingTest.mm index ff93047bdf..63008a79d1 100644 --- a/sdk/objc/unittests/RTCTracingTest.mm +++ b/sdk/objc/unittests/RTCTracingTest.mm @@ -25,10 +25,11 @@ - (NSString *)documentsFilePathForFileName:(NSString *)fileName { NSParameterAssert(fileName.length); - NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); + NSArray *paths = NSSearchPathForDirectoriesInDomains( + NSDocumentDirectory, NSUserDomainMask, YES); NSString *documentsDirPath = paths.firstObject; NSString *filePath = - [documentsDirPath stringByAppendingPathComponent:fileName]; + [documentsDirPath stringByAppendingPathComponent:fileName]; return filePath; } diff --git a/sdk/objc/unittests/avformatmappertests.mm b/sdk/objc/unittests/avformatmappertests.mm index f0dd2b1141..9628e33568 100644 --- a/sdk/objc/unittests/avformatmappertests.mm +++ b/sdk/objc/unittests/avformatmappertests.mm @@ -33,8 +33,8 @@ static cricket::VideoFormat expectedFormat = // CMVideoDescriptionRef mocking. 
@interface AVCaptureDeviceFormatMock : NSObject -@property (nonatomic, assign) CMVideoFormatDescriptionRef format; -@property (nonatomic, strong) OCMockObject *rangeMock; +@property(nonatomic, assign) CMVideoFormatDescriptionRef format; +@property(nonatomic, strong) OCMockObject* rangeMock; - (instancetype)initWithMediaSubtype:(FourCharCode)subtype minFps:(float)minFps @@ -55,8 +55,8 @@ static cricket::VideoFormat expectedFormat = maxFps:(float)maxFps { self = [super init]; if (self) { - CMVideoFormatDescriptionCreate(nil, subtype, kFormatWidth, kFormatHeight, - nil, &_format); + CMVideoFormatDescriptionCreate( + nil, subtype, kFormatWidth, kFormatHeight, nil, &_format); // We can use OCMock for the range. _rangeMock = [OCMockObject mockForClass:[AVFrameRateRange class]]; [[[_rangeMock stub] andReturnValue:@(minFps)] minFrameRate]; @@ -67,7 +67,7 @@ static cricket::VideoFormat expectedFormat = } + (instancetype)validFormat { - AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc] + AVCaptureDeviceFormatMock* instance = [[AVCaptureDeviceFormatMock alloc] initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange minFps:0.0 maxFps:30.0]; @@ -75,7 +75,7 @@ static cricket::VideoFormat expectedFormat = } + (instancetype)invalidFpsFormat { - AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc] + AVCaptureDeviceFormatMock* instance = [[AVCaptureDeviceFormatMock alloc] initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange minFps:0.0 maxFps:22.0]; @@ -83,7 +83,7 @@ static cricket::VideoFormat expectedFormat = } + (instancetype)invalidMediaSubtypeFormat { - AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc] + AVCaptureDeviceFormatMock* instance = [[AVCaptureDeviceFormatMock alloc] initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8Planar minFps:0.0 maxFps:60.0]; @@ -102,7 +102,7 @@ static cricket::VideoFormat expectedFormat = return self.format; } -- (NSArray *)videoSupportedFrameRateRanges { +- (NSArray*)videoSupportedFrameRateRanges { return @[ self.rangeMock ]; } @@ -202,8 +202,8 @@ TEST(AVFormatMapperTest, SetFormatWhenDeviceCannotLock) { [[[mockDevice stub] andReturn:@[]] formats]; // when - bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil, - cricket::VideoFormat()); + bool resultFormat = webrtc::SetFormatForCaptureDevice( + mockDevice, nil, cricket::VideoFormat()); // then EXPECT_FALSE(resultFormat); @@ -222,8 +222,8 @@ TEST(AVFormatMapperTest, SetFormatWhenFormatIsIncompatible) { OCMExpect([mockDevice unlockForConfiguration]); // when - bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil, - cricket::VideoFormat()); + bool resultFormat = webrtc::SetFormatForCaptureDevice( + mockDevice, nil, cricket::VideoFormat()); // then EXPECT_FALSE(resultFormat); @@ -232,7 +232,7 @@ TEST(AVFormatMapperTest, SetFormatWhenFormatIsIncompatible) { // https://github.com/erikdoe/ocmock/commit/de1419415581dc307045e54bfe9c98c86efea96b // Without it, stubbed exceptions are being re-raised on [mock verify]. 
// More information here: - //https://github.com/erikdoe/ocmock/issues/241 + // https://github.com/erikdoe/ocmock/issues/241 @try { [mockDevice verify]; } @catch (NSException* exception) { diff --git a/sdk/objc/unittests/frame_buffer_helpers.mm b/sdk/objc/unittests/frame_buffer_helpers.mm index 98b86c54c0..f4e8a41ebe 100644 --- a/sdk/objc/unittests/frame_buffer_helpers.mm +++ b/sdk/objc/unittests/frame_buffer_helpers.mm @@ -18,22 +18,40 @@ void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) { size_t width = CVPixelBufferGetWidth(pixelBuffer); size_t height = CVPixelBufferGetHeight(pixelBuffer); CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); - int byteOrder = CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32ARGB ? + int byteOrder = CVPixelBufferGetPixelFormatType(pixelBuffer) == + kCVPixelFormatType_32ARGB ? kCGBitmapByteOrder32Little : 0; - CGContextRef cgContext = CGBitmapContextCreate(baseAddr, - width, - height, - 8, - CVPixelBufferGetBytesPerRow(pixelBuffer), - colorSpace, - byteOrder | kCGImageAlphaNoneSkipLast); + CGContextRef cgContext = + CGBitmapContextCreate(baseAddr, + width, + height, + 8, + CVPixelBufferGetBytesPerRow(pixelBuffer), + colorSpace, + byteOrder | kCGImageAlphaNoneSkipLast); // Create a gradient CGFloat colors[] = { - 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 1.0, + 0.0, + 0.0, + 1.0, + 0.0, + 1.0, + 0.0, + 1.0, + 0.0, + 0.0, + 1.0, + 1.0, }; - CGGradientRef gradient = CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4); + CGGradientRef gradient = + CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4); CGContextDrawLinearGradient( cgContext, gradient, CGPointMake(0, 0), CGPointMake(width, height), 0); @@ -47,36 +65,44 @@ void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) { CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); } -rtc::scoped_refptr CreateI420Gradient(int width, int height) { - rtc::scoped_refptr buffer(webrtc::I420Buffer::Create(width, height)); +rtc::scoped_refptr CreateI420Gradient(int width, + int height) { + rtc::scoped_refptr buffer( + webrtc::I420Buffer::Create(width, height)); // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { - buffer->MutableDataY()[x + y * width] = 128 * (x * height + y * width) / (width * height); + buffer->MutableDataY()[x + y * width] = + 128 * (x * height + y * width) / (width * height); } } int chroma_width = buffer->ChromaWidth(); int chroma_height = buffer->ChromaHeight(); for (int x = 0; x < chroma_width; x++) { for (int y = 0; y < chroma_height; y++) { - buffer->MutableDataU()[x + y * chroma_width] = 255 * x / (chroma_width - 1); - buffer->MutableDataV()[x + y * chroma_width] = 255 * y / (chroma_height - 1); + buffer->MutableDataU()[x + y * chroma_width] = + 255 * x / (chroma_width - 1); + buffer->MutableDataV()[x + y * chroma_width] = + 255 * y / (chroma_height - 1); } } return buffer; } -void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr i420Buffer, - CVPixelBufferRef pixelBuffer) { +void CopyI420BufferToCVPixelBuffer( + rtc::scoped_refptr i420Buffer, + CVPixelBufferRef pixelBuffer) { CVPixelBufferLockBaseAddress(pixelBuffer, 0); const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer); if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { // NV12 - 
uint8_t* dstY = static_cast(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0)); + uint8_t* dstY = static_cast( + CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0)); const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); - uint8_t* dstUV = static_cast(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1)); + uint8_t* dstUV = static_cast( + CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1)); const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); libyuv::I420ToNV12(i420Buffer->DataY(), @@ -92,7 +118,8 @@ void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr i420Bu i420Buffer->width(), i420Buffer->height()); } else { - uint8_t* dst = static_cast(CVPixelBufferGetBaseAddress(pixelBuffer)); + uint8_t* dst = + static_cast(CVPixelBufferGetBaseAddress(pixelBuffer)); const int bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); if (pixelFormat == kCVPixelFormatType_32BGRA) { diff --git a/sdk/objc/unittests/nalu_rewriter_xctest.mm b/sdk/objc/unittests/nalu_rewriter_xctest.mm index 82da549bb6..7ed99eb7a7 100644 --- a/sdk/objc/unittests/nalu_rewriter_xctest.mm +++ b/sdk/objc/unittests/nalu_rewriter_xctest.mm @@ -40,7 +40,8 @@ static const uint8_t SPS_PPS_BUFFER[] = { - (void)testCreateVideoFormatDescription { CMVideoFormatDescriptionRef description = - webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER)); + webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, + arraysize(SPS_PPS_BUFFER)); XCTAssertTrue(description); if (description) { CFRelease(description); @@ -58,8 +59,8 @@ static const uint8_t SPS_PPS_BUFFER[] = { // PPS nalu. 0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30}; // clang-format on - description = webrtc::CreateVideoFormatDescription(sps_pps_not_at_start_buffer, - arraysize(sps_pps_not_at_start_buffer)); + description = webrtc::CreateVideoFormatDescription( + sps_pps_not_at_start_buffer, arraysize(sps_pps_not_at_start_buffer)); XCTAssertTrue(description); @@ -69,7 +70,8 @@ static const uint8_t SPS_PPS_BUFFER[] = { } const uint8_t other_buffer[] = {0x00, 0x00, 0x00, 0x01, 0x28}; - XCTAssertFalse(webrtc::CreateVideoFormatDescription(other_buffer, arraysize(other_buffer))); + XCTAssertFalse(webrtc::CreateVideoFormatDescription(other_buffer, + arraysize(other_buffer))); } - (void)testReadEmptyInput { @@ -85,7 +87,8 @@ static const uint8_t SPS_PPS_BUFFER[] = { - (void)testReadSingleNalu { const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xAA}; - webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data)); + webrtc::AnnexBBufferReader reader(annex_b_test_data, + arraysize(annex_b_test_data)); const uint8_t* nalu = nullptr; size_t nalu_length = 0; XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining()); @@ -100,7 +103,8 @@ static const uint8_t SPS_PPS_BUFFER[] = { - (void)testReadSingleNalu3ByteHeader { const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x01, 0xAA}; - webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data)); + webrtc::AnnexBBufferReader reader(annex_b_test_data, + arraysize(annex_b_test_data)); const uint8_t* nalu = nullptr; size_t nalu_length = 0; XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining()); @@ -119,7 +123,8 @@ static const uint8_t SPS_PPS_BUFFER[] = { 0x00, 0x01, 0x00, 0x00, 0x00, 0xFF}; // clang-format on - webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data)); + webrtc::AnnexBBufferReader reader(annex_b_test_data, + arraysize(annex_b_test_data)); const uint8_t* nalu = nullptr; size_t 
   nalu_length = 0;
   XCTAssertEqual(0u, reader.BytesRemaining());
@@ -136,7 +141,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
       0x00, 0x00, 0x00, 0xFF,
       0x00, 0x00, 0x01, 0xAA, 0xBB};
   // clang-format on
-  webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
+  webrtc::AnnexBBufferReader reader(annex_b_test_data,
+                                    arraysize(annex_b_test_data));
   const uint8_t* nalu = nullptr;
   size_t nalu_length = 0;
   XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
@@ -160,22 +166,25 @@ static const uint8_t SPS_PPS_BUFFER[] = {
   memset(buffer.get(), 0, buffer_size);
   webrtc::AvccBufferWriter writer(buffer.get(), 0);
   XCTAssertEqual(0u, writer.BytesRemaining());
-  XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
-  XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+  XCTAssertFalse(
+      writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+  XCTAssertEqual(
+      0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
 }
 
 - (void)testWriteSingleNalu {
-  const uint8_t expected_buffer[] = {
-      0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
-  };
+  const uint8_t expected_buffer[] = {0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC};
   const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + 4;
   std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
   webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
   XCTAssertEqual(buffer_size, writer.BytesRemaining());
-  XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+  XCTAssertTrue(
+      writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
   XCTAssertEqual(0u, writer.BytesRemaining());
-  XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
-  XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+  XCTAssertFalse(
+      writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
+  XCTAssertEqual(
+      0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
 }
 
 - (void)testWriteMultipleNalus {
@@ -185,15 +194,20 @@ static const uint8_t SPS_PPS_BUFFER[] = {
       0x00, 0x00, 0x00, 0x04, 0xDE, 0xAD, 0xBE, 0xEF
   };
   // clang-format on
-  const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + arraysize(NALU_TEST_DATA_1) + 8;
+  const size_t buffer_size =
+      arraysize(NALU_TEST_DATA_0) + arraysize(NALU_TEST_DATA_1) + 8;
   std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
   webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
   XCTAssertEqual(buffer_size, writer.BytesRemaining());
-  XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
-  XCTAssertEqual(buffer_size - (arraysize(NALU_TEST_DATA_0) + 4), writer.BytesRemaining());
-  XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
+  XCTAssertTrue(
+      writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+  XCTAssertEqual(buffer_size - (arraysize(NALU_TEST_DATA_0) + 4),
+                 writer.BytesRemaining());
+  XCTAssertTrue(
+      writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
   XCTAssertEqual(0u, writer.BytesRemaining());
-  XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+  XCTAssertEqual(
+      0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
 }
 
 - (void)testOverflow {
@@ -203,9 +217,11 @@ static const uint8_t SPS_PPS_BUFFER[] = {
   memset(buffer.get(), 0, buffer_size);
   webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
   XCTAssertEqual(buffer_size, writer.BytesRemaining());
-  XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
+  XCTAssertFalse(
+      writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
   XCTAssertEqual(buffer_size, writer.BytesRemaining());
-  XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
+  XCTAssertEqual(
+      0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
 }
 
 - (void)testH264AnnexBBufferToCMSampleBuffer {
@@ -230,24 +246,27 @@ static const uint8_t SPS_PPS_BUFFER[] = {
   CMSampleBufferRef out_sample_buffer = nil;
   CMVideoFormatDescriptionRef description = [self createDescription];
 
-  Boolean result = webrtc::H264AnnexBBufferToCMSampleBuffer(annex_b_test_data,
-                                                            arraysize(annex_b_test_data),
-                                                            description,
-                                                            &out_sample_buffer,
-                                                            memory_pool);
+  Boolean result =
+      webrtc::H264AnnexBBufferToCMSampleBuffer(annex_b_test_data,
+                                               arraysize(annex_b_test_data),
+                                               description,
+                                               &out_sample_buffer,
+                                               memory_pool);
   XCTAssertTrue(result);
-  XCTAssertEqual(description, CMSampleBufferGetFormatDescription(out_sample_buffer));
+  XCTAssertEqual(description,
+                 CMSampleBufferGetFormatDescription(out_sample_buffer));
 
   char* data_ptr = nullptr;
-  CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(out_sample_buffer);
+  CMBlockBufferRef block_buffer =
+      CMSampleBufferGetDataBuffer(out_sample_buffer);
   size_t block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
   CMBlockBufferGetDataPointer(block_buffer, 0, nullptr, nullptr, &data_ptr);
   XCTAssertEqual(block_buffer_size, arraysize(annex_b_test_data));
 
-  int data_comparison_result =
-      memcmp(expected_cmsample_data, data_ptr, arraysize(expected_cmsample_data));
+  int data_comparison_result = memcmp(
+      expected_cmsample_data, data_ptr, arraysize(expected_cmsample_data));
   XCTAssertEqual(0, data_comparison_result);
@@ -277,18 +296,21 @@ static const uint8_t SPS_PPS_BUFFER[] = {
   rtc::Buffer annexb_buffer(arraysize(cmsample_data));
 
   CMSampleBufferRef sample_buffer =
-      [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
+      [self createCMSampleBufferRef:(void*)cmsample_data
+                       cmsampleSize:arraysize(cmsample_data)];
 
-  Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
-                                                            /* is_keyframe */ false,
-                                                            &annexb_buffer);
+  Boolean result =
+      webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
+                                               /* is_keyframe */ false,
+                                               &annexb_buffer);
   XCTAssertTrue(result);
   XCTAssertEqual(arraysize(expected_annex_b_data), annexb_buffer.size());
 
-  int data_comparison_result =
-      memcmp(expected_annex_b_data, annexb_buffer.data(), arraysize(expected_annex_b_data));
+  int data_comparison_result = memcmp(expected_annex_b_data,
+                                      annexb_buffer.data(),
+                                      arraysize(expected_annex_b_data));
   XCTAssertEqual(0, data_comparison_result);
 }
@@ -310,18 +332,22 @@ static const uint8_t SPS_PPS_BUFFER[] = {
   rtc::Buffer annexb_buffer(arraysize(cmsample_data));
 
   CMSampleBufferRef sample_buffer =
-      [self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
+      [self createCMSampleBufferRef:(void*)cmsample_data
+                       cmsampleSize:arraysize(cmsample_data)];
 
-  Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
-                                                            /* is_keyframe */ true,
-                                                            &annexb_buffer);
+  Boolean result =
+      webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
+                                               /* is_keyframe */ true,
+                                               &annexb_buffer);
   XCTAssertTrue(result);
   XCTAssertEqual(arraysize(SPS_PPS_BUFFER) + arraysize(expected_annex_b_data),
                  annexb_buffer.size());
-  XCTAssertEqual(0, memcmp(SPS_PPS_BUFFER, annexb_buffer.data(), arraysize(SPS_PPS_BUFFER)));
+  XCTAssertEqual(
+      0,
+      memcmp(SPS_PPS_BUFFER, annexb_buffer.data(), arraysize(SPS_PPS_BUFFER)));
   XCTAssertEqual(0,
                  memcmp(expected_annex_b_data,
@@ -331,12 +357,14 @@ static const uint8_t SPS_PPS_BUFFER[] = {
 
 - (CMVideoFormatDescriptionRef)createDescription {
   CMVideoFormatDescriptionRef description =
-      webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER));
+      webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER,
+                                           arraysize(SPS_PPS_BUFFER));
   XCTAssertTrue(description);
   return description;
 }
 
-- (CMSampleBufferRef)createCMSampleBufferRef:(void*)cmsampleData cmsampleSize:(size_t)cmsampleSize {
+- (CMSampleBufferRef)createCMSampleBufferRef:(void*)cmsampleData
+                                 cmsampleSize:(size_t)cmsampleSize {
   CMSampleBufferRef sample_buffer = nil;
   OSStatus status;
diff --git a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
index 3f1d60f2c8..3a8748e102 100644
--- a/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_decoder_factory_tests.mm
@@ -23,7 +23,8 @@
 #include "rtc_base/gunit.h"
 #import "sdk/objc/base/RTCMacros.h"
 
-id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(int return_code) {
+id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(
+    int return_code) {
   id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder)));
   OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code);
   OCMStub([decoderMock decode:[OCMArg any]
@@ -33,11 +34,14 @@ id CreateDecoderFactoryReturning(int retu
       .andReturn(return_code);
   OCMStub([decoderMock releaseDecoder]).andReturn(return_code);
 
-  id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
+  id decoderFactoryMock =
+      OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
   RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
-      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"
+                                                  parameters:nil];
   OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
-  OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock);
+  OCMStub([decoderFactoryMock createDecoder:[OCMArg any]])
+      .andReturn(decoderMock);
 
   return decoderFactoryMock;
 }
@@ -52,7 +56,8 @@ id CreateErrorDecoderFactory() {
 std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
     id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> factory) {
   webrtc::ObjCVideoDecoderFactory decoder_factory(factory);
-  return decoder_factory.Create(webrtc::CreateEnvironment(), webrtc::SdpVideoFormat::H264());
+  return decoder_factory.Create(webrtc::CreateEnvironment(),
+                                webrtc::SdpVideoFormat::H264());
 }
 
 #pragma mark -
@@ -63,21 +68,24 @@ std::unique_ptr GetObjCDecoder(
 @implementation ObjCVideoDecoderFactoryTests
 
 - (void)testConfigureReturnsTrueOnSuccess {
-  std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+  std::unique_ptr<webrtc::VideoDecoder> decoder =
+      GetObjCDecoder(CreateOKDecoderFactory());
   webrtc::VideoDecoder::Settings settings;
   EXPECT_TRUE(decoder->Configure(settings));
 }
 
 - (void)testConfigureReturnsFalseOnFail {
-  std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+  std::unique_ptr<webrtc::VideoDecoder> decoder =
+      GetObjCDecoder(CreateErrorDecoderFactory());
   webrtc::VideoDecoder::Settings settings;
   EXPECT_FALSE(decoder->Configure(settings));
 }
 
 - (void)testDecodeReturnsOKOnSuccess {
-  std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+  std::unique_ptr<webrtc::VideoDecoder> decoder =
+      GetObjCDecoder(CreateOKDecoderFactory());
   webrtc::EncodedImage encoded_image;
   encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
@@ -86,7 +94,8 @@ std::unique_ptr GetObjCDecoder(
 }
 
 - (void)testDecodeReturnsErrorOnFail {
-  std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+  std::unique_ptr<webrtc::VideoDecoder> decoder =
+      GetObjCDecoder(CreateErrorDecoderFactory());
   webrtc::EncodedImage encoded_image;
   encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
@@ -95,13 +104,15 @@ std::unique_ptr GetObjCDecoder(
 }
 
 - (void)testReleaseDecodeReturnsOKOnSuccess {
-  std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
+  std::unique_ptr<webrtc::VideoDecoder> decoder =
+      GetObjCDecoder(CreateOKDecoderFactory());
   EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_OK);
 }
 
 - (void)testReleaseDecodeReturnsErrorOnFail {
-  std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
+  std::unique_ptr<webrtc::VideoDecoder> decoder =
+      GetObjCDecoder(CreateErrorDecoderFactory());
   EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
 }
diff --git a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
index 2f469bb4a6..2c574f50e3 100644
--- a/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
+++ b/sdk/objc/unittests/objc_video_encoder_factory_tests.mm
@@ -26,21 +26,27 @@
 #include "rtc_base/gunit.h"
 #include "sdk/objc/native/src/objc_frame_buffer.h"
 
-id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(int return_code) {
+id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(
+    int return_code) {
   id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder)));
   OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1])
       .andReturn(return_code);
-  OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]])
+  OCMStub([encoderMock encode:[OCMArg any]
+            codecSpecificInfo:[OCMArg any]
+                   frameTypes:[OCMArg any]])
      .andReturn(return_code);
   OCMStub([encoderMock releaseEncoder]).andReturn(return_code);
   OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code);
 
-  id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
-  RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
-      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
+  id encoderFactoryMock =
+      OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
+  RTC_OBJC_TYPE(RTCVideoCodecInfo) *supported =
+      [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"
+                                                  parameters:nil];
   OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
   OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]);
-  OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock);
+  OCMStub([encoderFactoryMock createEncoder:[OCMArg any]])
+      .andReturn(encoderMock);
 
   return encoderFactoryMock;
 }
@@ -52,12 +58,14 @@ id CreateErrorEncoderFactory() {
   return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
 }
 
-@interface RTCVideoEncoderFactoryFake : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
+@interface RTCVideoEncoderFactoryFake
+    : NSObject <RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
 - (instancetype)init NS_UNAVAILABLE;
 - (instancetype)initWithScalabilityMode:(NSString *)scalabilityMode;
 - (instancetype)initWithScalabilityMode:(NSString *)scalabilityMode
-                       isPowerEfficient:(bool)isPowerEfficient NS_DESIGNATED_INITIALIZER;
+                       isPowerEfficient:(bool)isPowerEfficient
+    NS_DESIGNATED_INITIALIZER;
 @end
 
 @implementation RTCVideoEncoderFactoryFake
@@ -92,10 +100,12 @@ bool _isPowerEfficient;
                 scalabilityMode:(nullable NSString *)scalabilityMode {
   if (_scalabilityMode ?
          [_scalabilityMode isEqualToString:scalabilityMode] :
         scalabilityMode == nil) {
-    return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc] initWithSupported:true
-                                                                isPowerEfficient:_isPowerEfficient];
+    return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc]
+        initWithSupported:true
+         isPowerEfficient:_isPowerEfficient];
   } else {
-    return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc] initWithSupported:false];
+    return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc]
+        initWithSupported:false];
   }
 }
@@ -116,31 +126,41 @@ std::unique_ptr GetObjCEncoder(
 @implementation ObjCVideoEncoderFactoryTests
 
 - (void)testInitEncodeReturnsOKOnSuccess {
-  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+  std::unique_ptr<webrtc::VideoEncoder> encoder =
+      GetObjCEncoder(CreateOKEncoderFactory());
 
-  auto* settings = new webrtc::VideoCodec();
+  auto *settings = new webrtc::VideoCodec();
   const webrtc::VideoEncoder::Capabilities kCapabilities(false);
-  EXPECT_EQ(encoder->InitEncode(settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
+  EXPECT_EQ(encoder->InitEncode(
+                settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
             WEBRTC_VIDEO_CODEC_OK);
 }
 
 - (void)testInitEncodeReturnsErrorOnFail {
-  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+  std::unique_ptr<webrtc::VideoEncoder> encoder =
+      GetObjCEncoder(CreateErrorEncoderFactory());
 
-  auto* settings = new webrtc::VideoCodec();
+  auto *settings = new webrtc::VideoCodec();
   const webrtc::VideoEncoder::Capabilities kCapabilities(false);
-  EXPECT_EQ(encoder->InitEncode(settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
+  EXPECT_EQ(encoder->InitEncode(
+                settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
            WEBRTC_VIDEO_CODEC_ERROR);
 }
 
 - (void)testEncodeReturnsOKOnSuccess {
-  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+  std::unique_ptr<webrtc::VideoEncoder> encoder =
+      GetObjCEncoder(CreateOKEncoderFactory());
 
   CVPixelBufferRef pixel_buffer;
-  CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
+  CVPixelBufferCreate(kCFAllocatorDefault,
+                      640,
+                      480,
+                      kCVPixelFormatType_32ARGB,
+                      nil,
+                      &pixel_buffer);
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
-          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
+      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>([[RTC_OBJC_TYPE(
+          RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
                                  .set_rotation(webrtc::kVideoRotation_0)
@@ -152,13 +172,19 @@ std::unique_ptr GetObjCEncoder(
 }
 
 - (void)testEncodeReturnsErrorOnFail {
-  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+  std::unique_ptr<webrtc::VideoEncoder> encoder =
+      GetObjCEncoder(CreateErrorEncoderFactory());
 
   CVPixelBufferRef pixel_buffer;
-  CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
+  CVPixelBufferCreate(kCFAllocatorDefault,
+                      640,
+                      480,
+                      kCVPixelFormatType_32ARGB,
+                      nil,
+                      &pixel_buffer);
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
-      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
-          [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
+      rtc::make_ref_counted<webrtc::ObjCFrameBuffer>([[RTC_OBJC_TYPE(
+          RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
   webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
                                  .set_rotation(webrtc::kVideoRotation_0)
@@ -170,19 +196,22 @@ std::unique_ptr GetObjCEncoder(
 }
 
 - (void)testReleaseEncodeReturnsOKOnSuccess {
-  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
+  std::unique_ptr<webrtc::VideoEncoder> encoder =
+      GetObjCEncoder(CreateOKEncoderFactory());
 
   EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK);
 }
 
 - (void)testReleaseEncodeReturnsErrorOnFail {
-  std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
+  std::unique_ptr<webrtc::VideoEncoder> encoder =
+      GetObjCEncoder(CreateErrorEncoderFactory());
 
   EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
 }
 
 - (void)testQueryCodecSupportDelegatesToObjcFactoryConvertsNulloptModeToNil {
-  id fakeEncoderFactory = [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:nil];
+  id fakeEncoderFactory =
+      [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:nil];
   webrtc::SdpVideoFormat codec("VP8");
 
   webrtc::ObjCVideoEncoderFactory encoder_factory(fakeEncoderFactory);
@@ -193,7 +222,8 @@ std::unique_ptr GetObjCEncoder(
 }
 
 - (void)testQueryCodecSupportDelegatesToObjcFactoryMayReturnUnsupported {
-  id fakeEncoderFactory = [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"];
+  id fakeEncoderFactory =
+      [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"];
   webrtc::SdpVideoFormat codec("VP8");
 
   webrtc::ObjCVideoEncoderFactory encoder_factory(fakeEncoderFactory);
@@ -201,26 +231,30 @@ std::unique_ptr GetObjCEncoder(
 }
 
 - (void)testQueryCodecSupportDelegatesToObjcFactoryIncludesPowerEfficientFlag {
-  id fakeEncoderFactory = [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"
-                                                                     isPowerEfficient:true];
+  id fakeEncoderFactory =
+      [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"
+                                                  isPowerEfficient:true];
   webrtc::SdpVideoFormat codec("VP8");
 
   webrtc::ObjCVideoEncoderFactory encoder_factory(fakeEncoderFactory);
-  webrtc::VideoEncoderFactory::CodecSupport s = encoder_factory.QueryCodecSupport(codec, "L1T2");
+  webrtc::VideoEncoderFactory::CodecSupport s =
+      encoder_factory.QueryCodecSupport(codec, "L1T2");
   EXPECT_TRUE(s.is_supported);
   EXPECT_TRUE(s.is_power_efficient);
 }
 
 - (void)testGetSupportedFormats {
   webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
-  std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetSupportedFormats();
+  std::vector<webrtc::SdpVideoFormat> supportedFormats =
+      encoder_factory.GetSupportedFormats();
   EXPECT_EQ(supportedFormats.size(), 1u);
   EXPECT_EQ(supportedFormats[0].name, "H264");
 }
 
 - (void)testGetImplementations {
   webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
-  std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetImplementations();
+  std::vector<webrtc::SdpVideoFormat> supportedFormats =
+      encoder_factory.GetImplementations();
   EXPECT_EQ(supportedFormats.size(), 1u);
   EXPECT_EQ(supportedFormats[0].name, "H264");
 }
diff --git a/sdk/objc/unittests/scoped_cftyperef_tests.mm b/sdk/objc/unittests/scoped_cftyperef_tests.mm
index a354410ede..c8057ddde3 100644
--- a/sdk/objc/unittests/scoped_cftyperef_tests.mm
+++ b/sdk/objc/unittests/scoped_cftyperef_tests.mm
@@ -33,7 +33,8 @@ struct TestTypeTraits {
 };
 }  // namespace
 
-using ScopedTestType = rtc::internal::ScopedTypeRef<TestTypeRef, TestTypeTraits>;
+using ScopedTestType =
+    rtc::internal::ScopedTypeRef<TestTypeRef, TestTypeTraits>;
 
 // In these tests we sometime introduce variables just to
 // observe side-effects. Ignore the compilers complaints.