Format the rest of /sdk/objc folder

I already submitted separate CLs for /api and /components; here I'm
doing the rest of this folder.

Formatting done via:

git ls-files | grep -E '^sdk\/objc\/.*\.(h|cc|mm)' | grep -Ev '^sdk\/objc\/(api|components).*' | xargs clang-format -i
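
For context, clang-format -i rewrites the listed files in place using the
.clang-format file found at the repo root (the default -style=file lookup),
which is what produces the 80-column wrapping visible in the hunks below. A
minimal sketch of such a config, assuming a Chromium-derived style
(hypothetical; the checked-in .clang-format is authoritative and may differ):

# Hypothetical sketch only; the repo's checked-in .clang-format governs.
BasedOnStyle: Chromium
ColumnLimit: 80

To verify a tree without rewriting it, the same file list can be piped to
clang-format --dry-run -Werror instead of clang-format -i.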

No-Iwyu: Includes didn't change and it isn't related to formatting
Bug: webrtc:42225392
Change-Id: I8bb63f6696b2649ea642b900b9d9827980cc8315
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/373884
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Commit-Queue: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43696}
Authored by Boris Tsirkin on 2025-01-08 05:55:05 -08:00; committed by WebRTC LUCI CQ
parent c447f3c52e
commit 7b6c887ffd
60 changed files with 2602 additions and 1854 deletions


@@ -47,13 +47,17 @@ RTC_EXTERN NSString* RTCFileName(const char* filePath);
RTCLogEx(severity, log_string); \
} while (false)
#define RTCLogVerbose(format, ...) RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__)
#define RTCLogVerbose(format, ...) \
RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__)
#define RTCLogInfo(format, ...) RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__)
#define RTCLogInfo(format, ...) \
RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__)
#define RTCLogWarning(format, ...) RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__)
#define RTCLogWarning(format, ...) \
RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__)
#define RTCLogError(format, ...) RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__)
#define RTCLogError(format, ...) \
RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__)
#if !defined(NDEBUG)
#define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__)


@@ -49,7 +49,8 @@
// Macro used internally to declare API types. Declaring an API type without
// using this macro will not include the declared type in the set of types
// that will be affected by the configurable RTC_OBJC_TYPE_PREFIX.
#define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)
#define RTC_OBJC_TYPE(type_name) \
RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)
#if defined(__cplusplus)
#define RTC_EXTERN extern "C" RTC_OBJC_EXPORT


@@ -18,6 +18,7 @@ NS_ASSUME_NONNULL_BEGIN
/** Extension of the I420 buffer with mutable data access */
RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
(RTCMutableI420Buffer)<RTC_OBJC_TYPE(RTCI420Buffer), RTC_OBJC_TYPE(RTCMutableYUVPlanarBuffer)> @end
(RTCMutableI420Buffer)<RTC_OBJC_TYPE(RTCI420Buffer),
RTC_OBJC_TYPE(RTCMutableYUVPlanarBuffer)> @end
NS_ASSUME_NONNULL_END


@@ -19,7 +19,8 @@ NS_ASSUME_NONNULL_BEGIN
RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
(RTCVideoCapturerDelegate)<NSObject> -
(void)capturer : (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame
(void)capturer
: (RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer didCaptureVideoFrame
: (RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
@end
@@ -28,7 +29,8 @@ RTC_OBJC_EXPORT
@property(nonatomic, weak) id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)> delegate;
- (instancetype)initWithDelegate:(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate;
- (instancetype)initWithDelegate:
(id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate;
@end


@@ -14,7 +14,8 @@
NS_ASSUME_NONNULL_BEGIN
/** Holds information to identify a codec. Corresponds to webrtc::SdpVideoFormat. */
/** Holds information to identify a codec. Corresponds to
* webrtc::SdpVideoFormat. */
RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCVideoCodecInfo) : NSObject <NSCoding>
@@ -23,11 +24,13 @@ RTC_OBJC_EXPORT
- (instancetype)initWithName:(NSString *)name;
- (instancetype)initWithName:(NSString *)name
parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters;
parameters:(nullable NSDictionary<NSString *, NSString *> *)
parameters;
- (instancetype)initWithName:(NSString *)name
parameters:(NSDictionary<NSString *, NSString *> *)parameters
scalabilityModes:(NSArray<NSString *> *)scalabilityModes NS_DESIGNATED_INITIALIZER;
scalabilityModes:(NSArray<NSString *> *)scalabilityModes
NS_DESIGNATED_INITIALIZER;
- (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;


@@ -16,7 +16,8 @@
NS_ASSUME_NONNULL_BEGIN
/** RTCVideoDecoderFactory is an Objective-C version of webrtc::VideoDecoderFactory.
/** RTCVideoDecoderFactory is an Objective-C version of
* webrtc::VideoDecoderFactory.
*/
RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE


@@ -20,8 +20,9 @@
NS_ASSUME_NONNULL_BEGIN
/** Callback block for encoder. */
typedef BOOL (^RTCVideoEncoderCallback)(RTC_OBJC_TYPE(RTCEncodedImage) * frame,
id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> info);
typedef BOOL (^RTCVideoEncoderCallback)(
RTC_OBJC_TYPE(RTCEncodedImage) * frame,
id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> info);
/** Protocol for encoder implementations. */
RTC_OBJC_EXPORT
@@ -29,7 +30,8 @@ RTC_OBJC_EXPORT
(RTCVideoEncoder)<NSObject>
- (void)setCallback : (nullable RTCVideoEncoderCallback)callback;
- (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
- (NSInteger)startEncodeWithSettings:
(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings
numberOfCores:(int)numberOfCores;
- (NSInteger)releaseEncoder;
- (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame
@@ -38,20 +40,21 @@ RTC_OBJC_EXPORT
- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate;
- (NSString *)implementationName;
/** Returns QP scaling settings for encoder. The quality scaler adjusts the resolution in order to
* keep the QP from the encoded images within the given range. Returning nil from this function
* disables quality scaling. */
/** Returns QP scaling settings for encoder. The quality scaler adjusts the
* resolution in order to keep the QP from the encoded images within the given
* range. Returning nil from this function disables quality scaling. */
- (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings;
/** Resolutions should be aligned to this value. */
@property(nonatomic, readonly) NSInteger resolutionAlignment;
/** If enabled, resolution alignment is applied to all simulcast layers simultaneously so that when
scaled, all resolutions comply with 'resolutionAlignment'. */
/** If enabled, resolution alignment is applied to all simulcast layers
simultaneously so that when scaled, all resolutions comply with
'resolutionAlignment'. */
@property(nonatomic, readonly) BOOL applyAlignmentToAllSimulcastLayers;
/** If YES, the receiver is expected to resample/scale the source texture to the expected output
size. */
/** If YES, the receiver is expected to resample/scale the source texture to the
expected output size. */
@property(nonatomic, readonly) BOOL supportsNativeHandle;
@end


@@ -23,12 +23,15 @@ RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
(RTCVideoEncoderSelector)<NSObject>
- (void)registerCurrentEncoderInfo : (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:(NSInteger)bitrate;
- (void)registerCurrentEncoderInfo
: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info;
- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBitrate:
(NSInteger)bitrate;
- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForBrokenEncoder;
@optional
- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForResolutionChangeBySize:(CGSize)size;
- (nullable RTC_OBJC_TYPE(RTCVideoCodecInfo) *)encoderForResolutionChangeBySize:
(CGSize)size;
@end
@@ -41,14 +44,16 @@ RTC_OBJC_EXPORT
- (instancetype)initWithSupported:(bool)isSupported;
- (instancetype)initWithSupported:(bool)isSupported
isPowerEfficient:(bool)isPowerEfficient NS_DESIGNATED_INITIALIZER;
isPowerEfficient:(bool)isPowerEfficient
NS_DESIGNATED_INITIALIZER;
@property(nonatomic, readonly) bool isSupported;
@property(nonatomic, readonly) bool isPowerEfficient;
@end
/** RTCVideoEncoderFactory is an Objective-C version of webrtc::VideoEncoderFactory.
/** RTCVideoEncoderFactory is an Objective-C version of
* webrtc::VideoEncoderFactory.
*/
RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
@@ -62,7 +67,8 @@ RTC_OBJC_EXPORT
@optional
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)implementations;
- (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)>)encoderSelector;
/* TODO: b/299588022 - move to non-optional section when implemented by all derived classes. */
/* TODO: b/299588022 - move to non-optional section when implemented by all
* derived classes. */
- (RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) *)
queryCodecSupport:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info
scalabilityMode:(nullable NSString *)scalabilityMode;


@@ -20,7 +20,8 @@
return [self initWithSupported:isSupported isPowerEfficient:false];
}
- (instancetype)initWithSupported:(bool)isSupported isPowerEfficient:(bool)isPowerEfficient {
- (instancetype)initWithSupported:(bool)isSupported
isPowerEfficient:(bool)isPowerEfficient {
self = [super init];
if (self) {
_isSupported = isSupported;


@@ -14,7 +14,8 @@
NS_ASSUME_NONNULL_BEGIN
/** QP thresholds for encoder. Corresponds to webrtc::VideoEncoder::QpThresholds. */
/** QP thresholds for encoder. Corresponds to
* webrtc::VideoEncoder::QpThresholds. */
RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCVideoEncoderQpThresholds) : NSObject


@@ -49,7 +49,8 @@ RTC_OBJC_EXPORT
/** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp.
*/
- (instancetype)initWithBuffer:(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)frameBuffer
- (instancetype)initWithBuffer:
(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)>)frameBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs;


@@ -35,7 +35,8 @@ RTC_OBJC_EXPORT
@protocol RTC_OBJC_TYPE
(RTCVideoViewDelegate)
- (void)videoView : (id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize
- (void)videoView
: (id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize
: (CGSize)size;
@end


@@ -16,7 +16,8 @@ NS_ASSUME_NONNULL_BEGIN
@interface AVCaptureSession (DevicePosition)
// Check the image's EXIF for the camera the image came from.
+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:
(CMSampleBufferRef)sampleBuffer;
@end


@@ -20,7 +20,8 @@ BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) {
@implementation AVCaptureSession (DevicePosition)
+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
+ (AVCaptureDevicePosition)devicePositionForSampleBuffer:
(CMSampleBufferRef)sampleBuffer {
// Check the image's EXIF for the camera the image came from.
AVCaptureDevicePosition cameraPosition = AVCaptureDevicePositionUnspecified;
CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(
@@ -29,8 +30,9 @@ BOOL CFStringContainsString(CFStringRef theString, CFStringRef stringToFind) {
int size = CFDictionaryGetCount(attachments);
if (size > 0) {
CFDictionaryRef cfExifDictVal = nil;
if (CFDictionaryGetValueIfPresent(
attachments, (const void *)CFSTR("{Exif}"), (const void **)&cfExifDictVal)) {
if (CFDictionaryGetValueIfPresent(attachments,
(const void *)CFSTR("{Exif}"),
(const void **)&cfExifDictVal)) {
CFStringRef cfLensModelStrVal;
if (CFDictionaryGetValueIfPresent(cfExifDictVal,
(const void *)CFSTR("LensModel"),


@@ -24,7 +24,7 @@
charData.length);
}
+ (NSString *)stringForStdString:(const std::string&)stdString {
+ (NSString *)stringForStdString:(const std::string &)stdString {
// std::string may contain null termination character so we construct
// using length.
return [[NSString alloc] initWithBytes:stdString.data()


@@ -13,6 +13,7 @@
@interface RTC_OBJC_TYPE (RTCDispatcher)
()
+ (dispatch_queue_t)dispatchQueueForType : (RTCDispatcherQueueType)dispatchType;
+ (dispatch_queue_t)dispatchQueueForType
: (RTCDispatcherQueueType)dispatchType;
@end


@@ -36,7 +36,8 @@ RTC_OBJC_EXPORT
* @param dispatchType The queue type to dispatch on.
* @param block The block to dispatch asynchronously.
*/
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType block:(dispatch_block_t)block;
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
block:(dispatch_block_t)block;
/** Returns YES if run on queue for the dispatchType otherwise NO.
* Useful for asserting that a method is run on a correct queue.


@@ -17,24 +17,30 @@
namespace webrtc {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(bool bypass_voice_processing) {
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
bool bypass_voice_processing) {
RTC_DLOG(LS_INFO) << __FUNCTION__;
#if defined(WEBRTC_IOS)
return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing, nullptr);
return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(
bypass_voice_processing, nullptr);
#else
RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!";
RTC_LOG(LS_ERROR)
<< "current platform is not supported => this module will self destruct!";
return nullptr;
#endif
}
rtc::scoped_refptr<AudioDeviceModule> CreateMutedDetectAudioDeviceModule(
AudioDeviceModule::MutedSpeechEventHandler handler, bool bypass_voice_processing) {
AudioDeviceModule::MutedSpeechEventHandler handler,
bool bypass_voice_processing) {
RTC_DLOG(LS_INFO) << __FUNCTION__;
#if defined(WEBRTC_IOS)
return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(bypass_voice_processing, handler);
return rtc::make_ref_counted<ios_adm::AudioDeviceModuleIOS>(
bypass_voice_processing, handler);
#else
RTC_LOG(LS_ERROR) << "current platform is not supported => this module will self destruct!";
RTC_LOG(LS_ERROR)
<< "current platform is not supported => this module will self destruct!";
return nullptr;
#endif
}
}
} // namespace webrtc


@@ -27,4 +27,4 @@ std::unique_ptr<rtc::NetworkMonitorFactory> CreateNetworkMonitorFactory() {
#endif
}
}
} // namespace webrtc


@@ -26,8 +26,9 @@ class SSLCertificateVerifierAdapter final : public rtc::SSLCertificateVerifier {
@autoreleasepool {
rtc::Buffer der_buffer;
certificate.ToDER(&der_buffer);
NSData* serialized_certificate = [[NSData alloc] initWithBytes:der_buffer.data()
length:der_buffer.size()];
NSData* serialized_certificate =
[[NSData alloc] initWithBytes:der_buffer.data()
length:der_buffer.size()];
return [objc_certificate_verifier_ verify:serialized_certificate];
}
}
@@ -36,13 +37,14 @@ class SSLCertificateVerifierAdapter final : public rtc::SSLCertificateVerifier {
id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier_;
};
}
} // namespace
namespace webrtc {
std::unique_ptr<rtc::SSLCertificateVerifier> ObjCToNativeCertificateVerifier(
id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)> objc_certificate_verifier) {
return std::make_unique<SSLCertificateVerifierAdapter>(objc_certificate_verifier);
return std::make_unique<SSLCertificateVerifierAdapter>(
objc_certificate_verifier);
}
} // namespace webrtc


@@ -123,7 +123,8 @@ AudioDeviceIOS::AudioDeviceIOS(
io_thread_checker_.Detach();
thread_ = rtc::Thread::Current();
audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this];
audio_session_observer_ =
[[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this];
mach_timebase_info_data_t tinfo;
mach_timebase_info(&tinfo);
machTickUnitsToNanoseconds_ = (double)tinfo.numer / tinfo.denom;
@@ -240,12 +241,15 @@ int32_t AudioDeviceIOS::StartPlayout() {
if (fine_audio_buffer_) {
fine_audio_buffer_->ResetPlayout();
}
if (!recording_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
if (!recording_.load() &&
audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
OSStatus result = audio_unit_->Start();
if (result != noErr) {
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session notifyAudioUnitStartFailedWithError:result];
RTCLogError(@"StartPlayout failed to start audio unit, reason %d", result);
RTCLogError(@"StartPlayout failed to start audio unit, reason %d",
result);
return -1;
}
RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
@@ -279,8 +283,9 @@ int32_t AudioDeviceIOS::StopPlayout() {
average_number_of_playout_callbacks_between_glitches =
num_playout_callbacks_ / num_detected_playout_glitches;
}
RTC_HISTOGRAM_COUNTS_100000("WebRTC.Audio.AveragePlayoutCallbacksBetweenGlitches",
average_number_of_playout_callbacks_between_glitches);
RTC_HISTOGRAM_COUNTS_100000(
"WebRTC.Audio.AveragePlayoutCallbacksBetweenGlitches",
average_number_of_playout_callbacks_between_glitches);
RTCLog(@"Average number of playout callbacks between glitches: %d",
average_number_of_playout_callbacks_between_glitches);
return 0;
@@ -299,12 +304,15 @@ int32_t AudioDeviceIOS::StartRecording() {
if (fine_audio_buffer_) {
fine_audio_buffer_->ResetRecord();
}
if (!playing_.load() && audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
if (!playing_.load() &&
audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) {
OSStatus result = audio_unit_->Start();
if (result != noErr) {
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session notifyAudioUnitStartFailedWithError:result];
RTCLogError(@"StartRecording failed to start audio unit, reason %d", result);
RTCLogError(@"StartRecording failed to start audio unit, reason %d",
result);
return -1;
}
RTC_LOG(LS_INFO) << "Voice-Processing I/O audio unit is now started";
@@ -372,8 +380,9 @@ void AudioDeviceIOS::OnValidRouteChange() {
void AudioDeviceIOS::OnCanPlayOrRecordChange(bool can_play_or_record) {
RTC_DCHECK(thread_);
thread_->PostTask(SafeTask(
safety_, [this, can_play_or_record] { HandleCanPlayOrRecordChange(can_play_or_record); }));
thread_->PostTask(SafeTask(safety_, [this, can_play_or_record] {
HandleCanPlayOrRecordChange(can_play_or_record);
}));
}
void AudioDeviceIOS::OnChangedOutputVolume() {
@@ -381,11 +390,12 @@ void AudioDeviceIOS::OnChangedOutputVolume() {
thread_->PostTask(SafeTask(safety_, [this] { HandleOutputVolumeChange(); }));
}
OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
const AudioTimeStamp* time_stamp,
UInt32 bus_number,
UInt32 num_frames,
AudioBufferList* /* io_data */) {
OSStatus AudioDeviceIOS::OnDeliverRecordedData(
AudioUnitRenderActionFlags* flags,
const AudioTimeStamp* time_stamp,
UInt32 bus_number,
UInt32 num_frames,
AudioBufferList* /* io_data */) {
RTC_DCHECK_RUN_ON(&io_thread_checker_);
OSStatus result = noErr;
// Simply return if recording is not enabled.
@@ -400,7 +410,8 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags
// Get audio timestamp for the audio.
// The timestamp will not have NTP time epoch, but that will be addressed by
// the TimeStampAligner in AudioDeviceBuffer::SetRecordedBuffer().
SInt64 capture_timestamp_ns = time_stamp->mHostTime * machTickUnitsToNanoseconds_;
SInt64 capture_timestamp_ns =
time_stamp->mHostTime * machTickUnitsToNanoseconds_;
// Allocate AudioBuffers to be used as storage for the received audio.
// The AudioBufferList structure works as a placeholder for the
@@ -421,7 +432,8 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags
// We can make the audio unit provide a buffer instead in io_data, but we
// currently just use our own.
// TODO(henrika): should error handling be improved?
result = audio_unit_->Render(flags, time_stamp, bus_number, num_frames, &audio_buffer_list);
result = audio_unit_->Render(
flags, time_stamp, bus_number, num_frames, &audio_buffer_list);
if (result != noErr) {
RTCLogError(@"Failed to render audio.");
return result;
@@ -450,7 +462,8 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
// activated.
if (!playing_.load(std::memory_order_acquire)) {
const size_t size_in_bytes = audio_buffer->mDataByteSize;
RTC_CHECK_EQ(size_in_bytes / VoiceProcessingAudioUnit::kBytesPerSample, num_frames);
RTC_CHECK_EQ(size_in_bytes / VoiceProcessingAudioUnit::kBytesPerSample,
num_frames);
*flags |= kAudioUnitRenderAction_OutputIsSilence;
memset(static_cast<int8_t*>(audio_buffer->mData), 0, size_in_bytes);
return noErr;
@@ -464,7 +477,8 @@
const int64_t now_time = rtc::TimeMillis();
if (time_stamp->mSampleTime != num_frames) {
const int64_t delta_time = now_time - last_playout_time_;
const int glitch_threshold = 1.6 * playout_parameters_.GetBufferSizeInMilliseconds();
const int glitch_threshold =
1.6 * playout_parameters_.GetBufferSizeInMilliseconds();
if (delta_time > glitch_threshold) {
RTCLogWarning(@"Possible playout audio glitch detected.\n"
" Time since last OnGetPlayoutData was %lld ms.\n",
@@ -493,9 +507,11 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
if (last_hw_output_latency_update_sample_count_ >=
playout_parameters_.sample_rate() * kHwLatencyUpdatePeriodSeconds) {
// We update the hardware output latency every kHwLatencyUpdatePeriodSeconds seconds.
hw_output_latency_.store([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].outputLatency,
std::memory_order_relaxed);
// We update the hardware output latency every kHwLatencyUpdatePeriodSeconds
// seconds.
hw_output_latency_.store(
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].outputLatency,
std::memory_order_relaxed);
last_hw_output_latency_update_sample_count_ = 0;
}
double output_latency_ = hw_output_latency_.load(std::memory_order_relaxed) +
@@ -505,19 +521,22 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
// the native I/O audio unit) and copy the result to the audio buffer in the
// `io_data` destination.
fine_audio_buffer_->GetPlayoutData(
rtc::ArrayView<int16_t>(static_cast<int16_t*>(audio_buffer->mData), num_frames),
rtc::ArrayView<int16_t>(static_cast<int16_t*>(audio_buffer->mData),
num_frames),
playout_delay_ms);
last_hw_output_latency_update_sample_count_ += num_frames;
total_playout_samples_count_.fetch_add(num_frames, std::memory_order_relaxed);
total_playout_samples_duration_ms_.fetch_add(
num_frames * 1000 / playout_parameters_.sample_rate(), std::memory_order_relaxed);
num_frames * 1000 / playout_parameters_.sample_rate(),
std::memory_order_relaxed);
total_playout_delay_ms_.fetch_add(output_latency_ * kSecondToMs * num_frames,
std::memory_order_relaxed);
return noErr;
}
void AudioDeviceIOS::OnReceivedMutedSpeechActivity(AUVoiceIOSpeechActivityEvent event) {
void AudioDeviceIOS::OnReceivedMutedSpeechActivity(
AUVoiceIOSpeechActivityEvent event) {
RTCLog(@"Received muted speech activity %d.", event);
if (muted_speech_event_handler_ != 0) {
if (event == kAUVoiceIOSpeechActivityHasStarted) {
@@ -530,8 +549,10 @@ void AudioDeviceIOS::OnReceivedMutedSpeechActivity(AUVoiceIOSpeechActivityEvent
void AudioDeviceIOS::HandleInterruptionBegin() {
RTC_DCHECK_RUN_ON(thread_);
RTCLog(@"Interruption begin. IsInterrupted changed from %d to 1.", is_interrupted_);
if (audio_unit_ && audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
RTCLog(@"Interruption begin. IsInterrupted changed from %d to 1.",
is_interrupted_);
if (audio_unit_ &&
audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
RTCLog(@"Stopping the audio unit due to interruption begin.");
if (!audio_unit_->Stop()) {
RTCLogError(@"Failed to stop the audio unit for interruption begin.");
@@ -549,8 +570,8 @@ void AudioDeviceIOS::HandleInterruptionEnd() {
is_interrupted_ = false;
if (!audio_unit_) return;
if (webrtc::field_trial::IsEnabled("WebRTC-Audio-iOS-Holding")) {
// Work around an issue where audio does not restart properly after an interruption
// by restarting the audio unit when the interruption ends.
// Work around an issue where audio does not restart properly after an
// interruption by restarting the audio unit when the interruption ends.
if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
audio_unit_->Stop();
PrepareForNewStart();
@@ -561,12 +582,14 @@
// Allocate new buffers given the potentially new stream format.
SetupAudioBuffersForActiveAudioSession();
}
UpdateAudioUnit([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord);
UpdateAudioUnit(
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance].canPlayOrRecord);
}
void AudioDeviceIOS::HandleValidRouteChange() {
RTC_DCHECK_RUN_ON(thread_);
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTCLog(@"%@", session);
HandleSampleRateChange();
}
@@ -588,19 +611,22 @@ void AudioDeviceIOS::HandleSampleRateChange() {
// If we don't have an audio unit yet, or the audio unit is uninitialized,
// there is no work to do.
if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) {
if (!audio_unit_ ||
audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) {
return;
}
// The audio unit is already initialized or started.
// Check to see if the sample rate or buffer size has changed.
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
const double new_sample_rate = session.sampleRate;
const NSTimeInterval session_buffer_duration = session.IOBufferDuration;
const size_t new_frames_per_buffer =
static_cast<size_t>(new_sample_rate * session_buffer_duration + .5);
const double current_sample_rate = playout_parameters_.sample_rate();
const size_t current_frames_per_buffer = playout_parameters_.frames_per_buffer();
const size_t current_frames_per_buffer =
playout_parameters_.frames_per_buffer();
RTCLog(@"Handling playout sample rate change:\n"
" Session sample rate: %f frames_per_buffer: %lu\n"
" ADM sample rate: %f frames_per_buffer: %lu",
@@ -652,7 +678,8 @@ void AudioDeviceIOS::HandleSampleRateChange() {
if (restart_audio_unit) {
OSStatus result = audio_unit_->Start();
if (result != noErr) {
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session notifyAudioUnitStartFailedWithError:result];
RTCLogError(@"Failed to start audio unit with sample rate: %d, reason %d",
playout_parameters_.sample_rate(),
@@ -679,12 +706,15 @@ void AudioDeviceIOS::HandlePlayoutGlitchDetected(uint64_t glitch_duration_ms) {
return;
}
num_detected_playout_glitches_++;
total_playout_glitches_duration_ms_.fetch_add(glitch_duration_ms, std::memory_order_relaxed);
uint64_t glitch_count = num_detected_playout_glitches_.load(std::memory_order_acquire);
total_playout_glitches_duration_ms_.fetch_add(glitch_duration_ms,
std::memory_order_relaxed);
uint64_t glitch_count =
num_detected_playout_glitches_.load(std::memory_order_acquire);
RTCLog(@"Number of detected playout glitches: %lld", glitch_count);
dispatch_async(dispatch_get_main_queue(), ^{
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session notifyDidDetectPlayoutGlitch:glitch_count];
});
}
@@ -709,14 +739,16 @@ void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
// Inform the audio device buffer (ADB) about the new audio format.
audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate());
audio_device_buffer_->SetRecordingSampleRate(
record_parameters_.sample_rate());
audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
}
void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
LOGI() << "SetupAudioBuffersForActiveAudioSession";
// Verify the current values once the audio session has been activated.
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
double sample_rate = session.sampleRate;
NSTimeInterval io_buffer_duration = session.IOBufferDuration;
RTCLog(@"%@", session);
@@ -738,7 +770,8 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
if (sample_rate <= DBL_EPSILON && playout_parameters_.sample_rate() > 0) {
RTCLogError(@"Reported rate is invalid: %f. "
"Using %d as sample rate instead.",
sample_rate, playout_parameters_.sample_rate());
sample_rate,
playout_parameters_.sample_rate());
sample_rate = playout_parameters_.sample_rate();
}
@@ -747,13 +780,18 @@
// number of audio frames.
// Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz.
// Hence, 128 is the size we expect to see in upcoming render callbacks.
playout_parameters_.reset(sample_rate, playout_parameters_.channels(), io_buffer_duration);
playout_parameters_.reset(
sample_rate, playout_parameters_.channels(), io_buffer_duration);
RTC_DCHECK(playout_parameters_.is_complete());
record_parameters_.reset(sample_rate, record_parameters_.channels(), io_buffer_duration);
record_parameters_.reset(
sample_rate, record_parameters_.channels(), io_buffer_duration);
RTC_DCHECK(record_parameters_.is_complete());
RTC_LOG(LS_INFO) << " frames per I/O buffer: " << playout_parameters_.frames_per_buffer();
RTC_LOG(LS_INFO) << " bytes per I/O buffer: " << playout_parameters_.GetBytesPerBuffer();
RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(), record_parameters_.GetBytesPerBuffer());
RTC_LOG(LS_INFO) << " frames per I/O buffer: "
<< playout_parameters_.frames_per_buffer();
RTC_LOG(LS_INFO) << " bytes per I/O buffer: "
<< playout_parameters_.GetBytesPerBuffer();
RTC_DCHECK_EQ(playout_parameters_.GetBytesPerBuffer(),
record_parameters_.GetBytesPerBuffer());
// Update the ADB parameters since the sample rate might have changed.
UpdateAudioDeviceBuffer();
@@ -768,8 +806,8 @@
bool AudioDeviceIOS::CreateAudioUnit() {
RTC_DCHECK(!audio_unit_);
BOOL detect_mute_speech_ = (muted_speech_event_handler_ != 0);
audio_unit_.reset(
new VoiceProcessingAudioUnit(bypass_voice_processing_, detect_mute_speech_, this));
audio_unit_.reset(new VoiceProcessingAudioUnit(
bypass_voice_processing_, detect_mute_speech_, this));
if (!audio_unit_->Init()) {
audio_unit_.reset();
return false;
@@ -809,12 +847,13 @@ void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) {
case VoiceProcessingAudioUnit::kUninitialized:
RTCLog(@"VPAU state: Uninitialized");
should_initialize_audio_unit = can_play_or_record;
should_start_audio_unit =
should_initialize_audio_unit && (playing_.load() || recording_.load());
should_start_audio_unit = should_initialize_audio_unit &&
(playing_.load() || recording_.load());
break;
case VoiceProcessingAudioUnit::kInitialized:
RTCLog(@"VPAU state: Initialized");
should_start_audio_unit = can_play_or_record && (playing_.load() || recording_.load());
should_start_audio_unit =
can_play_or_record && (playing_.load() || recording_.load());
should_uninitialize_audio_unit = !can_play_or_record;
break;
case VoiceProcessingAudioUnit::kStarted:
@@ -838,7 +877,8 @@ void AudioDeviceIOS::UpdateAudioUnit(bool can_play_or_record) {
if (should_start_audio_unit) {
RTCLog(@"Starting audio unit for UpdateAudioUnit");
// Log session settings before trying to start audio streaming.
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTCLog(@"%@", session);
OSStatus result = audio_unit_->Start();
if (result != noErr) {
@@ -872,7 +912,8 @@ bool AudioDeviceIOS::ConfigureAudioSession() {
RTCLogWarning(@"Audio session already configured.");
return false;
}
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session lockForConfiguration];
bool success = [session configureWebRTCSession:nil];
[session unlockForConfiguration];
@@ -892,7 +933,8 @@ bool AudioDeviceIOS::ConfigureAudioSessionLocked() {
RTCLogWarning(@"Audio session already configured.");
return false;
}
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
bool success = [session configureWebRTCSession:nil];
if (success) {
has_configured_session_ = true;
@@ -910,7 +952,8 @@ void AudioDeviceIOS::UnconfigureAudioSession() {
RTCLogWarning(@"Audio session already unconfigured.");
return;
}
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session lockForConfiguration];
[session unconfigureWebRTCSession:nil];
[session endWebRTCSession:nil];
@@ -928,7 +971,8 @@ bool AudioDeviceIOS::InitPlayOrRecord() {
return false;
}
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
// Subscribe to audio session events.
[session pushDelegate:audio_session_observer_];
is_interrupted_ = session.isInterrupted ? true : false;
@@ -938,7 +982,8 @@ bool AudioDeviceIOS::InitPlayOrRecord() {
NSError* error = nil;
if (![session beginWebRTCSession:&error]) {
[session unlockForConfiguration];
RTCLogError(@"Failed to begin WebRTC session: %@", error.localizedDescription);
RTCLogError(@"Failed to begin WebRTC session: %@",
error.localizedDescription);
audio_unit_.reset();
return false;
}
@@ -978,7 +1023,8 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() {
io_thread_checker_.Detach();
// Remove audio session notification observers.
RTC_OBJC_TYPE(RTCAudioSession)* session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession)* session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session removeDelegate:audio_session_observer_];
// All I/O should be stopped or paused prior to deactivating the audio
@@ -1001,7 +1047,8 @@ bool AudioDeviceIOS::IsInterrupted() {
#pragma mark - Not Implemented
int32_t AudioDeviceIOS::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const {
int32_t AudioDeviceIOS::ActiveAudioLayer(
AudioDeviceModule::AudioLayer& audioLayer) const {
audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
return 0;
}
@@ -1091,7 +1138,8 @@ int32_t AudioDeviceIOS::MicrophoneMuteIsAvailable(bool& available) {
int32_t AudioDeviceIOS::SetMicrophoneMute(bool enable) {
// Set microphone mute only if the audio unit is started.
if (audio_unit_ && audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
if (audio_unit_ &&
audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) {
BOOL result = audio_unit_->SetMicrophoneMute(enable);
if (!result) {
RTCLogError(@"Set microphone %s failed.", enable ? "mute" : "unmute");
@@ -1180,7 +1228,8 @@ int32_t AudioDeviceIOS::SetRecordingDevice(uint16_t index) {
return 0;
}
int32_t AudioDeviceIOS::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType) {
int32_t AudioDeviceIOS::SetRecordingDevice(
AudioDeviceModule::WindowsDeviceType) {
RTC_DCHECK_NOTREACHED() << "Not implemented";
return -1;
}
@@ -1196,14 +1245,16 @@ int32_t AudioDeviceIOS::RecordingIsAvailable(bool& available) {
}
std::optional<AudioDeviceModule::Stats> AudioDeviceIOS::GetStats() const {
const uint64_t total_samples_count = total_playout_samples_count_.load(std::memory_order_acquire);
const uint64_t total_samples_count =
total_playout_samples_count_.load(std::memory_order_acquire);
AudioDeviceModule::Stats playout_stats = {
.synthesized_samples_duration_s =
kMsToSecond * total_playout_glitches_duration_ms_.load(std::memory_order_acquire),
.synthesized_samples_events = num_detected_playout_glitches_.load(std::memory_order_acquire),
.total_samples_duration_s =
kMsToSecond * total_playout_samples_duration_ms_.load(std::memory_order_acquire),
.synthesized_samples_duration_s = kMsToSecond *
total_playout_glitches_duration_ms_.load(std::memory_order_acquire),
.synthesized_samples_events =
num_detected_playout_glitches_.load(std::memory_order_acquire),
.total_samples_duration_s = kMsToSecond *
total_playout_samples_duration_ms_.load(std::memory_order_acquire),
.total_playout_delay_s =
kMsToSecond * total_playout_delay_ms_.load(std::memory_order_acquire),
.total_samples_count = total_samples_count,

File diff suppressed because it is too large


@@ -39,10 +39,10 @@ std::string StdStringFromNSString(NSString* nsString) {
bool CheckAndLogError(BOOL success, NSError* error) {
if (!success) {
NSString* msg =
[NSString stringWithFormat:@"Error: %ld, %@, %@", (long)error.code,
error.localizedDescription,
error.localizedFailureReason];
NSString* msg = [NSString stringWithFormat:@"Error: %ld, %@, %@",
(long)error.code,
error.localizedDescription,
error.localizedFailureReason];
RTC_LOG(LS_ERROR) << StdStringFromNSString(msg);
return false;
}


@@ -32,7 +32,8 @@ static void LogStreamDescription(AudioStreamBasicDescription description) {
" mChannelsPerFrame: %u\n"
" mBitsPerChannel: %u\n"
" mReserved: %u\n}",
description.mSampleRate, formatIdString,
description.mSampleRate,
formatIdString,
static_cast<unsigned int>(description.mFormatFlags),
static_cast<unsigned int>(description.mBytesPerPacket),
static_cast<unsigned int>(description.mFramesPerPacket),
@@ -61,19 +62,21 @@ static const AudioUnitElement kOutputBus = 0;
static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) {
RTC_DCHECK(audio_unit);
UInt32 size = sizeof(*enabled);
OSStatus result = AudioUnitGetProperty(audio_unit,
kAUVoiceIOProperty_VoiceProcessingEnableAGC,
kAudioUnitScope_Global,
kInputBus,
enabled,
&size);
OSStatus result =
AudioUnitGetProperty(audio_unit,
kAUVoiceIOProperty_VoiceProcessingEnableAGC,
kAudioUnitScope_Global,
kInputBus,
enabled,
&size);
RTCLog(@"VPIO unit AGC: %u", static_cast<unsigned int>(*enabled));
return result;
}
VoiceProcessingAudioUnit::VoiceProcessingAudioUnit(bool bypass_voice_processing,
bool detect_mute_speech,
VoiceProcessingAudioUnitObserver* observer)
VoiceProcessingAudioUnit::VoiceProcessingAudioUnit(
bool bypass_voice_processing,
bool detect_mute_speech,
VoiceProcessingAudioUnitObserver* observer)
: bypass_voice_processing_(bypass_voice_processing),
detect_mute_speech_(detect_mute_speech),
observer_(observer),
@@ -115,8 +118,11 @@ bool VoiceProcessingAudioUnit::Init() {
// Enable input on the input scope of the input element.
UInt32 enable_input = 1;
result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input, kInputBus, &enable_input,
result = AudioUnitSetProperty(vpio_unit_,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
kInputBus,
&enable_input,
sizeof(enable_input));
if (result != noErr) {
DisposeAudioUnit();
@@ -128,9 +134,12 @@
// Enable output on the output scope of the output element.
UInt32 enable_output = 1;
result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output, kOutputBus,
&enable_output, sizeof(enable_output));
result = AudioUnitSetProperty(vpio_unit_,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output,
kOutputBus,
&enable_output,
sizeof(enable_output));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to enable output on output scope of output element. "
@@ -144,9 +153,12 @@
AURenderCallbackStruct render_callback;
render_callback.inputProc = OnGetPlayoutData;
render_callback.inputProcRefCon = this;
result = AudioUnitSetProperty(
vpio_unit_, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input,
kOutputBus, &render_callback, sizeof(render_callback));
result = AudioUnitSetProperty(vpio_unit_,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input,
kOutputBus,
&render_callback,
sizeof(render_callback));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to specify the render callback on the output bus. "
@@ -158,9 +170,12 @@
// Disable AU buffer allocation for the recorder, we allocate our own.
// TODO(henrika): not sure that it actually saves resource to make this call.
UInt32 flag = 0;
result = AudioUnitSetProperty(
vpio_unit_, kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output, kInputBus, &flag, sizeof(flag));
result = AudioUnitSetProperty(vpio_unit_,
kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output,
kInputBus,
&flag,
sizeof(flag));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to disable buffer allocation on the input bus. "
@@ -177,8 +192,10 @@
input_callback.inputProcRefCon = this;
result = AudioUnitSetProperty(vpio_unit_,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global, kInputBus,
&input_callback, sizeof(input_callback));
kAudioUnitScope_Global,
kInputBus,
&input_callback,
sizeof(input_callback));
if (result != noErr) {
DisposeAudioUnit();
RTCLogError(@"Failed to specify the input callback on the input bus. "
@@ -207,9 +224,12 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
#endif
// Set the format on the output scope of the input element/bus.
result =
AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, kInputBus, &format, size);
result = AudioUnitSetProperty(vpio_unit_,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
kInputBus,
&format,
size);
if (result != noErr) {
RTCLogError(@"Failed to set format on output scope of input bus. "
"Error=%ld.",
@@ -218,9 +238,12 @@
}
// Set the format on the input scope of the output element/bus.
result =
AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input, kOutputBus, &format, size);
result = AudioUnitSetProperty(vpio_unit_,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
kOutputBus,
&format,
size);
if (result != noErr) {
RTCLogError(@"Failed to set format on input scope of output bus. "
"Error=%ld.",
@@ -257,17 +280,21 @@
if (detect_mute_speech_) {
if (@available(iOS 15, *)) {
// Set listener for muted speech event.
AUVoiceIOMutedSpeechActivityEventListener listener = ^(AUVoiceIOSpeechActivityEvent event) {
observer_->OnReceivedMutedSpeechActivity(event);
};
result = AudioUnitSetProperty(vpio_unit_,
kAUVoiceIOProperty_MutedSpeechActivityEventListener,
kAudioUnitScope_Global,
0,
&listener,
sizeof(AUVoiceIOMutedSpeechActivityEventListener));
AUVoiceIOMutedSpeechActivityEventListener listener =
^(AUVoiceIOSpeechActivityEvent event) {
observer_->OnReceivedMutedSpeechActivity(event);
};
result = AudioUnitSetProperty(
vpio_unit_,
kAUVoiceIOProperty_MutedSpeechActivityEventListener,
kAudioUnitScope_Global,
0,
&listener,
sizeof(AUVoiceIOMutedSpeechActivityEventListener));
if (result != noErr) {
RTCLog(@"Failed to set muted speech activity event listener. Error=%ld.", (long)result);
RTCLog(
@"Failed to set muted speech activity event listener. Error=%ld.",
(long)result);
}
}
}
@@ -284,7 +311,8 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
if (result == noErr) {
RTCLog(@"Successfully bypassed voice processing.");
} else {
RTCLogError(@"Failed to bypass voice processing. Error=%ld.", (long)result);
RTCLogError(@"Failed to bypass voice processing. Error=%ld.",
(long)result);
}
state_ = kInitialized;
return true;
@@ -305,33 +333,34 @@ bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) {
// Example of error code: kAudioUnitErr_NoConnection (-10876).
// All error codes related to audio units are negative and are therefore
// converted into a postive value to match the UMA APIs.
RTC_HISTOGRAM_COUNTS_SPARSE_100000(
"WebRTC.Audio.GetAGCStateErrorCode1", (-1) * result);
RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Audio.GetAGCStateErrorCode1",
(-1) * result);
} else if (agc_is_enabled) {
// Remember that the AGC was enabled by default. Will be used in UMA.
agc_was_enabled_by_default = 1;
} else {
// AGC was initially disabled => try to enable it explicitly.
UInt32 enable_agc = 1;
result =
AudioUnitSetProperty(vpio_unit_,
kAUVoiceIOProperty_VoiceProcessingEnableAGC,
kAudioUnitScope_Global, kInputBus, &enable_agc,
sizeof(enable_agc));
result = AudioUnitSetProperty(vpio_unit_,
kAUVoiceIOProperty_VoiceProcessingEnableAGC,
kAudioUnitScope_Global,
kInputBus,
&enable_agc,
sizeof(enable_agc));
if (result != noErr) {
RTCLogError(@"Failed to enable the built-in AGC. "
"Error=%ld.",
(long)result);
RTC_HISTOGRAM_COUNTS_SPARSE_100000(
"WebRTC.Audio.SetAGCStateErrorCode", (-1) * result);
RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Audio.SetAGCStateErrorCode",
(-1) * result);
}
result = GetAGCState(vpio_unit_, &agc_is_enabled);
if (result != noErr) {
RTCLogError(@"Failed to get AGC state (2nd attempt). "
"Error=%ld.",
(long)result);
RTC_HISTOGRAM_COUNTS_SPARSE_100000(
"WebRTC.Audio.GetAGCStateErrorCode2", (-1) * result);
RTC_HISTOGRAM_COUNTS_SPARSE_100000("WebRTC.Audio.GetAGCStateErrorCode2",
(-1) * result);
}
}
@@ -423,7 +452,9 @@ bool VoiceProcessingAudioUnit::SetMicrophoneMute(bool enable) {
}
if (result != noErr) {
RTCLogError(@"Failed to %s microphone. Error=%ld", (enable ? "mute" : "unmute"), (long)result);
RTCLogError(@"Failed to %s microphone. Error=%ld",
(enable ? "mute" : "unmute"),
(long)result);
return false;
}
@@ -438,8 +469,8 @@ OSStatus VoiceProcessingAudioUnit::Render(AudioUnitRenderActionFlags* flags,
AudioBufferList* io_data) {
RTC_DCHECK(vpio_unit_) << "Init() not called.";
OSStatus result = AudioUnitRender(vpio_unit_, flags, time_stamp,
output_bus_number, num_frames, io_data);
OSStatus result = AudioUnitRender(
vpio_unit_, flags, time_stamp, output_bus_number, num_frames, io_data);
if (result != noErr) {
RTCLogError(@"Failed to render audio unit. Error=%ld", (long)result);
}
@@ -455,8 +486,8 @@ OSStatus VoiceProcessingAudioUnit::OnGetPlayoutData(
AudioBufferList* io_data) {
VoiceProcessingAudioUnit* audio_unit =
static_cast<VoiceProcessingAudioUnit*>(in_ref_con);
return audio_unit->NotifyGetPlayoutData(flags, time_stamp, bus_number,
num_frames, io_data);
return audio_unit->NotifyGetPlayoutData(
flags, time_stamp, bus_number, num_frames, io_data);
}
OSStatus VoiceProcessingAudioUnit::OnDeliverRecordedData(
@@ -468,8 +499,8 @@ OSStatus VoiceProcessingAudioUnit::OnDeliverRecordedData(
AudioBufferList* io_data) {
VoiceProcessingAudioUnit* audio_unit =
static_cast<VoiceProcessingAudioUnit*>(in_ref_con);
return audio_unit->NotifyDeliverRecordedData(flags, time_stamp, bus_number,
num_frames, io_data);
return audio_unit->NotifyDeliverRecordedData(
flags, time_stamp, bus_number, num_frames, io_data);
}
OSStatus VoiceProcessingAudioUnit::NotifyGetPlayoutData(
@@ -478,8 +509,8 @@ OSStatus VoiceProcessingAudioUnit::NotifyGetPlayoutData(
UInt32 bus_number,
UInt32 num_frames,
AudioBufferList* io_data) {
return observer_->OnGetPlayoutData(flags, time_stamp, bus_number, num_frames,
io_data);
return observer_->OnGetPlayoutData(
flags, time_stamp, bus_number, num_frames, io_data);
}
OSStatus VoiceProcessingAudioUnit::NotifyDeliverRecordedData(
@@ -488,8 +519,8 @@ OSStatus VoiceProcessingAudioUnit::NotifyDeliverRecordedData(
UInt32 bus_number,
UInt32 num_frames,
AudioBufferList* io_data) {
return observer_->OnDeliverRecordedData(flags, time_stamp, bus_number,
num_frames, io_data);
return observer_->OnDeliverRecordedData(
flags, time_stamp, bus_number, num_frames, io_data);
}
AudioStreamBasicDescription VoiceProcessingAudioUnit::GetFormat(


@@ -29,7 +29,8 @@ namespace objc_adm {
class ObjCAudioDeviceModule : public AudioDeviceModule {
public:
explicit ObjCAudioDeviceModule(id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device);
explicit ObjCAudioDeviceModule(
id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device);
~ObjCAudioDeviceModule() override;
// Retrieve the currently utilized audio layer
@@ -135,13 +136,14 @@ class ObjCAudioDeviceModule : public AudioDeviceModule {
#endif // WEBRTC_IOS
public:
OSStatus OnDeliverRecordedData(AudioUnitRenderActionFlags* flags,
const AudioTimeStamp* time_stamp,
NSInteger bus_number,
UInt32 num_frames,
const AudioBufferList* io_data,
void* render_context,
RTC_OBJC_TYPE(RTCAudioDeviceRenderRecordedDataBlock) render_block);
OSStatus OnDeliverRecordedData(
AudioUnitRenderActionFlags* flags,
const AudioTimeStamp* time_stamp,
NSInteger bus_number,
UInt32 num_frames,
const AudioBufferList* io_data,
void* render_context,
RTC_OBJC_TYPE(RTCAudioDeviceRenderRecordedDataBlock) render_block);
OSStatus OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
const AudioTimeStamp* time_stamp,
@@ -150,20 +152,21 @@ class ObjCAudioDeviceModule : public AudioDeviceModule {
AudioBufferList* io_data);
// Notifies `ObjCAudioDeviceModule` that at least one of the audio input
// parameters or audio input latency of `RTCAudioDevice` has changed. It necessary to
// update `record_parameters_` with current audio parameter of `RTCAudioDevice`
// via `UpdateAudioParameters` and if parameters are actually change then
// ADB parameters are updated with `UpdateInputAudioDeviceBuffer`. Audio input latency
// stored in `cached_recording_delay_ms_` is also updated with current latency
// of `RTCAudioDevice`.
// parameters or audio input latency of `RTCAudioDevice` has changed. It
// necessary to update `record_parameters_` with current audio parameter of
// `RTCAudioDevice` via `UpdateAudioParameters` and if parameters are actually
// change then ADB parameters are updated with `UpdateInputAudioDeviceBuffer`.
// Audio input latency stored in `cached_recording_delay_ms_` is also updated
// with current latency of `RTCAudioDevice`.
void HandleAudioInputParametersChange();
// Same as `HandleAudioInputParametersChange` but should be called when audio output
// parameters of `RTCAudioDevice` has changed.
// Same as `HandleAudioInputParametersChange` but should be called when audio
// output parameters of `RTCAudioDevice` has changed.
void HandleAudioOutputParametersChange();
// Notifies `ObjCAudioDeviceModule` about audio input interruption happen due to
// any reason so `ObjCAudioDeviceModule` is can prepare to restart of audio IO.
// Notifies `ObjCAudioDeviceModule` about audio input interruption happen due
// to any reason so `ObjCAudioDeviceModule` is can prepare to restart of audio
// IO.
void HandleAudioInputInterrupted();
// Same as `ObjCAudioDeviceModule` but should be called when audio output
@@ -171,33 +174,39 @@ class ObjCAudioDeviceModule : public AudioDeviceModule {
void HandleAudioOutputInterrupted();
private:
// Update our audio parameters if they are different from current device audio parameters
// Returns true when our parameters are update, false - otherwise.
// `ObjCAudioDeviceModule` has audio device buffer (ADB) which has audio parameters
// of playout & recording. The ADB is configured to work with specific sample rate & channel
// count. `ObjCAudioDeviceModule` stores audio parameters which were used to configure ADB in the
// fields `playout_parameters_` and `recording_parameters_`.
// `RTCAudioDevice` protocol has its own audio parameters exposed as individual properties.
// `RTCAudioDevice` audio parameters might change when playout/recording is already in progress,
// for example, when device is switched. `RTCAudioDevice` audio parameters must be kept in sync
// with ADB audio parameters. This method is invoked when `RTCAudioDevice` reports that it's audio
// parameters (`device_params`) are changed and it detects if there any difference with our
// current audio parameters (`params`). Our parameters are updated in case of actual change and
// method returns true. In case of actual change there is follow-up call to either
// `UpdateOutputAudioDeviceBuffer` or `UpdateInputAudioDeviceBuffer` to apply updated
// `playout_parameters_` or `recording_parameters_` to ADB.
// Update our audio parameters if they are different from current device audio
// parameters Returns true when our parameters are update, false - otherwise.
// `ObjCAudioDeviceModule` has audio device buffer (ADB) which has audio
// parameters of playout & recording. The ADB is configured to work with
// specific sample rate & channel count. `ObjCAudioDeviceModule` stores audio
// parameters which were used to configure ADB in the fields
// `playout_parameters_` and `recording_parameters_`. `RTCAudioDevice`
// protocol has its own audio parameters exposed as individual properties.
// `RTCAudioDevice` audio parameters might change when playout/recording is
// already in progress, for example, when device is switched. `RTCAudioDevice`
// audio parameters must be kept in sync with ADB audio parameters. This
// method is invoked when `RTCAudioDevice` reports that it's audio parameters
// (`device_params`) are changed and it detects if there any difference with
// our current audio parameters (`params`). Our parameters are updated in case
// of actual change and method returns true. In case of actual change there is
// follow-up call to either `UpdateOutputAudioDeviceBuffer` or
// `UpdateInputAudioDeviceBuffer` to apply updated `playout_parameters_` or
// `recording_parameters_` to ADB.
bool UpdateAudioParameters(AudioParameters& params, const AudioParameters& device_params);
bool UpdateAudioParameters(AudioParameters& params,
const AudioParameters& device_params);
// Update our cached audio latency with device latency. Device latency is reported by
// `RTCAudioDevice` object. Whenever latency is changed, `RTCAudioDevice` is obliged to notify ADM
// about the change via `HandleAudioInputParametersChange` or `HandleAudioOutputParametersChange`.
// Current device IO latency is cached in the atomic field and used from audio IO thread
// to be reported to audio device buffer. It is highly recommended by Apple not to call any
// ObjC methods from audio IO thread, that is why implementation relies on caching latency
// into a field and being notified when latency is changed, which is the case when device
// is switched.
void UpdateAudioDelay(std::atomic<int>& delay_ms, const NSTimeInterval device_latency);
// Update our cached audio latency with device latency. Device latency is
// reported by `RTCAudioDevice` object. Whenever latency is changed,
// `RTCAudioDevice` is obliged to notify ADM about the change via
// `HandleAudioInputParametersChange` or `HandleAudioOutputParametersChange`.
// Current device IO latency is cached in the atomic field and used from audio
// IO thread to be reported to audio device buffer. It is highly recommended
// by Apple not to call any ObjC methods from audio IO thread, that is why
// implementation relies on caching latency into a field and being notified
// when latency is changed, which is the case when device is switched.
void UpdateAudioDelay(std::atomic<int>& delay_ms,
const NSTimeInterval device_latency);
// Uses current `playout_parameters_` to inform the audio device buffer (ADB)
// about our internal audio parameters.
@@ -214,11 +223,12 @@ class ObjCAudioDeviceModule : public AudioDeviceModule {
// AudioDeviceBuffer is a buffer to consume audio recorded by `RTCAudioDevice`
// and provide audio to be played via `RTCAudioDevice`.
// Audio PCMs could have different sample rate and channels count, but expected
// to be in 16-bit integer interleaved linear PCM format.
// The current parameters ADB configured to work with is stored in field
// Audio PCMs could have different sample rate and channels count, but
// expected to be in 16-bit integer interleaved linear PCM format. The current
// parameters ADB configured to work with is stored in field
// `playout_parameters_` for playout and `record_parameters_` for recording.
// These parameters and ADB must kept in sync with `RTCAudioDevice` audio parameters.
// These parameters and ADB must kept in sync with `RTCAudioDevice` audio
// parameters.
std::unique_ptr<AudioDeviceBuffer> audio_device_buffer_;
// Set to 1 when recording is active and 0 otherwise.


@@ -21,19 +21,25 @@
namespace {
webrtc::AudioParameters RecordParameters(id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device) {
const double sample_rate = static_cast<int>([audio_device deviceInputSampleRate]);
const size_t channels = static_cast<size_t>([audio_device inputNumberOfChannels]);
const size_t frames_per_buffer =
static_cast<size_t>(sample_rate * [audio_device inputIOBufferDuration] + .5);
webrtc::AudioParameters RecordParameters(
id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device) {
const double sample_rate =
static_cast<int>([audio_device deviceInputSampleRate]);
const size_t channels =
static_cast<size_t>([audio_device inputNumberOfChannels]);
const size_t frames_per_buffer = static_cast<size_t>(
sample_rate * [audio_device inputIOBufferDuration] + .5);
return webrtc::AudioParameters(sample_rate, channels, frames_per_buffer);
}
webrtc::AudioParameters PlayoutParameters(id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device) {
const double sample_rate = static_cast<int>([audio_device deviceOutputSampleRate]);
const size_t channels = static_cast<size_t>([audio_device outputNumberOfChannels]);
const size_t frames_per_buffer =
static_cast<size_t>(sample_rate * [audio_device outputIOBufferDuration] + .5);
webrtc::AudioParameters PlayoutParameters(
id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device) {
const double sample_rate =
static_cast<int>([audio_device deviceOutputSampleRate]);
const size_t channels =
static_cast<size_t>([audio_device outputNumberOfChannels]);
const size_t frames_per_buffer = static_cast<size_t>(
sample_rate * [audio_device outputIOBufferDuration] + .5);
return webrtc::AudioParameters(sample_rate, channels, frames_per_buffer);
}
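// Worked example (illustration only) of the rounding used above: a 48 kHz
// device with a 20 ms IO buffer duration gives
// 48000 * 0.02 + .5 = 960.5, truncated to 960 frames per buffer.
static_assert(static_cast<int>(48000.0 * 0.02 + .5) == 960,
              "20 ms at 48 kHz is 960 frames");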
@@ -42,8 +48,10 @@ webrtc::AudioParameters PlayoutParameters(id<RTC_OBJC_TYPE(RTCAudioDevice)> audi
namespace webrtc {
namespace objc_adm {
ObjCAudioDeviceModule::ObjCAudioDeviceModule(id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device)
: audio_device_(audio_device), task_queue_factory_(CreateDefaultTaskQueueFactory()) {
ObjCAudioDeviceModule::ObjCAudioDeviceModule(
id<RTC_OBJC_TYPE(RTCAudioDevice)> audio_device)
: audio_device_(audio_device),
task_queue_factory_(CreateDefaultTaskQueueFactory()) {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK(audio_device_);
thread_checker_.Detach();
@@ -55,7 +63,8 @@ ObjCAudioDeviceModule::~ObjCAudioDeviceModule() {
RTC_DLOG_F(LS_VERBOSE) << "";
}
int32_t ObjCAudioDeviceModule::RegisterAudioCallback(AudioTransport* audioCallback) {
int32_t ObjCAudioDeviceModule::RegisterAudioCallback(
AudioTransport* audioCallback) {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK(audio_device_buffer_);
return audio_device_buffer_->RegisterAudioCallback(audioCallback);
@@ -73,12 +82,14 @@ int32_t ObjCAudioDeviceModule::Init() {
io_record_thread_checker_.Detach();
thread_ = rtc::Thread::Current();
audio_device_buffer_.reset(new webrtc::AudioDeviceBuffer(task_queue_factory_.get()));
audio_device_buffer_.reset(
new webrtc::AudioDeviceBuffer(task_queue_factory_.get()));
if (![audio_device_ isInitialized]) {
if (audio_device_delegate_ == nil) {
audio_device_delegate_ = [[ObjCAudioDeviceDelegate alloc]
initWithAudioDeviceModule:rtc::scoped_refptr<ObjCAudioDeviceModule>(this)
initWithAudioDeviceModule:rtc::scoped_refptr<ObjCAudioDeviceModule>(
this)
audioDeviceThread:thread_];
}
@@ -90,10 +101,12 @@ int32_t ObjCAudioDeviceModule::Init() {
}
}
playout_parameters_.reset([audio_device_delegate_ preferredOutputSampleRate], 1);
playout_parameters_.reset([audio_device_delegate_ preferredOutputSampleRate],
1);
UpdateOutputAudioDeviceBuffer();
record_parameters_.reset([audio_device_delegate_ preferredInputSampleRate], 1);
record_parameters_.reset([audio_device_delegate_ preferredInputSampleRate],
1);
UpdateInputAudioDeviceBuffer();
is_initialized_ = true;
@@ -148,7 +161,8 @@ int32_t ObjCAudioDeviceModule::PlayoutIsAvailable(bool* available) {
bool ObjCAudioDeviceModule::PlayoutIsInitialized() const {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK_RUN_ON(&thread_checker_);
return Initialized() && is_playout_initialized_ && [audio_device_ isPlayoutInitialized];
return Initialized() && is_playout_initialized_ &&
[audio_device_ isPlayoutInitialized];
}
int32_t ObjCAudioDeviceModule::InitPlayout() {
@@ -169,7 +183,8 @@ int32_t ObjCAudioDeviceModule::InitPlayout() {
}
}
if (UpdateAudioParameters(playout_parameters_, PlayoutParameters(audio_device_))) {
if (UpdateAudioParameters(playout_parameters_,
PlayoutParameters(audio_device_))) {
UpdateOutputAudioDeviceBuffer();
}
@@ -224,8 +239,10 @@ int32_t ObjCAudioDeviceModule::StopPlayout() {
int32_t ObjCAudioDeviceModule::PlayoutDelay(uint16_t* delayMS) const {
RTC_DCHECK_RUN_ON(&thread_checker_);
*delayMS = static_cast<uint16_t>(rtc::SafeClamp<int>(
cached_playout_delay_ms_.load(), 0, std::numeric_limits<uint16_t>::max()));
*delayMS = static_cast<uint16_t>(
rtc::SafeClamp<int>(cached_playout_delay_ms_.load(),
0,
std::numeric_limits<uint16_t>::max()));
return 0;
}
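// Self-contained sketch of the saturation above (std::clamp standing in for
// rtc::SafeClamp): cached delays outside uint16_t range are clamped, e.g.
// -5 becomes 0 and 70000 becomes 65535.
#include <algorithm>
#include <cstdint>
inline uint16_t ClampDelayMs(int delay_ms) {
  return static_cast<uint16_t>(std::clamp(delay_ms, 0, 65535));
}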
@@ -239,7 +256,8 @@ int32_t ObjCAudioDeviceModule::RecordingIsAvailable(bool* available) {
bool ObjCAudioDeviceModule::RecordingIsInitialized() const {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK_RUN_ON(&thread_checker_);
return Initialized() && is_recording_initialized_ && [audio_device_ isRecordingInitialized];
return Initialized() && is_recording_initialized_ &&
[audio_device_ isRecordingInitialized];
}
int32_t ObjCAudioDeviceModule::InitRecording() {
@ -260,7 +278,8 @@ int32_t ObjCAudioDeviceModule::InitRecording() {
}
}
if (UpdateAudioParameters(record_parameters_, RecordParameters(audio_device_))) {
if (UpdateAudioParameters(record_parameters_,
RecordParameters(audio_device_))) {
UpdateInputAudioDeviceBuffer();
}
@@ -315,7 +334,8 @@ int32_t ObjCAudioDeviceModule::StopRecording() {
#if defined(WEBRTC_IOS)
int ObjCAudioDeviceModule::GetPlayoutAudioParameters(AudioParameters* params) const {
int ObjCAudioDeviceModule::GetPlayoutAudioParameters(
AudioParameters* params) const {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK(playout_parameters_.is_valid());
RTC_DCHECK_RUN_ON(&thread_checker_);
@@ -323,7 +343,8 @@ int ObjCAudioDeviceModule::GetPlayoutAudioParameters(AudioParameters* params) co
return 0;
}
int ObjCAudioDeviceModule::GetRecordAudioParameters(AudioParameters* params) const {
int ObjCAudioDeviceModule::GetRecordAudioParameters(
AudioParameters* params) const {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK(record_parameters_.is_valid());
RTC_DCHECK_RUN_ON(&thread_checker_);
@@ -339,11 +360,13 @@ void ObjCAudioDeviceModule::UpdateOutputAudioDeviceBuffer() {
RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first";
RTC_DCHECK_GT(playout_parameters_.sample_rate(), 0);
RTC_DCHECK(playout_parameters_.channels() == 1 || playout_parameters_.channels() == 2);
RTC_DCHECK(playout_parameters_.channels() == 1 ||
playout_parameters_.channels() == 2);
audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
playout_fine_audio_buffer_.reset(new FineAudioBuffer(audio_device_buffer_.get()));
playout_fine_audio_buffer_.reset(
new FineAudioBuffer(audio_device_buffer_.get()));
}
void ObjCAudioDeviceModule::UpdateInputAudioDeviceBuffer() {
@@ -352,15 +375,18 @@ void ObjCAudioDeviceModule::UpdateInputAudioDeviceBuffer() {
RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first";
RTC_DCHECK_GT(record_parameters_.sample_rate(), 0);
RTC_DCHECK(record_parameters_.channels() == 1 || record_parameters_.channels() == 2);
RTC_DCHECK(record_parameters_.channels() == 1 ||
record_parameters_.channels() == 2);
audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate());
audio_device_buffer_->SetRecordingSampleRate(
record_parameters_.sample_rate());
audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
record_fine_audio_buffer_.reset(new FineAudioBuffer(audio_device_buffer_.get()));
record_fine_audio_buffer_.reset(
new FineAudioBuffer(audio_device_buffer_.get()));
}
void ObjCAudioDeviceModule::UpdateAudioDelay(std::atomic<int>& delay_ms,
const NSTimeInterval device_latency) {
void ObjCAudioDeviceModule::UpdateAudioDelay(
std::atomic<int>& delay_ms, const NSTimeInterval device_latency) {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK_RUN_ON(&thread_checker_);
int latency_ms = static_cast<int>(rtc::kNumMillisecsPerSec * device_latency);
@@ -374,12 +400,13 @@ void ObjCAudioDeviceModule::UpdateAudioDelay(std::atomic<int>& delay_ms,
}
}
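// Worked example (illustrative, assuming rtc::kNumMillisecsPerSec == 1000):
// an NSTimeInterval device latency of 0.02 s is cached as
// static_cast<int>(1000 * 0.02) == 20 ms for the audio IO thread to read.
static_assert(static_cast<int>(1000 * 0.02) == 20, "0.02 s is 20 ms");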
bool ObjCAudioDeviceModule::UpdateAudioParameters(AudioParameters& params,
const AudioParameters& device_params) {
bool ObjCAudioDeviceModule::UpdateAudioParameters(
AudioParameters& params, const AudioParameters& device_params) {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK_RUN_ON(&thread_checker_);
if (!device_params.is_complete()) {
RTC_LOG_F(LS_INFO) << "Device params are incomplete: " << device_params.ToString();
RTC_LOG_F(LS_INFO) << "Device params are incomplete: "
<< device_params.ToString();
return false;
}
if (params.channels() == device_params.channels() &&
@@ -390,10 +417,12 @@ bool ObjCAudioDeviceModule::UpdateAudioParameters(AudioParameters& params,
return false;
}
RTC_LOG_F(LS_INFO) << "Audio params will be changed from: " << params.ToString()
RTC_LOG_F(LS_INFO) << "Audio params will be changed from: "
<< params.ToString()
<< " to: " << device_params.ToString();
params.reset(
device_params.sample_rate(), device_params.channels(), device_params.frames_per_buffer());
params.reset(device_params.sample_rate(),
device_params.channels(),
device_params.frames_per_buffer());
return true;
}
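// Usage pattern (sketch mirroring InitPlayout() and the parameter-change
// handlers in this file): the boolean result gates the buffer refresh.
//   if (UpdateAudioParameters(playout_parameters_,
//                             PlayoutParameters(audio_device_))) {
//     UpdateOutputAudioDeviceBuffer();  // reapply only on an actual change
//   }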
@@ -414,14 +443,17 @@ OSStatus ObjCAudioDeviceModule::OnDeliverRecordedData(
// AudioBuffer is already filled with audio data
RTC_DCHECK_EQ(1, io_data->mNumberBuffers);
const AudioBuffer* audio_buffer = &io_data->mBuffers[0];
RTC_DCHECK(audio_buffer->mNumberChannels == 1 || audio_buffer->mNumberChannels == 2);
RTC_DCHECK(audio_buffer->mNumberChannels == 1 ||
audio_buffer->mNumberChannels == 2);
record_fine_audio_buffer_->DeliverRecordedData(
rtc::ArrayView<const int16_t>(static_cast<int16_t*>(audio_buffer->mData), num_frames),
rtc::ArrayView<const int16_t>(
static_cast<int16_t*>(audio_buffer->mData), num_frames),
cached_recording_delay_ms_.load());
return noErr;
}
RTC_DCHECK(render_block != nullptr) << "Either io_data or render_block must be provided";
RTC_DCHECK(render_block != nullptr)
<< "Either io_data or render_block must be provided";
// Set the size of our own audio buffer and clear it first to avoid copying
// in combination with potential reallocations.
@@ -439,13 +471,18 @@ OSStatus ObjCAudioDeviceModule::OnDeliverRecordedData(
audio_buffer_list.mNumberBuffers = 1;
AudioBuffer* audio_buffer = &audio_buffer_list.mBuffers[0];
audio_buffer->mNumberChannels = channels_count;
audio_buffer->mDataByteSize =
record_audio_buffer_.size() * sizeof(decltype(record_audio_buffer_)::value_type);
audio_buffer->mDataByteSize = record_audio_buffer_.size() *
sizeof(decltype(record_audio_buffer_)::value_type);
audio_buffer->mData = reinterpret_cast<int8_t*>(record_audio_buffer_.data());
// Obtain the recorded audio samples by initiating a rendering cycle into our own buffer.
result =
render_block(flags, time_stamp, bus_number, num_frames, &audio_buffer_list, render_context);
// Obtain the recorded audio samples by initiating a rendering cycle into our
// own buffer.
result = render_block(flags,
time_stamp,
bus_number,
num_frames,
&audio_buffer_list,
render_context);
if (result != noErr) {
RTC_LOG_F(LS_ERROR) << "Failed to render audio: " << result;
return result;
@@ -454,21 +491,23 @@ OSStatus ObjCAudioDeviceModule::OnDeliverRecordedData(
// Get a pointer to the recorded audio and send it to the WebRTC ADB.
// Use the FineAudioBuffer instance to convert between native buffer size
// and the 10ms buffer size used by WebRTC.
record_fine_audio_buffer_->DeliverRecordedData(record_audio_buffer_,
cached_recording_delay_ms_.load());
record_fine_audio_buffer_->DeliverRecordedData(
record_audio_buffer_, cached_recording_delay_ms_.load());
return noErr;
}
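// Sanity illustration, not part of this CL: the AudioBuffer byte size set
// above is frames * channels * sizeof(int16_t); e.g. 960 stereo frames
// occupy 960 * 2 * 2 = 3840 bytes.
static_assert(960 * 2 * sizeof(int16_t) == 3840, "960 stereo int16 frames");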
OSStatus ObjCAudioDeviceModule::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
const AudioTimeStamp* time_stamp,
NSInteger bus_number,
UInt32 num_frames,
AudioBufferList* io_data) {
OSStatus ObjCAudioDeviceModule::OnGetPlayoutData(
AudioUnitRenderActionFlags* flags,
const AudioTimeStamp* time_stamp,
NSInteger bus_number,
UInt32 num_frames,
AudioBufferList* io_data) {
RTC_DCHECK_RUN_ON(&io_playout_thread_checker_);
// Verify 16-bit, noninterleaved mono or stereo PCM signal format.
RTC_DCHECK_EQ(1, io_data->mNumberBuffers);
AudioBuffer* audio_buffer = &io_data->mBuffers[0];
RTC_DCHECK(audio_buffer->mNumberChannels == 1 || audio_buffer->mNumberChannels == 2);
RTC_DCHECK(audio_buffer->mNumberChannels == 1 ||
audio_buffer->mNumberChannels == 2);
RTC_DCHECK_EQ(audio_buffer->mDataByteSize,
sizeof(int16_t) * num_frames * audio_buffer->mNumberChannels);
@@ -476,7 +515,9 @@ OSStatus ObjCAudioDeviceModule::OnGetPlayoutData(AudioUnitRenderActionFlags* fla
// activated.
if (!playing_.load()) {
*flags |= kAudioUnitRenderAction_OutputIsSilence;
memset(static_cast<int8_t*>(audio_buffer->mData), 0, audio_buffer->mDataByteSize);
memset(static_cast<int8_t*>(audio_buffer->mData),
0,
audio_buffer->mDataByteSize);
return noErr;
}
@@ -506,7 +547,8 @@ void ObjCAudioDeviceModule::HandleAudioInputParametersChange() {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK_RUN_ON(&thread_checker_);
if (UpdateAudioParameters(record_parameters_, RecordParameters(audio_device_))) {
if (UpdateAudioParameters(record_parameters_,
RecordParameters(audio_device_))) {
UpdateInputAudioDeviceBuffer();
}
@@ -517,7 +559,8 @@ void ObjCAudioDeviceModule::HandleAudioOutputParametersChange() {
RTC_DLOG_F(LS_VERBOSE) << "";
RTC_DCHECK_RUN_ON(&thread_checker_);
if (UpdateAudioParameters(playout_parameters_, PlayoutParameters(audio_device_))) {
if (UpdateAudioParameters(playout_parameters_,
PlayoutParameters(audio_device_))) {
UpdateOutputAudioDeviceBuffer();
}
@@ -538,15 +581,17 @@ int16_t ObjCAudioDeviceModule::RecordingDevices() {
return 0;
}
int32_t ObjCAudioDeviceModule::PlayoutDeviceName(uint16_t index,
char name[kAdmMaxDeviceNameSize],
char guid[kAdmMaxGuidSize]) {
int32_t ObjCAudioDeviceModule::PlayoutDeviceName(
uint16_t index,
char name[kAdmMaxDeviceNameSize],
char guid[kAdmMaxGuidSize]) {
return -1;
}
int32_t ObjCAudioDeviceModule::RecordingDeviceName(uint16_t index,
char name[kAdmMaxDeviceNameSize],
char guid[kAdmMaxGuidSize]) {
int32_t ObjCAudioDeviceModule::RecordingDeviceName(
uint16_t index,
char name[kAdmMaxDeviceNameSize],
char guid[kAdmMaxGuidSize]) {
return -1;
}
@@ -664,7 +709,8 @@ int32_t ObjCAudioDeviceModule::StereoPlayout(bool* enabled) const {
return 0;
}
int32_t ObjCAudioDeviceModule::StereoRecordingIsAvailable(bool* available) const {
int32_t ObjCAudioDeviceModule::StereoRecordingIsAvailable(
bool* available) const {
*available = false;
return 0;
}


@@ -22,11 +22,14 @@ class ObjCAudioDeviceModule;
} // namespace objc_adm
} // namespace webrtc
@interface ObjCAudioDeviceDelegate : NSObject <RTC_OBJC_TYPE (RTCAudioDeviceDelegate)>
@interface ObjCAudioDeviceDelegate
: NSObject <RTC_OBJC_TYPE (RTCAudioDeviceDelegate)>
- (instancetype)initWithAudioDeviceModule:
(rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule>)audioDeviceModule
audioDeviceThread:(rtc::Thread*)thread;
- (instancetype)
initWithAudioDeviceModule:
(rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule>)
audioDeviceModule
audioDeviceThread:(rtc::Thread*)thread;
- (void)resetAudioDeviceModule;


@@ -30,10 +30,12 @@ constexpr double kPreferredOutputSampleRate = 48000.0;
constexpr NSTimeInterval kPeferredInputIOBufferDuration = 0.02;
constexpr NSTimeInterval kPeferredOutputIOBufferDuration = 0.02;
class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual<AudioDeviceDelegateImpl> {
class AudioDeviceDelegateImpl final
: public rtc::RefCountedNonVirtual<AudioDeviceDelegateImpl> {
public:
AudioDeviceDelegateImpl(
rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule> audio_device_module,
rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule>
audio_device_module,
rtc::Thread* thread)
: audio_device_module_(audio_device_module), thread_(thread) {
RTC_DCHECK(audio_device_module_);
@@ -49,7 +51,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual<AudioDevi
void reset_audio_device_module() { audio_device_module_ = nullptr; }
private:
rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule> audio_device_module_;
rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule>
audio_device_module_;
rtc::Thread* thread_;
};
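// Minimal analogue, a sketch only, with std::shared_ptr standing in for
// rtc::scoped_refptr: callbacks reach the module through the delegate, and
// reset_audio_device_module() severs the link so late callbacks become
// no-ops, which is the lifetime pattern used above.
#include <memory>
struct ModuleSketch {
  void HandleCallback() {}
};
struct DelegateSketch {
  std::shared_ptr<ModuleSketch> module;
  void Notify() {
    if (module) module->HandleCallback();  // no-op once Reset() has run
  }
  void Reset() { module = nullptr; }
};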
@@ -71,67 +74,73 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual<AudioDevi
@synthesize preferredOutputIOBufferDuration = preferredOutputIOBufferDuration_;
- (instancetype)initWithAudioDeviceModule:
(rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule>)audioDeviceModule
audioDeviceThread:(rtc::Thread*)thread {
- (instancetype)
initWithAudioDeviceModule:
(rtc::scoped_refptr<webrtc::objc_adm::ObjCAudioDeviceModule>)
audioDeviceModule
audioDeviceThread:(rtc::Thread*)thread {
RTC_DCHECK_RUN_ON(thread);
self = [super init];
if (self) {
impl_ = rtc::make_ref_counted<AudioDeviceDelegateImpl>(audioDeviceModule, thread);
impl_ = rtc::make_ref_counted<AudioDeviceDelegateImpl>(audioDeviceModule,
thread);
preferredInputSampleRate_ = kPreferredInputSampleRate;
preferredInputIOBufferDuration_ = kPeferredInputIOBufferDuration;
preferredOutputSampleRate_ = kPreferredOutputSampleRate;
preferredOutputIOBufferDuration_ = kPeferredOutputIOBufferDuration;
rtc::scoped_refptr<AudioDeviceDelegateImpl> playout_delegate = impl_;
getPlayoutData_ = ^OSStatus(AudioUnitRenderActionFlags* _Nonnull actionFlags,
const AudioTimeStamp* _Nonnull timestamp,
NSInteger inputBusNumber,
UInt32 frameCount,
AudioBufferList* _Nonnull outputData) {
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device =
playout_delegate->audio_device_module();
if (audio_device) {
return audio_device->OnGetPlayoutData(
actionFlags, timestamp, inputBusNumber, frameCount, outputData);
} else {
*actionFlags |= kAudioUnitRenderAction_OutputIsSilence;
RTC_LOG(LS_VERBOSE) << "No alive audio device";
return noErr;
}
};
rtc::scoped_refptr<AudioDeviceDelegateImpl> record_delegate = impl_;
deliverRecordedData_ =
getPlayoutData_ =
^OSStatus(AudioUnitRenderActionFlags* _Nonnull actionFlags,
const AudioTimeStamp* _Nonnull timestamp,
NSInteger inputBusNumber,
UInt32 frameCount,
const AudioBufferList* _Nullable inputData,
void* renderContext,
RTC_OBJC_TYPE(RTCAudioDeviceRenderRecordedDataBlock) _Nullable renderBlock) {
AudioBufferList* _Nonnull outputData) {
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device =
record_delegate->audio_device_module();
playout_delegate->audio_device_module();
if (audio_device) {
return audio_device->OnDeliverRecordedData(actionFlags,
timestamp,
inputBusNumber,
frameCount,
inputData,
renderContext,
renderBlock);
return audio_device->OnGetPlayoutData(
actionFlags, timestamp, inputBusNumber, frameCount, outputData);
} else {
*actionFlags |= kAudioUnitRenderAction_OutputIsSilence;
RTC_LOG(LS_VERBOSE) << "No alive audio device";
return noErr;
}
};
rtc::scoped_refptr<AudioDeviceDelegateImpl> record_delegate = impl_;
deliverRecordedData_ = ^OSStatus(
AudioUnitRenderActionFlags* _Nonnull actionFlags,
const AudioTimeStamp* _Nonnull timestamp,
NSInteger inputBusNumber,
UInt32 frameCount,
const AudioBufferList* _Nullable inputData,
void* renderContext,
RTC_OBJC_TYPE(
RTCAudioDeviceRenderRecordedDataBlock) _Nullable renderBlock) {
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device =
record_delegate->audio_device_module();
if (audio_device) {
return audio_device->OnDeliverRecordedData(actionFlags,
timestamp,
inputBusNumber,
frameCount,
inputData,
renderContext,
renderBlock);
} else {
RTC_LOG(LS_VERBOSE) << "No alive audio device";
return noErr;
}
};
}
return self;
}
- (void)notifyAudioInputParametersChange {
RTC_DCHECK_RUN_ON(impl_->thread());
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module();
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module =
impl_->audio_device_module();
if (audio_device_module) {
audio_device_module->HandleAudioInputParametersChange();
}
@@ -139,7 +148,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual<AudioDevi
- (void)notifyAudioOutputParametersChange {
RTC_DCHECK_RUN_ON(impl_->thread());
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module();
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module =
impl_->audio_device_module();
if (audio_device_module) {
audio_device_module->HandleAudioOutputParametersChange();
}
@@ -147,7 +157,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual<AudioDevi
- (void)notifyAudioInputInterrupted {
RTC_DCHECK_RUN_ON(impl_->thread());
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module();
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module =
impl_->audio_device_module();
if (audio_device_module) {
audio_device_module->HandleAudioInputInterrupted();
}
@@ -155,7 +166,8 @@ class AudioDeviceDelegateImpl final : public rtc::RefCountedNonVirtual<AudioDevi
- (void)notifyAudioOutputInterrupted {
RTC_DCHECK_RUN_ON(impl_->thread());
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module = impl_->audio_device_module();
webrtc::objc_adm::ObjCAudioDeviceModule* audio_device_module =
impl_->audio_device_module();
if (audio_device_module) {
audio_device_module->HandleAudioOutputInterrupted();
}


@@ -18,11 +18,14 @@ namespace webrtc {
namespace {
/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping RTC_OBJC_TYPE(RTCI420Buffer) */
/** ObjCFrameBuffer that conforms to I420BufferInterface by wrapping
* RTC_OBJC_TYPE(RTCI420Buffer) */
class ObjCI420FrameBuffer : public I420BufferInterface {
public:
explicit ObjCI420FrameBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer)
: frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
: frame_buffer_(frame_buffer),
width_(frame_buffer.width),
height_(frame_buffer.height) {}
~ObjCI420FrameBuffer() override {}
int width() const override { return width_; }
@@ -49,8 +52,11 @@ class ObjCI420FrameBuffer : public I420BufferInterface {
} // namespace
ObjCFrameBuffer::ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer)
: frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
ObjCFrameBuffer::ObjCFrameBuffer(
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer)
: frame_buffer_(frame_buffer),
width_(frame_buffer.width),
height_(frame_buffer.height) {}
ObjCFrameBuffer::~ObjCFrameBuffer() {}
@@ -70,20 +76,24 @@ rtc::scoped_refptr<I420BufferInterface> ObjCFrameBuffer::ToI420() {
return rtc::make_ref_counted<ObjCI420FrameBuffer>([frame_buffer_ toI420]);
}
rtc::scoped_refptr<VideoFrameBuffer> ObjCFrameBuffer::CropAndScale(int offset_x,
int offset_y,
int crop_width,
int crop_height,
int scaled_width,
int scaled_height) {
if ([frame_buffer_ respondsToSelector:@selector
(cropAndScaleWith:offsetY:cropWidth:cropHeight:scaleWidth:scaleHeight:)]) {
return rtc::make_ref_counted<ObjCFrameBuffer>([frame_buffer_ cropAndScaleWith:offset_x
offsetY:offset_y
cropWidth:crop_width
cropHeight:crop_height
scaleWidth:scaled_width
scaleHeight:scaled_height]);
rtc::scoped_refptr<VideoFrameBuffer> ObjCFrameBuffer::CropAndScale(
int offset_x,
int offset_y,
int crop_width,
int crop_height,
int scaled_width,
int scaled_height) {
if ([frame_buffer_
respondsToSelector:@selector
(cropAndScaleWith:
offsetY:cropWidth:cropHeight:scaleWidth:scaleHeight:)]) {
return rtc::make_ref_counted<ObjCFrameBuffer>([frame_buffer_
cropAndScaleWith:offset_x
offsetY:offset_y
cropWidth:crop_width
cropHeight:crop_height
scaleWidth:scaled_width
scaleHeight:scaled_height]);
}
// Use the default implementation.
@@ -91,7 +101,8 @@ rtc::scoped_refptr<VideoFrameBuffer> ObjCFrameBuffer::CropAndScale(int offset_x,
offset_x, offset_y, crop_width, crop_height, scaled_width, scaled_height);
}
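// Pattern note (illustration): CropAndScale above feature-detects an optional
// ObjC selector and only falls back to the C++ base implementation when the
// wrapped buffer does not implement it, i.e. the usual shape
//   if ([obj respondsToSelector:@selector(someOptionalSelector)]) { ... }
//   else { /* base implementation */ }
// where `someOptionalSelector` is a hypothetical stand-in.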
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ObjCFrameBuffer::wrapped_frame_buffer() const {
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ObjCFrameBuffer::wrapped_frame_buffer()
const {
return frame_buffer_;
}
@@ -100,7 +111,8 @@ id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> ToObjCVideoFrameBuffer(
if (buffer->type() == VideoFrameBuffer::Type::kNative) {
return static_cast<ObjCFrameBuffer*>(buffer.get())->wrapped_frame_buffer();
} else {
return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:buffer->ToI420()];
return [[RTC_OBJC_TYPE(RTCI420Buffer) alloc]
initWithFrameBuffer:buffer->ToI420()];
}
}


@@ -41,7 +41,8 @@ void ObjCNetworkMonitor::Start() {
safety_flag_->SetAlive();
network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this];
if (network_monitor_ == nil) {
RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?";
RTC_LOG(LS_WARNING)
<< "Failed to create RTCNetworkMonitor; not available on this OS?";
}
started_ = true;
}
@@ -57,8 +58,8 @@ void ObjCNetworkMonitor::Stop() {
started_ = false;
}
rtc::NetworkMonitorInterface::InterfaceInfo ObjCNetworkMonitor::GetInterfaceInfo(
absl::string_view interface_name) {
rtc::NetworkMonitorInterface::InterfaceInfo
ObjCNetworkMonitor::GetInterfaceInfo(absl::string_view interface_name) {
RTC_DCHECK_RUN_ON(thread_);
if (adapter_type_by_name_.empty()) {
// If we have no path update, assume everything's available, because it's
@@ -83,7 +84,8 @@ rtc::NetworkMonitorInterface::InterfaceInfo ObjCNetworkMonitor::GetInterfaceInfo
}
void ObjCNetworkMonitor::OnPathUpdate(
std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp> adapter_type_by_name) {
std::map<std::string, rtc::AdapterType, rtc::AbslStringViewCmp>
adapter_type_by_name) {
thread_->PostTask(SafeTask(safety_flag_, [this, adapter_type_by_name] {
RTC_DCHECK_RUN_ON(thread_);
RTC_DCHECK(network_monitor_ != nil);


@@ -35,16 +35,19 @@ namespace {
class ObjCVideoDecoder : public VideoDecoder {
public:
ObjCVideoDecoder(id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder)
: decoder_(decoder), implementation_name_([decoder implementationName].stdString) {}
: decoder_(decoder),
implementation_name_([decoder implementationName].stdString) {}
bool Configure(const Settings &settings) override {
return
[decoder_ startDecodeWithNumberOfCores:settings.number_of_cores()] == WEBRTC_VIDEO_CODEC_OK;
return [decoder_ startDecodeWithNumberOfCores:settings.number_of_cores()] ==
WEBRTC_VIDEO_CODEC_OK;
}
int32_t Decode(const EncodedImage &input_image, int64_t render_time_ms = -1) override {
int32_t Decode(const EncodedImage &input_image,
int64_t render_time_ms = -1) override {
RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:input_image];
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc]
initWithNativeEncodedImage:input_image];
return [decoder_ decode:encodedImage
missingFrames:false
@@ -52,7 +55,8 @@ class ObjCVideoDecoder : public VideoDecoder {
renderTimeMs:render_time_ms];
}
int32_t RegisterDecodeCompleteCallback(DecodedImageCallback *callback) override {
int32_t RegisterDecodeCompleteCallback(
DecodedImageCallback *callback) override {
[decoder_ setCallback:^(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
const auto buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
VideoFrame videoFrame = VideoFrame::Builder()
@@ -69,7 +73,9 @@ class ObjCVideoDecoder : public VideoDecoder {
int32_t Release() override { return [decoder_ releaseDecoder]; }
const char *ImplementationName() const override { return implementation_name_.c_str(); }
const char *ImplementationName() const override {
return implementation_name_.c_str();
}
private:
id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder_;
@@ -83,19 +89,24 @@ ObjCVideoDecoderFactory::ObjCVideoDecoderFactory(
ObjCVideoDecoderFactory::~ObjCVideoDecoderFactory() {}
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>
ObjCVideoDecoderFactory::wrapped_decoder_factory() const {
return decoder_factory_;
}
std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::Create(const Environment &env,
const SdpVideoFormat &format) {
std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::Create(
const Environment &env, const SdpVideoFormat &format) {
NSString *codecName = [NSString stringWithUTF8String:format.name.c_str()];
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * codecInfo in decoder_factory_.supportedCodecs) {
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) *
codecInfo in decoder_factory_.supportedCodecs) {
if ([codecName isEqualToString:codecInfo.name]) {
id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder = [decoder_factory_ createDecoder:codecInfo];
id<RTC_OBJC_TYPE(RTCVideoDecoder)> decoder =
[decoder_factory_ createDecoder:codecInfo];
if ([decoder conformsToProtocol:@protocol(RTC_OBJC_TYPE(RTCNativeVideoDecoderBuilder))]) {
return [((id<RTC_OBJC_TYPE(RTCNativeVideoDecoderBuilder)>)decoder) build:env];
if ([decoder conformsToProtocol:@protocol(RTC_OBJC_TYPE(
RTCNativeVideoDecoderBuilder))]) {
return [((
id<RTC_OBJC_TYPE(RTCNativeVideoDecoderBuilder)>)decoder) build:env];
} else {
return std::unique_ptr<ObjCVideoDecoder>(new ObjCVideoDecoder(decoder));
}
@@ -105,9 +116,11 @@ std::unique_ptr<VideoDecoder> ObjCVideoDecoderFactory::Create(const Environment
return nullptr;
}
std::vector<SdpVideoFormat> ObjCVideoDecoderFactory::GetSupportedFormats() const {
std::vector<SdpVideoFormat> ObjCVideoDecoderFactory::GetSupportedFormats()
const {
std::vector<SdpVideoFormat> supported_formats;
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in decoder_factory_.supportedCodecs) {
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) *
supportedCodec in decoder_factory_.supportedCodecs) {
SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
supported_formats.push_back(format);
}


@@ -35,8 +35,9 @@ class ObjCVideoEncoderFactory : public VideoEncoderFactory {
std::vector<SdpVideoFormat> GetSupportedFormats() const override;
std::vector<SdpVideoFormat> GetImplementations() const override;
CodecSupport QueryCodecSupport(const SdpVideoFormat& format,
std::optional<std::string> scalability_mode) const override;
CodecSupport QueryCodecSupport(
const SdpVideoFormat& format,
std::optional<std::string> scalability_mode) const override;
std::unique_ptr<VideoEncoder> Create(const Environment& env,
const SdpVideoFormat& format) override;
std::unique_ptr<EncoderSelectorInterface> GetEncoderSelector() const override;


@@ -40,29 +40,37 @@ namespace {
class ObjCVideoEncoder : public VideoEncoder {
public:
ObjCVideoEncoder(id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder)
: encoder_(encoder), implementation_name_([encoder implementationName].stdString) {}
: encoder_(encoder),
implementation_name_([encoder implementationName].stdString) {}
int32_t InitEncode(const VideoCodec *codec_settings, const Settings &encoder_settings) override {
int32_t InitEncode(const VideoCodec *codec_settings,
const Settings &encoder_settings) override {
RTC_OBJC_TYPE(RTCVideoEncoderSettings) *settings =
[[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc] initWithNativeVideoCodec:codec_settings];
[[RTC_OBJC_TYPE(RTCVideoEncoderSettings) alloc]
initWithNativeVideoCodec:codec_settings];
return [encoder_ startEncodeWithSettings:settings
numberOfCores:encoder_settings.number_of_cores];
}
int32_t RegisterEncodeCompleteCallback(EncodedImageCallback *callback) override {
int32_t RegisterEncodeCompleteCallback(
EncodedImageCallback *callback) override {
if (callback) {
[encoder_ setCallback:^BOOL(RTC_OBJC_TYPE(RTCEncodedImage) * _Nonnull frame,
id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info) {
[encoder_ setCallback:^BOOL(
RTC_OBJC_TYPE(RTCEncodedImage) *_Nonnull frame,
id<RTC_OBJC_TYPE(RTCCodecSpecificInfo)> _Nonnull info) {
EncodedImage encodedImage = [frame nativeEncodedImage];
// Handle types that can be converted into one of CodecSpecificInfo's hard coded cases.
// Handle types that can be converted into one of CodecSpecificInfo's
// hard coded cases.
CodecSpecificInfo codecSpecificInfo;
if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) class]]) {
codecSpecificInfo =
[(RTC_OBJC_TYPE(RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
if ([info isKindOfClass:[RTC_OBJC_TYPE(RTCCodecSpecificInfoH264)
class]]) {
codecSpecificInfo = [(RTC_OBJC_TYPE(
RTCCodecSpecificInfoH264) *)info nativeCodecSpecificInfo];
}
EncodedImageCallback::Result res = callback->OnEncodedImage(encodedImage, &codecSpecificInfo);
EncodedImageCallback::Result res =
callback->OnEncodedImage(encodedImage, &codecSpecificInfo);
return res.error == EncodedImageCallback::Result::OK;
}];
} else {
@@ -87,7 +95,8 @@ class ObjCVideoEncoder : public VideoEncoder {
void SetRates(const RateControlParameters &parameters) override {
const uint32_t bitrate = parameters.bitrate.get_sum_kbps();
const uint32_t framerate = static_cast<uint32_t>(parameters.framerate_fps + 0.5);
const uint32_t framerate =
static_cast<uint32_t>(parameters.framerate_fps + 0.5);
[encoder_ setBitrate:bitrate framerate:framerate];
}
@@ -95,12 +104,15 @@ class ObjCVideoEncoder : public VideoEncoder {
EncoderInfo info;
info.implementation_name = implementation_name_;
RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds = [encoder_ scalingSettings];
info.scaling_settings = qp_thresholds ? ScalingSettings(qp_thresholds.low, qp_thresholds.high) :
ScalingSettings::kOff;
RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *qp_thresholds =
[encoder_ scalingSettings];
info.scaling_settings = qp_thresholds ?
ScalingSettings(qp_thresholds.low, qp_thresholds.high) :
ScalingSettings::kOff;
info.requested_resolution_alignment = encoder_.resolutionAlignment > 0 ?: 1;
info.apply_alignment_to_all_simulcast_layers = encoder_.applyAlignmentToAllSimulcastLayers;
info.apply_alignment_to_all_simulcast_layers =
encoder_.applyAlignmentToAllSimulcastLayers;
info.supports_native_handle = encoder_.supportsNativeHandle;
info.is_hardware_accelerated = true;
return info;
@@ -111,35 +123,43 @@ class ObjCVideoEncoder : public VideoEncoder {
const std::string implementation_name_;
};
class ObjcVideoEncoderSelector : public VideoEncoderFactory::EncoderSelectorInterface {
class ObjcVideoEncoderSelector
: public VideoEncoderFactory::EncoderSelectorInterface {
public:
ObjcVideoEncoderSelector(id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector) {
ObjcVideoEncoderSelector(
id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector) {
selector_ = selector;
}
void OnCurrentEncoder(const SdpVideoFormat &format) override {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo)
alloc] initWithNativeSdpVideoFormat:format];
[selector_ registerCurrentEncoderInfo:info];
}
std::optional<SdpVideoFormat> OnEncoderBroken() override {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBrokenEncoder];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
[selector_ encoderForBrokenEncoder];
if (info) {
return [info nativeSdpVideoFormat];
}
return std::nullopt;
}
std::optional<SdpVideoFormat> OnAvailableBitrate(const DataRate &rate) override {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_ encoderForBitrate:rate.kbps<NSInteger>()];
std::optional<SdpVideoFormat> OnAvailableBitrate(
const DataRate &rate) override {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
[selector_ encoderForBitrate:rate.kbps<NSInteger>()];
if (info) {
return [info nativeSdpVideoFormat];
}
return std::nullopt;
}
std::optional<SdpVideoFormat> OnResolutionChange(const RenderResolution &resolution) override {
if ([selector_ respondsToSelector:@selector(encoderForResolutionChangeBySize:)]) {
std::optional<SdpVideoFormat> OnResolutionChange(
const RenderResolution &resolution) override {
if ([selector_
respondsToSelector:@selector(encoderForResolutionChangeBySize:)]) {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [selector_
encoderForResolutionChangeBySize:CGSizeMake(resolution.Width(), resolution.Height())];
encoderForResolutionChangeBySize:CGSizeMake(resolution.Width(),
resolution.Height())];
if (info) {
return [info nativeSdpVideoFormat];
}
@@ -159,13 +179,16 @@ ObjCVideoEncoderFactory::ObjCVideoEncoderFactory(
ObjCVideoEncoderFactory::~ObjCVideoEncoderFactory() {}
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>
ObjCVideoEncoderFactory::wrapped_encoder_factory() const {
return encoder_factory_;
}
std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats() const {
std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats()
const {
std::vector<SdpVideoFormat> supported_formats;
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ supportedCodecs]) {
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) *
supportedCodec in [encoder_factory_ supportedCodecs]) {
SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
supported_formats.push_back(format);
}
@@ -173,10 +196,12 @@ std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetSupportedFormats() const
return supported_formats;
}
std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetImplementations() const {
std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetImplementations()
const {
if ([encoder_factory_ respondsToSelector:@selector(implementations)]) {
std::vector<SdpVideoFormat> supported_formats;
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) * supportedCodec in [encoder_factory_ implementations]) {
for (RTC_OBJC_TYPE(RTCVideoCodecInfo) *
supportedCodec in [encoder_factory_ implementations]) {
SdpVideoFormat format = [supportedCodec nativeSdpVideoFormat];
supported_formats.push_back(format);
}
@@ -186,31 +211,37 @@ std::vector<SdpVideoFormat> ObjCVideoEncoderFactory::GetImplementations() const
}
VideoEncoderFactory::CodecSupport ObjCVideoEncoderFactory::QueryCodecSupport(
const SdpVideoFormat &format, std::optional<std::string> scalability_mode) const {
if ([encoder_factory_ respondsToSelector:@selector(queryCodecSupport:scalabilityMode:)]) {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
const SdpVideoFormat &format,
std::optional<std::string> scalability_mode) const {
if ([encoder_factory_ respondsToSelector:@selector(queryCodecSupport:
scalabilityMode:)]) {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo)
alloc] initWithNativeSdpVideoFormat:format];
NSString *mode;
if (scalability_mode.has_value()) {
mode = [NSString stringForAbslStringView:*scalability_mode];
}
RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) *result = [encoder_factory_ queryCodecSupport:info
scalabilityMode:mode];
return {.is_supported = result.isSupported, .is_power_efficient = result.isPowerEfficient};
RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) *result =
[encoder_factory_ queryCodecSupport:info scalabilityMode:mode];
return {.is_supported = result.isSupported,
.is_power_efficient = result.isPowerEfficient};
}
// Use default implementation.
return VideoEncoderFactory::QueryCodecSupport(format, scalability_mode);
}
std::unique_ptr<VideoEncoder> ObjCVideoEncoderFactory::Create(const Environment &env,
const SdpVideoFormat &format) {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat:format];
id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder = [encoder_factory_ createEncoder:info];
if ([encoder conformsToProtocol:@protocol(RTC_OBJC_TYPE(RTCNativeVideoEncoderBuilder))]) {
return [((id<RTC_OBJC_TYPE(RTCNativeVideoEncoderBuilder)>)encoder) build:env];
std::unique_ptr<VideoEncoder> ObjCVideoEncoderFactory::Create(
const Environment &env, const SdpVideoFormat &format) {
RTC_OBJC_TYPE(RTCVideoCodecInfo) *info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo)
alloc] initWithNativeSdpVideoFormat:format];
id<RTC_OBJC_TYPE(RTCVideoEncoder)> encoder =
[encoder_factory_ createEncoder:info];
if ([encoder conformsToProtocol:@protocol(RTC_OBJC_TYPE(
RTCNativeVideoEncoderBuilder))]) {
return
[((id<RTC_OBJC_TYPE(RTCNativeVideoEncoderBuilder)>)encoder) build:env];
} else {
return std::make_unique<ObjCVideoEncoder>(encoder);
}
@@ -219,7 +250,8 @@ std::unique_ptr<VideoEncoder> ObjCVideoEncoderFactory::Create(const Environment
std::unique_ptr<VideoEncoderFactory::EncoderSelectorInterface>
ObjCVideoEncoderFactory::GetEncoderSelector() const {
if ([encoder_factory_ respondsToSelector:@selector(encoderSelector)]) {
id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector = [encoder_factory_ encoderSelector];
id<RTC_OBJC_TYPE(RTCVideoEncoderSelector)> selector =
[encoder_factory_ encoderSelector];
if (selector) {
return absl::make_unique<ObjcVideoEncoderSelector>(selector);
}


@@ -16,10 +16,11 @@
namespace webrtc {
RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) {
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
rotation:RTCVideoRotation(frame.rotation())
timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc]
initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer())
rotation:RTCVideoRotation(frame.rotation())
timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
videoFrame.timeStamp = frame.rtp_timestamp();
return videoFrame;


@@ -18,7 +18,8 @@
namespace webrtc {
ObjCVideoRenderer::ObjCVideoRenderer(id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer)
ObjCVideoRenderer::ObjCVideoRenderer(
id<RTC_OBJC_TYPE(RTCVideoRenderer)> renderer)
: renderer_(renderer), size_(CGSizeZero) {}
void ObjCVideoRenderer::OnFrame(const VideoFrame& nativeVideoFrame) {


@@ -19,7 +19,8 @@
RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame));
@interface RTCObjCVideoSourceAdapter : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
@interface RTCObjCVideoSourceAdapter
: NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
@end
namespace webrtc {


@@ -40,7 +40,8 @@ ObjCVideoTrackSource::ObjCVideoTrackSource(bool is_screencast)
: AdaptedVideoTrackSource(/* required resolution alignment */ 2),
is_screencast_(is_screencast) {}
ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) {
ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter)
: adapter_(adapter) {
adapter_.objCVideoTrackSource = this;
}
@@ -60,12 +61,16 @@ bool ObjCVideoTrackSource::remote() const {
return false;
}
void ObjCVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) {
cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
void ObjCVideoTrackSource::OnOutputFormatRequest(int width,
int height,
int fps) {
cricket::VideoFormat format(
width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
video_adapter()->OnOutputFormatRequest(format);
}
void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame) {
void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) *
frame) {
const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
const int64_t translated_timestamp_us =
timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros());
@@ -92,24 +97,28 @@ void ObjCVideoTrackSource::OnCapturedFrame(RTC_OBJC_TYPE(RTCVideoFrame) * frame)
if (adapted_width == frame.width && adapted_height == frame.height) {
// No adaption - optimized path.
buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
} else if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
} else if ([frame.buffer
isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) {
// Adapted CVPixelBuffer frame.
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
(RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer;
buffer = rtc::make_ref_counted<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
adaptedWidth:adapted_width
adaptedHeight:adapted_height
cropWidth:crop_width
cropHeight:crop_height
cropX:crop_x + rtcPixelBuffer.cropX
cropY:crop_y + rtcPixelBuffer.cropY]);
buffer =
rtc::make_ref_counted<ObjCFrameBuffer>([[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
adaptedWidth:adapted_width
adaptedHeight:adapted_height
cropWidth:crop_width
cropHeight:crop_height
cropX:crop_x + rtcPixelBuffer.cropX
cropY:crop_y + rtcPixelBuffer.cropY]);
} else {
// Adapted I420 frame.
// TODO(magjed): Optimize this I420 path.
rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
rtc::scoped_refptr<I420Buffer> i420_buffer =
I420Buffer::Create(adapted_width, adapted_height);
buffer = rtc::make_ref_counted<ObjCFrameBuffer>(frame.buffer);
i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
i420_buffer->CropAndScaleFrom(
*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
buffer = i420_buffer;
}
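// Worked example (illustration): adapting a 1280x720 capture to 640x360 uses
// the full frame as crop (crop_x = crop_y = 0, crop_width = 1280,
// crop_height = 720) and halves both dimensions, so the CVPixelBuffer branch
// above simply re-wraps the original pixel buffer with the new adapted size.
static_assert(1280 / 2 == 640 && 720 / 2 == 360, "2:1 downscale");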


@@ -28,7 +28,8 @@ typedef void (^VideoSinkCallback)(RTC_OBJC_TYPE(RTCVideoFrame) *);
namespace {
class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
class ObjCCallbackVideoSink
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
ObjCCallbackVideoSink(VideoSinkCallback callback) : callback_(callback) {}
@@ -59,11 +60,15 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFrameAdaptsFrame {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
@@ -72,7 +77,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
@@ -86,16 +92,20 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
}
- (void)testOnCapturedFrameAdaptsFrameWithAlignment {
// Requesting to adapt 1280x720 to 912x514 gives 639x360 without alignment. The 639 causes issues
// with some hardware encoders (e.g. HEVC) so in this test we verify that the alignment is set and
// respected.
// Requesting to adapt 1280x720 to 912x514 gives 639x360 without alignment.
// The 639 causes issues with some hardware encoders (e.g. HEVC) so in this
// test we verify that the alignment is set and respected.
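// Illustration of the comment above, assuming the track source's required
// resolution alignment of 2: 912x514 would naively adapt to 639x360, and
// since 639 is odd the result is aligned to 640x360, so both dimensions
// stay even.
static_assert(639 % 2 == 1 && 640 % 2 == 0 && 360 % 2 == 0,
              "alignment rounds 639 up to 640");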
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
@@ -104,7 +114,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
_video_source->OnOutputFormatRequest(912, 514, 30);
@@ -118,14 +129,19 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
}
- (void)testOnCapturedFrameAdaptationResultsInCommonResolutions {
// Some of the most common resolutions used in the wild are 640x360, 480x270 and 320x180.
// Make sure that we properly scale down to exactly these resolutions.
// Some of the most common resolutions used in the wild are 640x360, 480x270
// and 320x180. Make sure that we properly scale down to exactly these
// resolutions.
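// Arithmetic behind the comment above (illustration): each target is an
// exact 16:9 fraction of 1280x720, namely 1/2, 3/8 and 1/4.
static_assert(1280 / 2 == 640 && 720 / 2 == 360, "640x360");
static_assert(1280 * 3 / 8 == 480 && 720 * 3 / 8 == 270, "480x270");
static_assert(1280 / 4 == 320 && 720 / 4 == 180, "320x180");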
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
@@ -134,7 +150,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
cricket::FakeVideoRenderer *video_renderer = new cricket::FakeVideoRenderer();
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(video_renderer, video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
@@ -163,32 +180,40 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFrameWithoutAdaptation {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 360, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
360,
640,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(frame.width, outputFrame.width);
XCTAssertEqual(frame.height, outputFrame.height);
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(
^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(frame.width, outputFrame.width);
XCTAssertEqual(frame.height, outputFrame.height);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(buffer.cropX, outputBuffer.cropX);
XCTAssertEqual(buffer.cropY, outputBuffer.cropY);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
[callbackExpectation fulfill];
});
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
_video_source->OnCapturedFrame(frame);
@@ -199,32 +224,40 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFrameCVPixelBufferNeedsAdaptation {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(
^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 0);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 0);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
[callbackExpectation fulfill];
});
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
_video_source->OnCapturedFrame(frame);
@@ -235,32 +268,40 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFrameCVPixelBufferNeedsCropping {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
380,
640,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(
^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 10);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 10);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
[callbackExpectation fulfill];
});
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
_video_source->OnCapturedFrame(frame);
@ -271,41 +312,49 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFramePreAdaptedCVPixelBufferNeedsAdaptation {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
// Create a frame that's already adapted down.
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:640
adaptedHeight:360
cropWidth:720
cropHeight:1280
cropX:0
cropY:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:640
adaptedHeight:360
cropWidth:720
cropHeight:1280
cropX:0
cropY:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 480);
XCTAssertEqual(outputFrame.height, 270);
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(
^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 480);
XCTAssertEqual(outputFrame.height, 270);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 0);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(outputBuffer.cropWidth, 640);
XCTAssertEqual(outputBuffer.cropHeight, 360);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 0);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(outputBuffer.cropWidth, 640);
XCTAssertEqual(outputBuffer.cropHeight, 360);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
[callbackExpectation fulfill];
});
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(480, 270, 30);
_video_source->OnCapturedFrame(frame);
@ -316,40 +365,48 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFramePreCroppedCVPixelBufferNeedsCropping {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
380,
640,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:370
adaptedHeight:640
cropWidth:370
cropHeight:640
cropX:10
cropY:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:370
adaptedHeight:640
cropWidth:370
cropHeight:640
cropX:10
cropY:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(
^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 14);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(outputBuffer.cropWidth, 360);
XCTAssertEqual(outputBuffer.cropHeight, 640);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 14);
XCTAssertEqual(outputBuffer.cropY, 0);
XCTAssertEqual(outputBuffer.cropWidth, 360);
XCTAssertEqual(outputBuffer.cropHeight, 640);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
[callbackExpectation fulfill];
});
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
_video_source->OnCapturedFrame(frame);
@ -360,40 +417,48 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
- (void)testOnCapturedFrameSmallerPreCroppedCVPixelBufferNeedsCropping {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 380, 640, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
380,
640,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:300
adaptedHeight:640
cropWidth:300
cropHeight:640
cropX:40
cropY:0];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:300
adaptedHeight:640
cropWidth:300
cropHeight:640
cropX:40
cropY:0];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:buffer
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 300);
XCTAssertEqual(outputFrame.height, 534);
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(
^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTAssertEqual(outputFrame.width, 300);
XCTAssertEqual(outputFrame.height, 534);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 40);
XCTAssertEqual(outputBuffer.cropY, 52);
XCTAssertEqual(outputBuffer.cropWidth, 300);
XCTAssertEqual(outputBuffer.cropHeight, 534);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *outputBuffer = outputFrame.buffer;
XCTAssertEqual(outputBuffer.cropX, 40);
XCTAssertEqual(outputBuffer.cropY, 52);
XCTAssertEqual(outputBuffer.cropWidth, 300);
XCTAssertEqual(outputBuffer.cropHeight, 534);
XCTAssertEqual(buffer.pixelBuffer, outputBuffer.pixelBuffer);
[callbackExpectation fulfill];
});
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
_video_source->OnCapturedFrame(frame);
@ -403,7 +468,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
}
- (void)testOnCapturedFrameI420BufferNeedsAdaptation {
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
CreateI420Gradient(720, 1280);
RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
[[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
@ -411,22 +477,28 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) *
outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer =
(RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
double psnr =
I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
_video_source->OnCapturedFrame(frame);
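Why a 640x360 request against a 720x1280 portrait input produces 360x640: the adapter honors the requested resolution in either orientation while preserving the input aspect ratio. A minimal sketch of that arithmetic, illustrative only and not the actual webrtc::VideoAdapter code:

#include <algorithm>
#include <cmath>
#include <utility>

// Match the request orientation to the input, then scale down uniformly so
// both dimensions fit inside the request.
static void AdaptedSize(int inWidth, int inHeight,
                        int reqWidth, int reqHeight,
                        int *outWidth, int *outHeight) {
  if ((inWidth < inHeight) != (reqWidth < reqHeight)) {
    std::swap(reqWidth, reqHeight);  // Treat the request orientation-agnostically.
  }
  double scale =
      std::min(1.0,
               std::min(static_cast<double>(reqWidth) / inWidth,
                        static_cast<double>(reqHeight) / inHeight));
  *outWidth = static_cast<int>(std::round(inWidth * scale));
  *outHeight = static_cast<int>(std::round(inHeight * scale));
}

// AdaptedSize(720, 1280, 640, 360, &w, &h) yields w == 360, h == 640, which
// matches the expectations asserted in the test above.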
@ -435,7 +507,8 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
}
- (void)testOnCapturedFrameI420BufferNeedsCropping {
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(380, 640);
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
CreateI420Gradient(380, 640);
RTC_OBJC_TYPE(RTCI420Buffer) *buffer =
[[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
RTC_OBJC_TYPE(RTCVideoFrame) *frame =
@ -443,22 +516,28 @@ class ObjCCallbackVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
rotation:RTCVideoRotation_0
timeStampNs:0];
XCTestExpectation *callbackExpectation = [self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) * outputFrame) {
XCTestExpectation *callbackExpectation =
[self expectationWithDescription:@"videoSinkCallback"];
ObjCCallbackVideoSink callback_video_sink(^void(RTC_OBJC_TYPE(RTCVideoFrame) *
outputFrame) {
XCTAssertEqual(outputFrame.width, 360);
XCTAssertEqual(outputFrame.height, 640);
RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer = (RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
RTC_OBJC_TYPE(RTCI420Buffer) *outputBuffer =
(RTC_OBJC_TYPE(RTCI420Buffer) *)outputFrame.buffer;
double psnr = I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
double psnr =
I420PSNR(*[buffer nativeI420Buffer], *[outputBuffer nativeI420Buffer]);
XCTAssertGreaterThanOrEqual(psnr, 40);
[callbackExpectation fulfill];
});
const rtc::VideoSinkWants video_sink_wants;
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface = _video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink, video_sink_wants);
rtc::VideoSourceInterface<webrtc::VideoFrame> *video_source_interface =
_video_source.get();
video_source_interface->AddOrUpdateSink(&callback_video_sink,
video_sink_wants);
_video_source->OnOutputFormatRequest(640, 360, 30);
_video_source->OnCapturedFrame(frame);


@ -18,30 +18,30 @@
#include "api/scoped_refptr.h"
typedef int32_t(^NeedMorePlayDataBlock)(const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms);
typedef int32_t (^NeedMorePlayDataBlock)(const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
const uint32_t samplesPerSec,
void *audioSamples,
size_t &nSamplesOut,
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms);
typedef int32_t(^RecordedDataIsAvailableBlock)(const void* audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
uint32_t& newMicLevel);
typedef int32_t (^RecordedDataIsAvailableBlock)(const void *audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
const uint32_t samplesPerSec,
const uint32_t totalDelayMS,
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
uint32_t &newMicLevel);
// This class implements the AudioTransport API and forwards all methods to the appropriate blocks.
// This class implements the AudioTransport API and forwards all methods to the
// appropriate blocks.
class MockAudioTransport : public webrtc::AudioTransport {
public:
public:
MockAudioTransport() {}
~MockAudioTransport() override {}
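As the comment above notes, every AudioTransport override simply forwards to a stored block, so a test installs behavior like this (a trimmed sketch of the pattern used later in this file; memset comes from <cstring>):

MockAudioTransport mock;
// Install a playout callback; the mock forwards NeedMorePlayData() to it.
mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
                                     const size_t nBytesPerSample,
                                     const size_t nChannels,
                                     const uint32_t samplesPerSec,
                                     void *audioSamples,
                                     size_t &nSamplesOut,
                                     int64_t *elapsed_time_ms,
                                     int64_t *ntp_time_ms) {
  // Hand back silence and report that all requested samples were produced.
  memset(audioSamples, 0, nSamples * nBytesPerSample * nChannels);
  nSamplesOut = nSamples;
  return 0;
});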
@ -57,10 +57,10 @@ public:
const size_t nBytesPerSample,
const size_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
size_t& nSamplesOut,
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) override {
void *audioSamples,
size_t &nSamplesOut,
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms) override {
return needMorePlayDataBlock(nSamples,
nBytesPerSample,
nChannels,
@ -71,7 +71,7 @@ public:
ntp_time_ms);
}
int32_t RecordedDataIsAvailable(const void* audioSamples,
int32_t RecordedDataIsAvailable(const void *audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
@ -80,7 +80,7 @@ public:
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
uint32_t& newMicLevel) override {
uint32_t &newMicLevel) override {
return recordedDataIsAvailableBlock(audioSamples,
nSamples,
nBytesPerSample,
@ -97,9 +97,9 @@ public:
int sample_rate,
size_t number_of_channels,
size_t number_of_frames,
void* audio_data,
int64_t* elapsed_time_ms,
int64_t* ntp_time_ms) override {}
void *audio_data,
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms) override {}
private:
NeedMorePlayDataBlock needMorePlayDataBlock;
@ -158,8 +158,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
audioDeviceModule = webrtc::CreateAudioDeviceModule();
XCTAssertEqual(0, audioDeviceModule->Init());
XCTAssertEqual(0, audioDeviceModule->GetPlayoutAudioParameters(&playoutParameters));
XCTAssertEqual(0, audioDeviceModule->GetRecordAudioParameters(&recordParameters));
XCTAssertEqual(
0, audioDeviceModule->GetPlayoutAudioParameters(&playoutParameters));
XCTAssertEqual(
0, audioDeviceModule->GetRecordAudioParameters(&recordParameters));
}
- (void)tearDown {
@ -181,7 +183,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
XCTAssertFalse(audioDeviceModule->Playing());
}
- (void)startRecording{
- (void)startRecording {
XCTAssertFalse(audioDeviceModule->Recording());
XCTAssertEqual(0, audioDeviceModule->InitRecording());
XCTAssertTrue(audioDeviceModule->RecordingIsInitialized());
@ -189,15 +191,18 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
XCTAssertTrue(audioDeviceModule->Recording());
}
- (void)stopRecording{
- (void)stopRecording {
XCTAssertEqual(0, audioDeviceModule->StopRecording());
XCTAssertFalse(audioDeviceModule->Recording());
}
- (NSURL*)fileURLForSampleRate:(int)sampleRate {
XCTAssertTrue(sampleRate == 48000 || sampleRate == 44100 || sampleRate == 16000);
NSString *filename = [NSString stringWithFormat:@"audio_short%d", sampleRate / 1000];
NSURL *url = [[NSBundle mainBundle] URLForResource:filename withExtension:@"pcm"];
- (NSURL *)fileURLForSampleRate:(int)sampleRate {
XCTAssertTrue(sampleRate == 48000 || sampleRate == 44100 ||
sampleRate == 16000);
NSString *filename =
[NSString stringWithFormat:@"audio_short%d", sampleRate / 1000];
NSURL *url = [[NSBundle mainBundle] URLForResource:filename
withExtension:@"pcm"];
XCTAssertNotNil(url);
return url;
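For context, this helper resolves the bundled PCM fixture for a given sample rate, and the playout tests pair it with an NSInputStream; roughly:

// 48000 maps to the "audio_short48.pcm" resource per the format string above.
NSURL *url = [self fileURLForSampleRate:48000];
NSInputStream *inputStream = [[NSInputStream alloc] initWithURL:url];
[inputStream open];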
@ -279,7 +284,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
XCTAssertNotEqual((void *)NULL, audioSamples);
return 0;
});
@ -299,7 +304,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
// Passing this test ensures that initialization of the second audio unit
// has been done successfully and that there is no conflict with the already
// playing first ADM.
XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
XCTestExpectation *playoutExpectation =
[self expectationWithDescription:@"NeedMorePlayoutData"];
__block int num_callbacks = 0;
MockAudioTransport mock2;
@ -316,7 +322,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
XCTAssertNotEqual((void *)NULL, audioSamples);
if (++num_callbacks == kNumCallbacks) {
[playoutExpectation fulfill];
}
@ -340,7 +346,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
// audio samples to play out using the NeedMorePlayData callback.
- (void)testStartPlayoutVerifyCallbacks {
XCTSkipIf(!_testEnabled);
XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
XCTestExpectation *playoutExpectation =
[self expectationWithDescription:@"NeedMorePlayoutData"];
__block int num_callbacks = 0;
mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
const size_t nBytesPerSample,
@ -355,7 +362,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
XCTAssertNotEqual((void *)NULL, audioSamples);
if (++num_callbacks == kNumCallbacks) {
[playoutExpectation fulfill];
}
@ -374,10 +381,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
- (void)testStartRecordingVerifyCallbacks {
XCTSkipIf(!_testEnabled);
XCTestExpectation *recordExpectation =
[self expectationWithDescription:@"RecordedDataIsAvailable"];
[self expectationWithDescription:@"RecordedDataIsAvailable"];
__block int num_callbacks = 0;
mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
mock.expectRecordedDataIsAvailable(^(const void *audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
@ -386,8 +393,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
uint32_t& newMicLevel) {
XCTAssertNotEqual((void*)NULL, audioSamples);
uint32_t &newMicLevel) {
XCTAssertNotEqual((void *)NULL, audioSamples);
XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, self.recordParameters.channels());
@ -412,11 +419,12 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
// active in both directions.
- (void)testStartPlayoutAndRecordingVerifyCallbacks {
XCTSkipIf(!_testEnabled);
XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
XCTestExpectation *playoutExpectation =
[self expectationWithDescription:@"NeedMorePlayoutData"];
__block NSUInteger callbackCount = 0;
XCTestExpectation *recordExpectation =
[self expectationWithDescription:@"RecordedDataIsAvailable"];
[self expectationWithDescription:@"RecordedDataIsAvailable"];
recordExpectation.expectedFulfillmentCount = kNumCallbacks;
mock.expectNeedMorePlayData(^int32_t(const size_t nSamples,
@ -432,7 +440,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, self.playoutParameters.channels());
XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
XCTAssertNotEqual((void*)NULL, audioSamples);
XCTAssertNotEqual((void *)NULL, audioSamples);
if (callbackCount++ >= kNumCallbacks) {
[playoutExpectation fulfill];
}
@ -440,7 +448,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
return 0;
});
mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
mock.expectRecordedDataIsAvailable(^(const void *audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
@ -449,8 +457,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
uint32_t& newMicLevel) {
XCTAssertNotEqual((void*)NULL, audioSamples);
uint32_t &newMicLevel) {
XCTAssertNotEqual((void *)NULL, audioSamples);
XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
XCTAssertEqual(nBytesPerSample, kBytesPerSample);
XCTAssertEqual(nChannels, self.recordParameters.channels());
@ -479,7 +487,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
XCTAssertEqual(1u, playoutParameters.channels());
// Using XCTestExpectation to count callbacks is very slow.
XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
XCTestExpectation *playoutExpectation =
[self expectationWithDescription:@"NeedMorePlayoutData"];
const int expectedCallbackCount = kFilePlayTimeInSec * kNumCallbacksPerSecond;
__block int callbackCount = 0;
@ -494,7 +503,8 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
size_t &nSamplesOut,
int64_t *elapsed_time_ms,
int64_t *ntp_time_ms) {
[inputStream read:(uint8_t *)audioSamples maxLength:nSamples*nBytesPerSample*nChannels];
[inputStream read:(uint8_t *)audioSamples
maxLength:nSamples * nBytesPerSample * nChannels];
nSamplesOut = nSamples;
if (callbackCount++ == expectedCallbackCount) {
[playoutExpectation fulfill];
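For scale: the read length above is the per-callback byte count, nSamples * nBytesPerSample * nChannels. With 10 ms buffers at 48 kHz, mono, 2-byte samples, that is 480 * 2 * 1 = 960 bytes per callback.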
@ -533,11 +543,14 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
- (void)testRunPlayoutAndRecordingInFullDuplex {
XCTSkipIf(!_testEnabled);
XCTAssertEqual(recordParameters.channels(), playoutParameters.channels());
XCTAssertEqual(recordParameters.sample_rate(), playoutParameters.sample_rate());
XCTAssertEqual(recordParameters.sample_rate(),
playoutParameters.sample_rate());
XCTestExpectation *playoutExpectation = [self expectationWithDescription:@"NeedMorePlayoutData"];
XCTestExpectation *playoutExpectation =
[self expectationWithDescription:@"NeedMorePlayoutData"];
__block NSUInteger playoutCallbacks = 0;
NSUInteger expectedPlayoutCallbacks = kFullDuplexTimeInSec * kNumCallbacksPerSecond;
NSUInteger expectedPlayoutCallbacks =
kFullDuplexTimeInSec * kNumCallbacksPerSecond;
// FIFO queue and measurements
NSMutableArray *fifoBuffer = [NSMutableArray arrayWithCapacity:20];
@ -545,7 +558,7 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
__block NSUInteger fifoTotalWrittenElements = 0;
__block NSUInteger fifoWriteCount = 0;
mock.expectRecordedDataIsAvailable(^(const void* audioSamples,
mock.expectRecordedDataIsAvailable(^(const void *audioSamples,
const size_t nSamples,
const size_t nBytesPerSample,
const size_t nChannels,
@ -554,12 +567,14 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
uint32_t& newMicLevel) {
uint32_t &newMicLevel) {
if (fifoWriteCount++ < kNumIgnoreFirstCallbacks) {
return 0;
}
NSData *data = [NSData dataWithBytes:audioSamples length:nSamples*nBytesPerSample*nChannels];
NSData *data =
[NSData dataWithBytes:audioSamples
length:nSamples * nBytesPerSample * nChannels];
@synchronized(fifoBuffer) {
[fifoBuffer addObject:data];
fifoMaxSize = MAX(fifoMaxSize, fifoBuffer.count);
@ -587,9 +602,9 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
}
if (data) {
memcpy(audioSamples, (char*) data.bytes, data.length);
memcpy(audioSamples, (char *)data.bytes, data.length);
} else {
memset(audioSamples, 0, nSamples*nBytesPerSample*nChannels);
memset(audioSamples, 0, nSamples * nBytesPerSample * nChannels);
}
if (playoutCallbacks++ == expectedPlayoutCallbacks) {
@ -604,10 +619,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
NSTimeInterval waitTimeout = kFullDuplexTimeInSec * 2.0;
[self waitForExpectationsWithTimeout:waitTimeout handler:nil];
size_t fifoAverageSize =
(fifoTotalWrittenElements == 0)
? 0.0
: 0.5 + (double)fifoTotalWrittenElements / (fifoWriteCount - kNumIgnoreFirstCallbacks);
size_t fifoAverageSize = (fifoTotalWrittenElements == 0) ? 0.0 :
0.5 +
(double)fifoTotalWrittenElements /
(fifoWriteCount - kNumIgnoreFirstCallbacks);
[self stopPlayout];
[self stopRecording];
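One subtlety in the fifoAverageSize computation above: the result is assigned to a size_t, which truncates toward zero, so the leading 0.5 turns truncation into round-to-nearest. In isolation:

// Truncation vs. round-to-nearest when narrowing a double to size_t.
double writtenElements = 47.0, writeCount = 10.0;
size_t truncated = writtenElements / writeCount;      // 4 (4.7 truncated)
size_t rounded = 0.5 + writtenElements / writeCount;  // 5 (5.2 truncated)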


@ -47,12 +47,15 @@
_audioDeviceModule = webrtc::CreateAudioDeviceModule();
_audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS(
/*bypass_voice_processing=*/false, /*muted_speech_event_handler=*/nullptr));
/*bypass_voice_processing=*/false,
/*muted_speech_event_handler=*/nullptr));
self.audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
NSError *error = nil;
[self.audioSession lockForConfiguration];
[self.audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error];
[self.audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
withOptions:0
error:&error];
XCTAssertNil(error);
[self.audioSession setMode:AVAudioSessionModeVoiceChat error:&error];
@ -75,27 +78,33 @@
// Verifies that the AudioDeviceIOS is_interrupted_ flag is reset correctly
// after an iOS AVAudioSessionInterruptionTypeEnded notification event.
// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted notifications by:
// AudioDeviceIOS listens to RTC_OBJC_TYPE(RTCAudioSession) interrupted
// notifications by:
// - In AudioDeviceIOS.InitPlayOrRecord registers its audio_session_observer_
// callback with RTC_OBJC_TYPE(RTCAudioSession)'s delegate list.
// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted notification, it
// - When RTC_OBJC_TYPE(RTCAudioSession) receives an iOS audio interrupted
// notification, it
// passes the notification to callbacks in its delegate list which sets
// AudioDeviceIOS's is_interrupted_ flag to true.
// - When AudioDeviceIOS.ShutdownPlayOrRecord is called, its
// audio_session_observer_ callback is removed from RTCAudioSessions's
// delegate list.
// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption notification,
// AudioDeviceIOS is not notified as its callback is not in RTC_OBJC_TYPE(RTCAudioSession)'s
// delegate list. This causes AudioDeviceIOS's is_interrupted_ flag to be in
// the wrong (true) state and the audio session will ignore audio changes.
// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is to initialize
// AudioDeviceIOS's is_interrupted_ flag to RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted
// flag in AudioDeviceIOS.InitPlayOrRecord.
// So if RTC_OBJC_TYPE(RTCAudioSession) receives an iOS end audio interruption
// notification, AudioDeviceIOS is not notified as its callback is not in
// RTC_OBJC_TYPE(RTCAudioSession)'s delegate list. This causes
// AudioDeviceIOS's is_interrupted_ flag to be in the wrong (true) state and
// the audio session will ignore audio changes.
// As RTC_OBJC_TYPE(RTCAudioSession) keeps its own interrupted state, the fix is
// to initialize AudioDeviceIOS's is_interrupted_ flag to
// RTC_OBJC_TYPE(RTCAudioSession)'s isInterrupted flag in
// AudioDeviceIOS.InitPlayOrRecord.
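// In code form, the fix described above amounts to seeding the flag from the
// session when audio is (re)initialized. Sketch only, using the names from
// this comment rather than quoting the actual AudioDeviceIOS source:
//
//   RTC_OBJC_TYPE(RTCAudioSession) *session =
//       [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
//   // The session tracks interruptions even while our observer is not
//   // registered, so adopt its state instead of assuming "not interrupted".
//   is_interrupted_ = session.isInterrupted;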
- (void)testInterruptedAudioSession {
XCTSkipIf(!_testEnabled);
XCTAssertTrue(self.audioSession.isActive);
XCTAssertTrue([self.audioSession.category isEqual:AVAudioSessionCategoryPlayAndRecord] ||
[self.audioSession.category isEqual:AVAudioSessionCategoryPlayback]);
XCTAssertTrue(
[self.audioSession.category
isEqual:AVAudioSessionCategoryPlayAndRecord] ||
[self.audioSession.category isEqual:AVAudioSessionCategoryPlayback]);
XCTAssertEqual(AVAudioSessionModeVoiceChat, self.audioSession.mode);
std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory =
@ -103,7 +112,8 @@
std::unique_ptr<webrtc::AudioDeviceBuffer> audio_buffer;
audio_buffer.reset(new webrtc::AudioDeviceBuffer(task_queue_factory.get()));
_audio_device->AttachAudioBuffer(audio_buffer.get());
XCTAssertEqual(webrtc::AudioDeviceGeneric::InitStatus::OK, _audio_device->Init());
XCTAssertEqual(webrtc::AudioDeviceGeneric::InitStatus::OK,
_audio_device->Init());
XCTAssertEqual(0, _audio_device->InitPlayout());
XCTAssertEqual(0, _audio_device->StartPlayout());
@ -128,9 +138,11 @@
}
- (void)testMuteSpeechHandlerCalledWithStartedWhenSpeechActivityHasStarted {
XCTestExpectation *handlerExpectation = [self expectationWithDescription:@"mutedSpeechHandler"];
webrtc::AudioDeviceModule::MutedSpeechEventHandler muted_speech_event_handler =
^void(webrtc::AudioDeviceModule::MutedSpeechEvent event) {
XCTestExpectation *handlerExpectation =
[self expectationWithDescription:@"mutedSpeechHandler"];
webrtc::AudioDeviceModule::MutedSpeechEventHandler
muted_speech_event_handler = ^void(
webrtc::AudioDeviceModule::MutedSpeechEvent event) {
XCTAssertEqual(event, webrtc::AudioDeviceModule::kMutedSpeechStarted);
[handlerExpectation fulfill];
};
@ -139,22 +151,26 @@
/*bypass_voice_processing=*/false,
/*muted_speech_event_handler=*/muted_speech_event_handler));
_audio_device->OnReceivedMutedSpeechActivity(kAUVoiceIOSpeechActivityHasStarted);
_audio_device->OnReceivedMutedSpeechActivity(
kAUVoiceIOSpeechActivityHasStarted);
[self waitForExpectations:@[ handlerExpectation ] timeout:10.0];
}
- (void)testMuteSpeechHandlerCalledWithEndedWhenSpeechActivityHasEnded {
XCTestExpectation *handlerExpectation = [self expectationWithDescription:@"mutedSpeechHandler"];
webrtc::AudioDeviceModule::MutedSpeechEventHandler muted_speech_event_handler =
^void(webrtc::AudioDeviceModule::MutedSpeechEvent event) {
XCTAssertEqual(event, webrtc::AudioDeviceModule::kMutedSpeechEnded);
[handlerExpectation fulfill];
};
XCTestExpectation *handlerExpectation =
[self expectationWithDescription:@"mutedSpeechHandler"];
webrtc::AudioDeviceModule::MutedSpeechEventHandler
muted_speech_event_handler =
^void(webrtc::AudioDeviceModule::MutedSpeechEvent event) {
XCTAssertEqual(event, webrtc::AudioDeviceModule::kMutedSpeechEnded);
[handlerExpectation fulfill];
};
_audio_device.reset(new webrtc::ios_adm::AudioDeviceIOS(
/*bypass_voice_processing=*/false,
/*muted_speech_event_handler=*/muted_speech_event_handler));
_audio_device->OnReceivedMutedSpeechActivity(kAUVoiceIOSpeechActivityHasEnded);
_audio_device->OnReceivedMutedSpeechActivity(
kAUVoiceIOSpeechActivityHasEnded);
[self waitForExpectations:@[ handlerExpectation ] timeout:10.0];
}


@ -25,8 +25,8 @@
@interface RTC_OBJC_TYPE (RTCAudioSession)
(UnitTesting)
@property(nonatomic,
readonly) std::vector<__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
@property(nonatomic, readonly) std::vector<
__weak id<RTC_OBJC_TYPE(RTCAudioSessionDelegate)> > delegates;
- (instancetype)initWithAudioSession:(id)audioSession;
@ -34,7 +34,7 @@
@interface MockAVAudioSession : NSObject
@property (nonatomic, readwrite, assign) float outputVolume;
@property(nonatomic, readwrite, assign) float outputVolume;
@end
@ -42,9 +42,10 @@
@synthesize outputVolume = _outputVolume;
@end
@interface RTCAudioSessionTestDelegate : NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@interface RTCAudioSessionTestDelegate
: NSObject <RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@property (nonatomic, readonly) float outputVolume;
@property(nonatomic, readonly) float outputVolume;
@end
@ -60,7 +61,8 @@
return self;
}
- (void)audioSessionDidBeginInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionDidBeginInterruption:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionDidEndInterruption:(RTC_OBJC_TYPE(RTCAudioSession) *)session
@ -69,10 +71,12 @@
- (void)audioSessionDidChangeRoute:(RTC_OBJC_TYPE(RTCAudioSession) *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
previousRoute:
(AVAudioSessionRouteDescription *)previousRoute {
}
- (void)audioSessionMediaServerTerminated:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionMediaServerTerminated:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionMediaServerReset:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
@ -81,7 +85,8 @@
- (void)audioSessionShouldConfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSessionShouldUnconfigure:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
- (void)audioSessionShouldUnconfigure:
(RTC_OBJC_TYPE(RTCAudioSession) *)session {
}
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
@ -101,14 +106,16 @@
- (instancetype)init {
self = [super init];
if (self) {
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session addDelegate:self];
}
return self;
}
- (void)dealloc {
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session removeDelegate:self];
}
@ -121,7 +128,8 @@
@implementation RTCAudioSessionTest
- (void)testAddAndRemoveDelegates {
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
NSMutableArray *delegates = [NSMutableArray array];
const size_t count = 5;
for (size_t i = 0; i < count; ++i) {
@ -131,16 +139,16 @@
[delegates addObject:delegate];
EXPECT_EQ(i + 1, session.delegates.size());
}
[delegates enumerateObjectsUsingBlock:^(RTCAudioSessionTestDelegate *obj,
NSUInteger idx,
BOOL *stop) {
[delegates enumerateObjectsUsingBlock:^(
RTCAudioSessionTestDelegate *obj, NSUInteger idx, BOOL *stop) {
[session removeDelegate:obj];
}];
EXPECT_EQ(0u, session.delegates.size());
}
- (void)testPushDelegate {
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
NSMutableArray *delegates = [NSMutableArray array];
const size_t count = 2;
for (size_t i = 0; i < count; ++i) {
@ -173,7 +181,8 @@
// Tests that delegates added to the audio session properly zero out. This is
// checking an implementation detail (that vectors of __weak work as expected).
- (void)testZeroingWeakDelegate {
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
@autoreleasepool {
// Add a delegate to the session. There should be one delegate at this
// point.
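For readers unfamiliar with the detail this test checks: in Objective-C++ under ARC, __weak references stored in C++ containers are zeroed when the referent is deallocated. A standalone sketch:

#import <Foundation/Foundation.h>
#include <cassert>
#include <vector>

static void WeakVectorDemo() {
  std::vector<__weak id> refs;
  @autoreleasepool {
    NSObject *object = [[NSObject alloc] init];
    refs.push_back(object);
    assert(refs[0] != nil);  // The strong local keeps the object alive.
  }
  // The object has been released; ARC has nilled the weak vector entry.
  assert(refs[0] == nil);
}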
@ -201,12 +210,14 @@
[[RTCTestRemoveOnDeallocDelegate alloc] init];
EXPECT_TRUE(delegate);
}
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
EXPECT_EQ(0u, session.delegates.size());
}
- (void)testAudioSessionActivation {
RTC_OBJC_TYPE(RTCAudioSession) *audioSession = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *audioSession =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
EXPECT_EQ(0, audioSession.activationCount);
[audioSession audioSessionDidActivate:[AVAudioSession sharedInstance]];
EXPECT_EQ(1, audioSession.activationCount);
@ -218,23 +229,27 @@
- (void)DISABLED_testConfigureWebRTCSession {
NSError *error = nil;
void (^setActiveBlock)(NSInvocation *invocation) = ^(NSInvocation *invocation) {
__autoreleasing NSError **retError;
[invocation getArgument:&retError atIndex:4];
*retError = [NSError errorWithDomain:@"AVAudioSession"
code:AVAudioSessionErrorCodeCannotInterruptOthers
userInfo:nil];
BOOL failure = NO;
[invocation setReturnValue:&failure];
};
void (^setActiveBlock)(NSInvocation *invocation) =
^(NSInvocation *invocation) {
__autoreleasing NSError **retError;
[invocation getArgument:&retError atIndex:4];
*retError = [NSError
errorWithDomain:@"AVAudioSession"
code:AVAudioSessionErrorCodeCannotInterruptOthers
userInfo:nil];
BOOL failure = NO;
[invocation setReturnValue:&failure];
};
id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
OCMStub([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES
withOptions:0
error:([OCMArg anyObjectRef])])
OCMStub([[mockAVAudioSession ignoringNonObjectArgs]
setActive:YES
withOptions:0
error:([OCMArg anyObjectRef])])
.andDo(setActiveBlock);
id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
id mockAudioSession =
OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
@ -242,9 +257,10 @@
[audioSession lockForConfiguration];
// configureWebRTCSession is forced to fail in the above mock interface,
// so activationCount should remain 0
OCMExpect([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES
withOptions:0
error:([OCMArg anyObjectRef])])
OCMExpect([[mockAVAudioSession ignoringNonObjectArgs]
setActive:YES
withOptions:0
error:([OCMArg anyObjectRef])])
.andDo(setActiveBlock);
OCMExpect([mockAudioSession session]).andReturn(mockAVAudioSession);
EXPECT_FALSE([audioSession configureWebRTCSession:&error]);
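Reduced to its essentials, the mocking here is OCMock's partial mock plus andDo:, forcing -[AVAudioSession setActive:withOptions:error:] to report failure; just that pattern, in isolation:

id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
OCMStub([[mockAVAudioSession ignoringNonObjectArgs]
            setActive:YES
          withOptions:0
                error:[OCMArg anyObjectRef]])
    .andDo(^(NSInvocation *invocation) {
      BOOL failure = NO;
      [invocation setReturnValue:&failure];  // Make setActive report failure.
    });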
@ -255,11 +271,17 @@
EXPECT_EQ(NO, [mockAVAudioSession setActive:YES withOptions:0 error:&error]);
[audioSession unlockForConfiguration];
// The -Wunused-value is a workaround for https://bugs.llvm.org/show_bug.cgi?id=45245
_Pragma("clang diagnostic push") _Pragma("clang diagnostic ignored \"-Wunused-value\"");
// The -Wunused-value is a workaround for
// https://bugs.llvm.org/show_bug.cgi?id=45245
_Pragma("clang diagnostic push")
_Pragma("clang diagnostic ignored \"-Wunused-value\"");
OCMVerify([mockAudioSession session]);
OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES withOptions:0 error:&error]);
OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:NO withOptions:0 error:&error]);
OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:YES
withOptions:0
error:&error]);
OCMVerify([[mockAVAudioSession ignoringNonObjectArgs] setActive:NO
withOptions:0
error:&error]);
_Pragma("clang diagnostic pop");
[mockAVAudioSession stopMocking];
@ -271,7 +293,8 @@
NSError *error = nil;
id mockAVAudioSession = OCMPartialMock([AVAudioSession sharedInstance]);
id mockAudioSession = OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
id mockAudioSession =
OCMPartialMock([RTC_OBJC_TYPE(RTCAudioSession) sharedInstance]);
OCMStub([mockAudioSession session]).andReturn(mockAVAudioSession);
RTC_OBJC_TYPE(RTCAudioSession) *audioSession = mockAudioSession;
@ -291,7 +314,9 @@
});
waitLock.Wait(timeout);
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error];
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
withOptions:0
error:&error];
EXPECT_TRUE(error != nil);
EXPECT_EQ(error.domain, kRTCAudioSessionErrorDomain);
EXPECT_EQ(error.code, kRTCAudioSessionErrorLockRequired);
@ -304,8 +329,8 @@
- (void)testAudioVolumeDidNotify {
MockAVAudioSession *mockAVAudioSession = [[MockAVAudioSession alloc] init];
RTC_OBJC_TYPE(RTCAudioSession) *session =
[[RTC_OBJC_TYPE(RTCAudioSession) alloc] initWithAudioSession:mockAVAudioSession];
RTC_OBJC_TYPE(RTCAudioSession) *session = [[RTC_OBJC_TYPE(RTCAudioSession)
alloc] initWithAudioSession:mockAVAudioSession];
RTCAudioSessionTestDelegate *delegate =
[[RTCAudioSessionTestDelegate alloc] init];
[session addDelegate:delegate];


@ -34,27 +34,28 @@ struct ToI420WithCropAndScaleSetting {
int scaleHeight;
};
constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[] = {
ToI420WithCropAndScaleSetting{
.inputWidth = 640,
.inputHeight = 360,
.offsetX = 0,
.offsetY = 0,
.cropWidth = 640,
.cropHeight = 360,
.scaleWidth = 320,
.scaleHeight = 180,
},
ToI420WithCropAndScaleSetting{
.inputWidth = 640,
.inputHeight = 360,
.offsetX = 160,
.offsetY = 90,
.cropWidth = 160,
.cropHeight = 90,
.scaleWidth = 320,
.scaleHeight = 180,
},
constexpr const ToI420WithCropAndScaleSetting
kToI420WithCropAndScaleSettings[] = {
ToI420WithCropAndScaleSetting{
.inputWidth = 640,
.inputHeight = 360,
.offsetX = 0,
.offsetY = 0,
.cropWidth = 640,
.cropHeight = 360,
.scaleWidth = 320,
.scaleHeight = 180,
},
ToI420WithCropAndScaleSetting{
.inputWidth = 640,
.inputHeight = 360,
.offsetX = 160,
.offsetY = 90,
.cropWidth = 160,
.cropHeight = 90,
.scaleWidth = 320,
.scaleHeight = 180,
},
};
} // namespace
@ -67,10 +68,14 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
- (void)testRequiresCroppingNoCrop {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertFalse([buffer requiresCropping]);
@ -79,16 +84,21 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
- (void)testRequiresCroppingWithCrop {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *croppedBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
XCTAssertTrue([croppedBuffer requiresCropping]);
@ -97,11 +107,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
- (void)testRequiresScalingNoScale {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertFalse([buffer requiresScalingToWidth:720 height:1280]);
CVBufferRelease(pixelBufferRef);
@ -109,11 +123,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
- (void)testRequiresScalingWithScale {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertTrue([buffer requiresScalingToWidth:360 height:640]);
CVBufferRelease(pixelBufferRef);
@ -121,17 +139,21 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
- (void)testRequiresScalingWithScaleAndMatchingCrop {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:720
adaptedHeight:1280
cropWidth:360
cropHeight:640
cropX:100
cropY:100];
XCTAssertFalse([buffer requiresScalingToWidth:360 height:640]);
CVBufferRelease(pixelBufferRef);
@ -139,23 +161,30 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
- (void)testBufferSize_NV12 {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
720,
1280,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 576000);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640],
576000);
CVBufferRelease(pixelBufferRef);
}
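// A note on the expected 576000 above: it is consistent with a temp buffer
// sized for the chroma planes of the source crop plus those of the
// destination, i.e. (720/2) * (1280/2) * 2 + (360/2) * (640/2) * 2 =
// 460800 + 115200 = 576000. That is an inference from the asserted value,
// not a quote of the implementation; the RGB case below asserts 0,
// presumably because that path scales without an intermediate buffer.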
- (void)testBufferSize_RGB {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
CVPixelBufferCreate(
NULL, 720, 1280, kCVPixelFormatType_32BGRA, NULL, &pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640], 0);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual([buffer bufferSizeForCroppingAndScalingToWidth:360 height:640],
0);
CVBufferRelease(pixelBufferRef);
}
@ -165,32 +194,47 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
}
- (void)testCropAndScaleNoOp_NV12 {
[self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(720, 1280)];
[self
cropAndScaleTestWithNV12InputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(720, 1280)];
}
- (void)testCropAndScale_NV12FullToVideo {
[self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
[self
cropAndScaleTestWithNV12InputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
outputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
}
- (void)testCropAndScaleZeroSizeFrame_NV12 {
[self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(0, 0)];
[self
cropAndScaleTestWithNV12InputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(0, 0)];
}
- (void)testCropAndScaleToSmallFormat_NV12 {
[self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(148, 320)];
[self
cropAndScaleTestWithNV12InputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(148, 320)];
}
- (void)testCropAndScaleToOddFormat_NV12 {
[self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(361, 640)];
[self
cropAndScaleTestWithNV12InputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputSize:CGSizeMake(361, 640)];
}
- (void)testCropAndScale_32BGRA {
@ -202,11 +246,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
}
- (void)testCropAndScaleWithSmallCropInfo_32ARGB {
[self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:2 cropY:3];
[self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB
cropX:2
cropY:3];
}
- (void)testCropAndScaleWithLargeCropInfo_32ARGB {
[self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB cropX:200 cropY:300];
[self cropAndScaleTestWithRGBPixelFormat:kCVPixelFormatType_32ARGB
cropX:200
cropY:300];
}
- (void)testToI420_NV12 {
@ -223,33 +271,41 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
- (void)testToI420WithCropAndScale_NV12 {
for (const auto &setting : kToI420WithCropAndScaleSettings) {
[self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
[self toI420WithCropAndScaleWithPixelFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
setting:setting];
}
}
- (void)testToI420WithCropAndScale_32BGRA {
for (const auto &setting : kToI420WithCropAndScaleSettings) {
[self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32BGRA setting:setting];
[self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32BGRA
setting:setting];
}
}
- (void)testToI420WithCropAndScale_32ARGB {
for (const auto &setting : kToI420WithCropAndScaleSettings) {
[self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32ARGB setting:setting];
[self toI420WithCropAndScaleWithPixelFormat:kCVPixelFormatType_32ARGB
setting:setting];
}
}
- (void)testScaleBufferTest {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, 1920, 1080, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
1920,
1080,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
NULL,
&pixelBufferRef);
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(1920, 1080);
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
CreateI420Gradient(1920, 1080);
CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual(buffer.width, 1920);
XCTAssertEqual(buffer.height, 1080);
@ -294,11 +350,15 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
#pragma mark - Shared test code
- (void)cropAndScaleTestWithNV12 {
[self cropAndScaleTestWithNV12InputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
[self
cropAndScaleTestWithNV12InputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
outputFormat:
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
}
- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat outputFormat:(OSType)outputFormat {
- (void)cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
outputFormat:(OSType)outputFormat {
[self cropAndScaleTestWithNV12InputFormat:(OSType)inputFormat
outputFormat:(OSType)outputFormat
outputSize:CGSizeMake(360, 640)];
@ -310,40 +370,49 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(NULL, 720, 1280, inputFormat, NULL, &pixelBufferRef);
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(720, 1280);
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
CreateI420Gradient(720, 1280);
CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
XCTAssertEqual(buffer.width, 720);
XCTAssertEqual(buffer.height, 1280);
CVPixelBufferRef outputPixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, outputSize.width, outputSize.height, outputFormat, NULL, &outputPixelBufferRef);
CVPixelBufferCreate(NULL,
outputSize.width,
outputSize.height,
outputFormat,
NULL,
&outputPixelBufferRef);
std::vector<uint8_t> frameScaleBuffer;
if ([buffer requiresScalingToWidth:outputSize.width height:outputSize.height]) {
if ([buffer requiresScalingToWidth:outputSize.width
height:outputSize.height]) {
int size =
[buffer bufferSizeForCroppingAndScalingToWidth:outputSize.width height:outputSize.height];
[buffer bufferSizeForCroppingAndScalingToWidth:outputSize.width
height:outputSize.height];
frameScaleBuffer.resize(size);
} else {
frameScaleBuffer.clear();
}
frameScaleBuffer.shrink_to_fit();
[buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:frameScaleBuffer.data()];
[buffer cropAndScaleTo:outputPixelBufferRef
withTempBuffer:frameScaleBuffer.data()];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:outputPixelBufferRef];
XCTAssertEqual(scaledBuffer.width, outputSize.width);
XCTAssertEqual(scaledBuffer.height, outputSize.height);
if (outputSize.width > 0 && outputSize.height > 0) {
RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
double psnr =
I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer],
*[scaledBufferI420 nativeI420Buffer]);
XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
}
@ -354,20 +423,22 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
[self cropAndScaleTestWithRGBPixelFormat:pixelFormat cropX:0 cropY:0];
}
- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat cropX:(int)cropX cropY:(int)cropY {
- (void)cropAndScaleTestWithRGBPixelFormat:(OSType)pixelFormat
cropX:(int)cropX
cropY:(int)cropY {
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(NULL, 720, 1280, pixelFormat, NULL, &pixelBufferRef);
DrawGradientInRGBPixelBuffer(pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:pixelBufferRef
adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
cropX:cropX
cropY:cropY];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef
adaptedWidth:CVPixelBufferGetWidth(pixelBufferRef)
adaptedHeight:CVPixelBufferGetHeight(pixelBufferRef)
cropWidth:CVPixelBufferGetWidth(pixelBufferRef) - cropX
cropHeight:CVPixelBufferGetHeight(pixelBufferRef) - cropY
cropX:cropX
cropY:cropY];
XCTAssertEqual(buffer.width, 720);
XCTAssertEqual(buffer.height, 1280);
@ -377,29 +448,31 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
[buffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:NULL];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *scaledBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:outputPixelBufferRef];
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc]
initWithPixelBuffer:outputPixelBufferRef];
XCTAssertEqual(scaledBuffer.width, 360);
XCTAssertEqual(scaledBuffer.height, 640);
RTC_OBJC_TYPE(RTCI420Buffer) *originalBufferI420 = [buffer toI420];
RTC_OBJC_TYPE(RTCI420Buffer) *scaledBufferI420 = [scaledBuffer toI420];
double psnr =
I420PSNR(*[originalBufferI420 nativeI420Buffer], *[scaledBufferI420 nativeI420Buffer]);
double psnr = I420PSNR(*[originalBufferI420 nativeI420Buffer],
*[scaledBufferI420 nativeI420Buffer]);
XCTAssertEqual(psnr, webrtc::kPerfectPSNR);
CVBufferRelease(pixelBufferRef);
}
- (void)toI420WithPixelFormat:(OSType)pixelFormat {
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer = CreateI420Gradient(360, 640);
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
CreateI420Gradient(360, 640);
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(NULL, 360, 640, pixelFormat, NULL, &pixelBufferRef);
CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [buffer toI420];
double psnr = I420PSNR(*i420Buffer, *[fromCVPixelBuffer nativeI420Buffer]);
@@ -413,28 +486,36 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
CVBufferRelease(pixelBufferRef);
}
- (void)toI420WithCropAndScaleWithPixelFormat:(OSType)pixelFormat
setting:(const ToI420WithCropAndScaleSetting &)setting {
- (void)
toI420WithCropAndScaleWithPixelFormat:(OSType)pixelFormat
setting:
(const ToI420WithCropAndScaleSetting &)
setting {
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer =
CreateI420Gradient(setting.inputWidth, setting.inputHeight);
CVPixelBufferRef pixelBufferRef = NULL;
CVPixelBufferCreate(
NULL, setting.inputWidth, setting.inputHeight, pixelFormat, NULL, &pixelBufferRef);
CVPixelBufferCreate(NULL,
setting.inputWidth,
setting.inputHeight,
pixelFormat,
NULL,
&pixelBufferRef);
CopyI420BufferToCVPixelBuffer(i420Buffer, pixelBufferRef);
RTC_OBJC_TYPE(RTCI420Buffer) *objcI420Buffer =
[[RTC_OBJC_TYPE(RTCI420Buffer) alloc] initWithFrameBuffer:i420Buffer];
RTC_OBJC_TYPE(RTCI420Buffer) *scaledObjcI420Buffer =
(RTC_OBJC_TYPE(RTCI420Buffer) *)[objcI420Buffer cropAndScaleWith:setting.offsetX
offsetY:setting.offsetY
cropWidth:setting.cropWidth
cropHeight:setting.cropHeight
scaleWidth:setting.scaleWidth
scaleHeight:setting.scaleHeight];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBufferRef];
(RTC_OBJC_TYPE(RTCI420Buffer) *)[objcI420Buffer
cropAndScaleWith:setting.offsetX
offsetY:setting.offsetY
cropWidth:setting.cropWidth
cropHeight:setting.cropHeight
scaleWidth:setting.scaleWidth
scaleHeight:setting.scaleHeight];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer)
alloc] initWithPixelBuffer:pixelBufferRef];
id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> scaledBuffer =
[buffer cropAndScaleWith:setting.offsetX
offsetY:setting.offsetY
@@ -442,12 +523,13 @@ constexpr const ToI420WithCropAndScaleSetting kToI420WithCropAndScaleSettings[]
cropHeight:setting.cropHeight
scaleWidth:setting.scaleWidth
scaleHeight:setting.scaleHeight];
XCTAssertTrue([scaledBuffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
XCTAssertTrue(
[scaledBuffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]);
RTC_OBJC_TYPE(RTCI420Buffer) *fromCVPixelBuffer = [scaledBuffer toI420];
double psnr =
I420PSNR(*[scaledObjcI420Buffer nativeI420Buffer], *[fromCVPixelBuffer nativeI420Buffer]);
double psnr = I420PSNR(*[scaledObjcI420Buffer nativeI420Buffer],
*[fromCVPixelBuffer nativeI420Buffer]);
double target = webrtc::kPerfectPSNR;
if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
// libyuv's I420ToRGB functions seem to lose some quality.

@@ -32,23 +32,34 @@
#if TARGET_OS_IPHONE
// Helper method.
CMSampleBufferRef createTestSampleBufferRef() {
// This image is already in the testing bundle.
UIImage *image = [UIImage imageNamed:@"Default.png"];
CGSize size = image.size;
CGImageRef imageRef = [image CGImage];
CVPixelBufferRef pixelBuffer = nullptr;
CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil,
CVPixelBufferCreate(kCFAllocatorDefault,
size.width,
size.height,
kCVPixelFormatType_32ARGB,
nil,
&pixelBuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
// We don't care about bitsPerComponent and bytesPerRow so arbitrary value of 8 for both.
CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width,
rgbColorSpace, kCGImageAlphaPremultipliedFirst);
// We don't care about bitsPerComponent and bytesPerRow so arbitrary value of
// 8 for both.
CGContextRef context = CGBitmapContextCreate(nil,
size.width,
size.height,
8,
8 * size.width,
rgbColorSpace,
kCGImageAlphaPremultipliedFirst);
CGContextDrawImage(
context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef);
context,
CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)),
imageRef);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
@@ -59,19 +70,24 @@ CMSampleBufferRef createTestSampleBufferRef() {
CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description);
CMSampleBufferRef sampleBuffer = nullptr;
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description,
&timing, &sampleBuffer);
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
pixelBuffer,
YES,
NULL,
NULL,
description,
&timing,
&sampleBuffer);
CFRelease(pixelBuffer);
return sampleBuffer;
}
#endif
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer)
(Tests)<AVCaptureVideoDataOutputSampleBufferDelegate> -
(instancetype)initWithDelegate
: (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate captureSession
: (AVCaptureSession *)captureSession;
: (__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)
delegate captureSession : (AVCaptureSession *)captureSession;
@end
@interface RTCCameraVideoCapturerTests : XCTestCase
@@ -88,10 +104,11 @@ CMSampleBufferRef createTestSampleBufferRef() {
@synthesize capturer = _capturer;
- (void)setUp {
self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
self.delegateMock =
OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
self.capturer =
[[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock];
self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc]
initWithDelegate:self.delegateMock];
self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock];
}
@@ -123,7 +140,8 @@ CMSampleBufferRef createTestSampleBufferRef() {
}
- (void)testSetupSessionOutput {
AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0];
AVCaptureVideoDataOutput *videoOutput =
self.capturer.captureSession.outputs[0];
XCTAssertEqual(videoOutput.alwaysDiscardsLateVideoFrames, NO);
XCTAssertEqual(videoOutput.sampleBufferDelegate, self.capturer);
}
@@ -136,26 +154,35 @@ CMSampleBufferRef createTestSampleBufferRef() {
// We don't care about width and heigth so arbitrary 123 and 456 values.
int width = 123;
int height = 456;
CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height,
nil, &format);
CMVideoFormatDescriptionCreate(nil,
kCVPixelFormatType_420YpCbCr8PlanarFullRange,
width,
height,
nil,
&format);
OCMStub([validFormat1 formatDescription]).andReturn(format);
id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]);
CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width,
height, nil, &format);
CMVideoFormatDescriptionCreate(
nil,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
width,
height,
nil,
&format);
OCMStub([validFormat2 formatDescription]).andReturn(format);
id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]);
CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil,
&format);
CMVideoFormatDescriptionCreate(
nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil, &format);
OCMStub([invalidFormat formatDescription]).andReturn(format);
NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ];
OCMStub([self.deviceMock formats]).andReturn(formats);
// when
NSArray *supportedFormats =
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock];
NSArray *supportedFormats = [RTC_OBJC_TYPE(RTCCameraVideoCapturer)
supportedFormatsForDevice:self.deviceMock];
// then
XCTAssertEqual(supportedFormats.count, 3u);
@@ -175,7 +202,8 @@ CMSampleBufferRef createTestSampleBufferRef() {
- (void)testDelegateCallbackNotCalledWhenInvalidBuffer {
// given
CMSampleBufferRef sampleBuffer = nullptr;
[[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]];
[[self.delegateMock reject] capturer:[OCMArg any]
didCaptureVideoFrame:[OCMArg any]];
// when
[self.capturer captureOutput:self.capturer.captureSession.outputs[0]
@@ -189,20 +217,24 @@ CMSampleBufferRef createTestSampleBufferRef() {
- (void)testDelegateCallbackWithValidBufferAndOrientationUpdate {
#if TARGET_OS_IPHONE
XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
[UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown) forKey:@"orientation"];
[UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown)
forKey:@"orientation"];
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
// then
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_270);
return YES;
}]];
[[self.delegateMock expect]
capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(
RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_270);
return YES;
}]];
// when
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
[center postNotificationName:UIDeviceOrientationDidChangeNotification
object:nil];
// We need to wait for the dispatch to finish.
WAIT(1000);
@@ -216,18 +248,21 @@ CMSampleBufferRef createTestSampleBufferRef() {
#endif
}
// The XCTest framework considers functions that don't take arguments tests. This is a helper.
// The XCTest framework considers functions that don't take arguments tests.
// This is a helper.
- (void)testRotationCamera:(AVCaptureDevicePosition)camera
withOrientation:(UIDeviceOrientation)deviceOrientation {
#if TARGET_OS_IPHONE
// Mock the AVCaptureConnection as we will get the camera position from the connection's
// input ports.
AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
NSArray *inputPortsArrayMock = @[captureInputPort];
// Mock the AVCaptureConnection as we will get the camera position from the
// connection's input ports.
AVCaptureDeviceInput *inputPortMock =
OCMClassMock([AVCaptureDeviceInput class]);
AVCaptureInputPort *captureInputPort =
OCMClassMock([AVCaptureInputPort class]);
NSArray *inputPortsArrayMock = @[ captureInputPort ];
AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
andReturn(inputPortsArrayMock);
OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts)
.andReturn(inputPortsArrayMock);
OCMStub(captureInputPort.input).andReturn(inputPortMock);
OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
OCMStub(captureDeviceMock.position).andReturn(camera);
@@ -237,27 +272,30 @@ CMSampleBufferRef createTestSampleBufferRef() {
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
if (camera == AVCaptureDevicePositionFront) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
}
} else if (camera == AVCaptureDevicePositionBack) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
}
}
return YES;
}]];
[[self.delegateMock expect]
capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(
RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
if (camera == AVCaptureDevicePositionFront) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
}
} else if (camera == AVCaptureDevicePositionBack) {
if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
} else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_180);
}
}
return YES;
}]];
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
[center postNotificationName:UIDeviceOrientationDidChangeNotification
object:nil];
// We need to wait for the dispatch to finish.
WAIT(1000);
@@ -293,42 +331,57 @@ CMSampleBufferRef createTestSampleBufferRef() {
}
- (void)setExif:(CMSampleBufferRef)sampleBuffer {
rtc::ScopedCFTypeRef<CFMutableDictionaryRef> exif(CFDictionaryCreateMutable(
kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
CFDictionarySetValue(exif.get(), CFSTR("LensModel"), CFSTR("iPhone SE back camera 4.15mm f/2.2"));
CMSetAttachment(sampleBuffer, CFSTR("{Exif}"), exif.get(), kCMAttachmentMode_ShouldPropagate);
rtc::ScopedCFTypeRef<CFMutableDictionaryRef> exif(
CFDictionaryCreateMutable(kCFAllocatorDefault,
0,
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks));
CFDictionarySetValue(exif.get(),
CFSTR("LensModel"),
CFSTR("iPhone SE back camera 4.15mm f/2.2"));
CMSetAttachment(sampleBuffer,
CFSTR("{Exif}"),
exif.get(),
kCMAttachmentMode_ShouldPropagate);
}
- (void)testRotationFrame {
#if TARGET_OS_IPHONE
// Mock the AVCaptureConnection as we will get the camera position from the connection's
// input ports.
AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
NSArray *inputPortsArrayMock = @[captureInputPort];
// Mock the AVCaptureConnection as we will get the camera position from the
// connection's input ports.
AVCaptureDeviceInput *inputPortMock =
OCMClassMock([AVCaptureDeviceInput class]);
AVCaptureInputPort *captureInputPort =
OCMClassMock([AVCaptureInputPort class]);
NSArray *inputPortsArrayMock = @[ captureInputPort ];
AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
andReturn(inputPortsArrayMock);
OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts)
.andReturn(inputPortsArrayMock);
OCMStub(captureInputPort.input).andReturn(inputPortMock);
OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
OCMStub(captureDeviceMock.position).andReturn(AVCaptureDevicePositionFront);
XCTExpectFailure(@"Setting orientation on UIDevice is not supported");
[UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft) forKey:@"orientation"];
[UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft)
forKey:@"orientation"];
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[[self.delegateMock expect] capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
// Front camera and landscape left should return 180. But the frame's exif
// we add below says its from the back camera, so rotation should be 0.
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
return YES;
}]];
[[self.delegateMock expect]
capturer:self.capturer
didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(
RTC_OBJC_TYPE(RTCVideoFrame) *
expectedFrame) {
// Front camera and landscape left should return 180. But the frame's
// exif we add below says its from the back camera, so rotation should
// be 0.
XCTAssertEqual(expectedFrame.rotation, RTCVideoRotation_0);
return YES;
}]];
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];
[center postNotificationName:UIDeviceOrientationDidChangeNotification
object:nil];
// We need to wait for the dispatch to finish.
WAIT(1000);
@@ -349,8 +402,8 @@ CMSampleBufferRef createTestSampleBufferRef() {
CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
[self setExif:sampleBuffer];
AVCaptureDevicePosition cameraPosition = [AVCaptureSession
devicePositionForSampleBuffer:sampleBuffer];
AVCaptureDevicePosition cameraPosition =
[AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
XCTAssertEqual(cameraPosition, AVCaptureDevicePositionBack);
#endif
}
@@ -378,10 +431,11 @@ CMSampleBufferRef createTestSampleBufferRef() {
OCMStub([self.captureSessionMock addOutput:[OCMArg any]]);
OCMStub([self.captureSessionMock beginConfiguration]);
OCMStub([self.captureSessionMock commitConfiguration]);
self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
self.capturer =
[[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock
captureSession:self.captureSessionMock];
self.delegateMock =
OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
self.capturer = [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc]
initWithDelegate:self.delegateMock
captureSession:self.captureSessionMock];
self.deviceMock = [RTCCameraVideoCapturerTests createDeviceMock];
}
@@ -399,16 +453,20 @@ CMSampleBufferRef createTestSampleBufferRef() {
- (void)testStartingAndStoppingCapture {
id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock
error:[OCMArg setTo:nil]])
.andReturn(expectedDeviceInputMock);
OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]])
.andReturn(YES);
OCMStub([self.deviceMock unlockForConfiguration]);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock])
.andReturn(YES);
OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
// Set expectation that the capture session should be started with correct device.
// Set expectation that the capture session should be started with correct
// device.
OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
OCMExpect([_captureSessionMock startRunning]);
OCMExpect([_captureSessionMock stopRunning]);
@@ -422,9 +480,10 @@ CMSampleBufferRef createTestSampleBufferRef() {
}
- (void)testStartCaptureFailingToLockForConfiguration {
// The captureSessionMock is a strict mock, so this test will crash if the startCapture
// method does not return when failing to lock for configuration.
OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(NO);
// The captureSessionMock is a strict mock, so this test will crash if the
// startCapture method does not return when failing to lock for configuration.
OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]])
.andReturn(NO);
id format = OCMClassMock([AVCaptureDeviceFormat class]);
[self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];
@@ -436,16 +495,20 @@ CMSampleBufferRef createTestSampleBufferRef() {
- (void)testStartingAndStoppingCaptureWithCallbacks {
id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock
error:[OCMArg setTo:nil]])
.andReturn(expectedDeviceInputMock);
OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]])
.andReturn(YES);
OCMStub([self.deviceMock unlockForConfiguration]);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock])
.andReturn(YES);
OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
// Set expectation that the capture session should be started with correct device.
// Set expectation that the capture session should be started with correct
// device.
OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
OCMExpect([_captureSessionMock startRunning]);
OCMExpect([_captureSessionMock stopRunning]);
@@ -468,8 +531,9 @@ CMSampleBufferRef createTestSampleBufferRef() {
dispatch_semaphore_signal(completedStopSemaphore);
}];
dispatch_semaphore_wait(completedStopSemaphore,
dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
dispatch_semaphore_wait(
completedStopSemaphore,
dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
OCMVerifyAllWithDelay(_captureSessionMock, 15);
XCTAssertTrue(completedStart);
XCTAssertTrue(completedStop);
@@ -478,13 +542,16 @@ CMSampleBufferRef createTestSampleBufferRef() {
- (void)testStartCaptureFailingToLockForConfigurationWithCallback {
id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock
error:[OCMArg setTo:nil]])
.andReturn(expectedDeviceInputMock);
id errorMock = OCMClassMock([NSError class]);
OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]]).andReturn(NO);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]])
.andReturn(NO);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock])
.andReturn(YES);
OCMStub([self.deviceMock unlockForConfiguration]);
OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
@@ -501,22 +568,26 @@ CMSampleBufferRef createTestSampleBufferRef() {
dispatch_semaphore_signal(completedStartSemaphore);
}];
long ret = dispatch_semaphore_wait(completedStartSemaphore,
dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
long ret = dispatch_semaphore_wait(
completedStartSemaphore,
dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
XCTAssertEqual(ret, 0);
XCTAssertEqual(callbackError, errorMock);
}
// TODO(crbug.com/webrtc/14829): Test is disabled on iOS < 16 and broken on iOS 16.
// TODO(crbug.com/webrtc/14829): Test is disabled on iOS < 16 and broken on
// iOS 16.
- (void)DISABLED_testStartCaptureSetsOutputDimensionsInvalidPixelFormat {
id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([captureDeviceInputMock deviceInputWithDevice:_deviceMock error:[OCMArg setTo:nil]])
OCMStub([captureDeviceInputMock deviceInputWithDevice:_deviceMock
error:[OCMArg setTo:nil]])
.andReturn(expectedDeviceInputMock);
OCMStub([_deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
OCMStub([_deviceMock unlockForConfiguration]);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock])
.andReturn(YES);
OCMStub([_captureSessionMock addInput:expectedDeviceInputMock]);
OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);
@@ -529,12 +600,14 @@ CMSampleBufferRef createTestSampleBufferRef() {
int width = 110;
int height = 220;
FourCharCode pixelFormat = 0x18000000;
CMVideoFormatDescriptionCreate(nil, pixelFormat, width, height, nil, &formatDescription);
CMVideoFormatDescriptionCreate(
nil, pixelFormat, width, height, nil, &formatDescription);
OCMStub([deviceFormatMock formatDescription]).andReturn(formatDescription);
[_capturer startCaptureWithDevice:_deviceMock format:deviceFormatMock fps:30];
XCTestExpectation *expectation = [self expectationWithDescription:@"StopCompletion"];
XCTestExpectation *expectation =
[self expectationWithDescription:@"StopCompletion"];
[_capturer stopCaptureWithCompletionHandler:^(void) {
[expectation fulfill];
}];
@@ -544,15 +617,22 @@ CMSampleBufferRef createTestSampleBufferRef() {
OCMVerify([_captureSessionMock
addOutput:[OCMArg checkWithBlock:^BOOL(AVCaptureVideoDataOutput *output) {
if (@available(iOS 16, *)) {
XCTAssertEqual(width, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
XCTAssertEqual(height, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
XCTAssertEqual(
width,
[output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
XCTAssertEqual(
height,
[output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
} else {
XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
XCTAssertEqual(0, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
XCTAssertEqual(
0, [output.videoSettings[(id)kCVPixelBufferWidthKey] intValue]);
XCTAssertEqual(
0, [output.videoSettings[(id)kCVPixelBufferHeightKey] intValue]);
}
XCTAssertEqual(
(FourCharCode)kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
[output.videoSettings[(id)kCVPixelBufferPixelFormatTypeKey] unsignedIntValue]);
[output.videoSettings[(id)kCVPixelBufferPixelFormatTypeKey]
unsignedIntValue]);
return YES;
}]]);
}

@@ -29,7 +29,8 @@
@implementation RTCCertificateTest
- (void)testCertificateIsUsedInConfig {
RTC_OBJC_TYPE(RTCConfiguration) *originalConfig = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *originalConfig =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
NSArray *urlStrings = @[ @"stun:stun1.example.net" ];
RTC_OBJC_TYPE(RTCIceServer) *server =
@@ -37,33 +38,45 @@
originalConfig.iceServers = @[ server ];
// Generate a new certificate.
RTC_OBJC_TYPE(RTCCertificate) *originalCertificate = [RTC_OBJC_TYPE(RTCCertificate)
generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
RTC_OBJC_TYPE(RTCCertificate) *originalCertificate =
[RTC_OBJC_TYPE(RTCCertificate) generateCertificateWithParams:@{
@"expires" : @100000,
@"name" : @"RSASSA-PKCS1-v1_5"
}];
// Store certificate in configuration.
originalConfig.certificate = originalCertificate;
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
// Create PeerConnection with this certificate.
RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
[factory peerConnectionWithConfiguration:originalConfig constraints:contraints delegate:nil];
[factory peerConnectionWithConfiguration:originalConfig
constraints:contraints
delegate:nil];
// Retrieve certificate from the configuration.
RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig = peerConnection.configuration;
RTC_OBJC_TYPE(RTCConfiguration) *retrievedConfig =
peerConnection.configuration;
// Extract PEM strings from original certificate.
std::string originalPrivateKeyField = [[originalCertificate private_key] UTF8String];
std::string originalCertificateField = [[originalCertificate certificate] UTF8String];
std::string originalPrivateKeyField =
[[originalCertificate private_key] UTF8String];
std::string originalCertificateField =
[[originalCertificate certificate] UTF8String];
// Extract PEM strings from certificate retrieved from configuration.
RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate = retrievedConfig.certificate;
std::string retrievedPrivateKeyField = [[retrievedCertificate private_key] UTF8String];
std::string retrievedCertificateField = [[retrievedCertificate certificate] UTF8String];
RTC_OBJC_TYPE(RTCCertificate) *retrievedCertificate =
retrievedConfig.certificate;
std::string retrievedPrivateKeyField =
[[retrievedCertificate private_key] UTF8String];
std::string retrievedCertificateField =
[[retrievedCertificate certificate] UTF8String];
// Check that the original certificate and retrieved certificate match.
EXPECT_EQ(originalPrivateKeyField, retrievedPrivateKeyField);

@@ -30,7 +30,8 @@
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
@@ -47,11 +48,11 @@
config.continualGatheringPolicy =
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.cryptoOptions =
[[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:YES
sframeRequireFrameEncryption:YES];
config.cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:YES
sframeRequireFrameEncryption:YES];
config.rtcpAudioReportIntervalMs = 2500;
config.rtcpVideoReportIntervalMs = 3750;
@@ -81,9 +82,14 @@
nativeConfig->continual_gathering_policy);
EXPECT_EQ(true, nativeConfig->prune_turn_ports);
EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_gcm_crypto_suites);
EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_aes128_sha1_32_crypto_cipher);
EXPECT_EQ(true, nativeConfig->crypto_options->srtp.enable_encrypted_rtp_header_extensions);
EXPECT_EQ(true, nativeConfig->crypto_options->sframe.require_frame_encryption);
EXPECT_EQ(
true,
nativeConfig->crypto_options->srtp.enable_aes128_sha1_32_crypto_cipher);
EXPECT_EQ(true,
nativeConfig->crypto_options->srtp
.enable_encrypted_rtp_header_extensions);
EXPECT_EQ(true,
nativeConfig->crypto_options->sframe.require_frame_encryption);
EXPECT_EQ(2500, nativeConfig->audio_rtcp_report_interval_ms());
EXPECT_EQ(3750, nativeConfig->video_rtcp_report_interval_ms());
}
@@ -93,7 +99,8 @@
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
config.bundlePolicy = RTCBundlePolicyMaxBundle;
@@ -110,18 +117,18 @@
config.continualGatheringPolicy =
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.cryptoOptions =
[[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:NO
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
config.cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:NO
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
config.rtcpAudioReportIntervalMs = 1500;
config.rtcpVideoReportIntervalMs = 2150;
webrtc::PeerConnectionInterface::RTCConfiguration *nativeConfig =
[config createNativeConfiguration];
RTC_OBJC_TYPE(RTCConfiguration) *newConfig =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] initWithNativeConfiguration:*nativeConfig];
RTC_OBJC_TYPE(RTCConfiguration) *newConfig = [[RTC_OBJC_TYPE(RTCConfiguration)
alloc] initWithNativeConfiguration:*nativeConfig];
EXPECT_EQ([config.iceServers count], newConfig.iceServers.count);
RTC_OBJC_TYPE(RTCIceServer) *newServer = newConfig.iceServers[0];
RTC_OBJC_TYPE(RTCIceServer) *origServer = config.iceServers[0];
@@ -135,12 +142,16 @@
EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
EXPECT_EQ(config.audioJitterBufferMaxPackets,
newConfig.audioJitterBufferMaxPackets);
EXPECT_EQ(config.audioJitterBufferFastAccelerate,
newConfig.audioJitterBufferFastAccelerate);
EXPECT_EQ(config.iceConnectionReceivingTimeout,
newConfig.iceConnectionReceivingTimeout);
EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
newConfig.iceBackupCandidatePairPingInterval);
EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
EXPECT_EQ(config.continualGatheringPolicy,
newConfig.continualGatheringPolicy);
EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites,
newConfig.cryptoOptions.srtpEnableGcmCryptoSuites);
@@ -150,12 +161,15 @@
newConfig.cryptoOptions.srtpEnableEncryptedRtpHeaderExtensions);
EXPECT_EQ(config.cryptoOptions.sframeRequireFrameEncryption,
newConfig.cryptoOptions.sframeRequireFrameEncryption);
EXPECT_EQ(config.rtcpAudioReportIntervalMs, newConfig.rtcpAudioReportIntervalMs);
EXPECT_EQ(config.rtcpVideoReportIntervalMs, newConfig.rtcpVideoReportIntervalMs);
EXPECT_EQ(config.rtcpAudioReportIntervalMs,
newConfig.rtcpAudioReportIntervalMs);
EXPECT_EQ(config.rtcpVideoReportIntervalMs,
newConfig.rtcpVideoReportIntervalMs);
}
- (void)testDefaultValues {
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
EXPECT_EQ(config.cryptoOptions, nil);
}

@@ -23,19 +23,23 @@
encoded_image.SetEncodedData(encoded_data);
RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc]
initWithNativeEncodedImage:encoded_image];
XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(), encoded_data);
XCTAssertEqual([encodedImage nativeEncodedImage].GetEncodedData(),
encoded_data);
}
- (void)testInitWithNSData {
NSData *bufferData = [NSData data];
RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
RTC_OBJC_TYPE(RTCEncodedImage) *encodedImage =
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc] init];
encodedImage.buffer = bufferData;
webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);
XCTAssertEqual(result_encoded_image.GetEncodedData()->data(), bufferData.bytes);
XCTAssertEqual(result_encoded_image.GetEncodedData()->data(),
bufferData.bytes);
}
- (void)testRetainsNativeEncodedImage {
@@ -44,8 +48,8 @@
const auto encoded_data = webrtc::EncodedImageBuffer::Create(1);
webrtc::EncodedImage encoded_image;
encoded_image.SetEncodedData(encoded_data);
encodedImage =
[[RTC_OBJC_TYPE(RTCEncodedImage) alloc] initWithNativeEncodedImage:encoded_image];
encodedImage = [[RTC_OBJC_TYPE(RTCEncodedImage) alloc]
initWithNativeEncodedImage:encoded_image];
}
webrtc::EncodedImage result_encoded_image = [encodedImage nativeEncodedImage];
XCTAssertTrue(result_encoded_image.GetEncodedData() != nullptr);

@@ -17,7 +17,8 @@
NSString *const kTestFileName = @"foreman.mp4";
static const int kTestTimeoutMs = 5 * 1000; // 5secs.
@interface MockCapturerDelegate : NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
@interface MockCapturerDelegate
: NSObject <RTC_OBJC_TYPE (RTCVideoCapturerDelegate)>
@property(nonatomic, assign) NSInteger capturedFramesCount;
@@ -47,7 +48,8 @@ NS_CLASS_AVAILABLE_IOS(10)
- (void)setUp {
self.mockDelegate = [[MockCapturerDelegate alloc] init];
self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:self.mockDelegate];
self.capturer = [[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc]
initWithDelegate:self.mockDelegate];
}
- (void)tearDown {
@@ -62,7 +64,8 @@ NS_CLASS_AVAILABLE_IOS(10)
errorOccured = YES;
};
[self.capturer startCapturingFromFileNamed:@"not_in_bundle.mov" onError:errorBlock];
[self.capturer startCapturingFromFileNamed:@"not_in_bundle.mov"
onError:errorBlock];
ASSERT_TRUE_WAIT(errorOccured, kTestTimeoutMs);
}
@@ -79,8 +82,10 @@ NS_CLASS_AVAILABLE_IOS(10)
secondError = YES;
};
[self.capturer startCapturingFromFileNamed:kTestFileName onError:firstErrorBlock];
[self.capturer startCapturingFromFileNamed:kTestFileName onError:secondErrorBlock];
[self.capturer startCapturingFromFileNamed:kTestFileName
onError:firstErrorBlock];
[self.capturer startCapturingFromFileNamed:kTestFileName
onError:secondErrorBlock];
ASSERT_TRUE_WAIT(secondError, kTestTimeoutMs);
}
@@ -96,12 +101,16 @@ NS_CLASS_AVAILABLE_IOS(10)
// We're dispatching the `stopCapture` with delay to ensure the capturer has
// had the chance to capture several frames.
dispatch_time_t captureDelay = dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC); // 2secs.
dispatch_after(captureDelay, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
capturedFrames = self.mockDelegate.capturedFramesCount;
[self.capturer stopCapture];
done = YES;
});
dispatch_time_t captureDelay =
dispatch_time(DISPATCH_TIME_NOW, 2 * NSEC_PER_SEC); // 2secs.
dispatch_after(
captureDelay,
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
^{
capturedFrames = self.mockDelegate.capturedFramesCount;
[self.capturer stopCapture];
done = YES;
});
WAIT(done, kTestTimeoutMs);
capturedFramesAfterStop = self.mockDelegate.capturedFramesCount;

@@ -30,7 +30,9 @@
"59052 typ host generation 0";
RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp sdpMLineIndex:0 sdpMid:@"audio"];
[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp
sdpMLineIndex:0
sdpMid:@"audio"];
std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate =
candidate.nativeCandidate;
@@ -50,7 +52,8 @@
webrtc::CreateIceCandidate("audio", 0, sdp, nullptr));
RTC_OBJC_TYPE(RTCIceCandidate) *iceCandidate =
[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithNativeCandidate:nativeCandidate.get()];
[[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
initWithNativeCandidate:nativeCandidate.get()];
EXPECT_NE(nativeCandidate.get(), iceCandidate.nativeCandidate.get());
EXPECT_TRUE([@"audio" isEqualToString:iceCandidate.sdpMid]);
EXPECT_EQ(0, iceCandidate.sdpMLineIndex);

@@ -25,8 +25,8 @@
@implementation RTCIceServerTest
- (void)testOneURLServer {
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"stun:stun1.example.net" ]];
RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
initWithURLStrings:@[ @"stun:stun1.example.net" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
@@ -36,8 +36,11 @@
}
- (void)testTwoURLServer {
RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net", @"turn2:turn2.example.net" ]];
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[
@"turn1:turn1.example.net",
@"turn2:turn2.example.net"
]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(2u, iceStruct.urls.size());
@@ -48,10 +51,10 @@
}
- (void)testPasswordCredential {
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"];
RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -60,12 +63,12 @@
}
- (void)testHostname {
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"];
RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -75,13 +78,13 @@
}
- (void)testTlsAlpnProtocols {
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -92,14 +95,14 @@
}
- (void)testTlsEllipticCurves {
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
RTC_OBJC_TYPE(RTCIceServer) *server = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
initWithURLStrings:@[ @"turn1:turn1.example.net" ]
username:@"username"
credential:@"credential"
tlsCertPolicy:RTCTlsCertPolicySecure
hostname:@"hostname"
tlsAlpnProtocols:@[ @"proto1", @"proto2" ]
tlsEllipticCurves:@[ @"curve1", @"curve2" ]];
webrtc::PeerConnectionInterface::IceServer iceStruct = server.nativeServer;
EXPECT_EQ(1u, iceStruct.urls.size());
EXPECT_EQ("turn1:turn1.example.net", iceStruct.urls.front());
@@ -125,7 +128,7 @@
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithNativeServer:nativeServer];
EXPECT_EQ(1u, iceServer.urlStrings.count);
EXPECT_EQ("stun:stun.example.net",
[NSString stdStringForString:iceServer.urlStrings.firstObject]);
[NSString stdStringForString:iceServer.urlStrings.firstObject]);
EXPECT_EQ("username", [NSString stdStringForString:iceServer.username]);
EXPECT_EQ("password", [NSString stdStringForString:iceServer.credential]);
EXPECT_EQ("hostname", [NSString stdStringForString:iceServer.hostname]);

@@ -25,24 +25,28 @@
@implementation RTCMediaConstraintsTests
- (void)testMediaConstraints {
NSDictionary *mandatory = @{@"key1": @"value1", @"key2": @"value2"};
NSDictionary *optional = @{@"key3": @"value3", @"key4": @"value4"};
NSDictionary *mandatory = @{@"key1" : @"value1", @"key2" : @"value2"};
NSDictionary *optional = @{@"key3" : @"value3", @"key4" : @"value4"};
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatory
optionalConstraints:optional];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:mandatory
optionalConstraints:optional];
std::unique_ptr<webrtc::MediaConstraints> nativeConstraints =
[constraints nativeConstraints];
webrtc::MediaConstraints::Constraints nativeMandatory = nativeConstraints->GetMandatory();
webrtc::MediaConstraints::Constraints nativeMandatory =
nativeConstraints->GetMandatory();
[self expectConstraints:mandatory inNativeConstraints:nativeMandatory];
webrtc::MediaConstraints::Constraints nativeOptional = nativeConstraints->GetOptional();
webrtc::MediaConstraints::Constraints nativeOptional =
nativeConstraints->GetOptional();
[self expectConstraints:optional inNativeConstraints:nativeOptional];
}
- (void)expectConstraints:(NSDictionary *)constraints
inNativeConstraints:(webrtc::MediaConstraints::Constraints)nativeConstraints {
inNativeConstraints:
(webrtc::MediaConstraints::Constraints)nativeConstraints {
EXPECT_EQ(constraints.count, nativeConstraints.size());
for (NSString *key in constraints) {

@@ -37,11 +37,14 @@ extern "C" {
@implementation RTCPeerConnectionFactoryBuilderTests
- (void)testBuilder {
id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
id factoryMock =
OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
OCMExpect([factoryMock alloc]).andReturn(factoryMock);
RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs]
initWithMediaAndDependencies:webrtc::PeerConnectionFactoryDependencies()]);
RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock]
ignoringNonObjectArgs] initWithMediaAndDependencies:
webrtc::PeerConnectionFactoryDependencies()]);
RTCPeerConnectionFactoryBuilder* builder =
[[RTCPeerConnectionFactoryBuilder alloc] init];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
[builder createPeerConnectionFactory];
EXPECT_TRUE(peerConnectionFactory != nil);
@@ -49,11 +52,14 @@ extern "C" {
}
- (void)testDefaultComponentsBuilder {
id factoryMock = OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
id factoryMock =
OCMStrictClassMock([RTC_OBJC_TYPE(RTCPeerConnectionFactory) class]);
OCMExpect([factoryMock alloc]).andReturn(factoryMock);
RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock] ignoringNonObjectArgs]
initWithMediaAndDependencies:webrtc::PeerConnectionFactoryDependencies()]);
RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
RTC_UNUSED([[[[factoryMock expect] andReturn:factoryMock]
ignoringNonObjectArgs] initWithMediaAndDependencies:
webrtc::PeerConnectionFactoryDependencies()]);
RTCPeerConnectionFactoryBuilder* builder =
[RTCPeerConnectionFactoryBuilder defaultBuilder];
RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory =
[builder createPeerConnectionFactory];
EXPECT_TRUE(peerConnectionFactory != nil);

@@ -38,7 +38,8 @@
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
config.iceServers = @[ server ];
config.iceTransportPolicy = RTCIceTransportPolicyRelay;
@@ -57,27 +58,31 @@
RTCContinualGatheringPolicyGatherContinually;
config.shouldPruneTurnPorts = YES;
config.activeResetSrtpParams = YES;
config.cryptoOptions =
[[RTC_OBJC_TYPE(RTCCryptoOptions) alloc] initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
config.cryptoOptions = [[RTC_OBJC_TYPE(RTCCryptoOptions) alloc]
initWithSrtpEnableGcmCryptoSuites:YES
srtpEnableAes128Sha1_32CryptoCipher:YES
srtpEnableEncryptedRtpHeaderExtensions:NO
sframeRequireFrameEncryption:NO];
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) * newConfig;
@autoreleasepool {
RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
[factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
[factory peerConnectionWithConfiguration:config
constraints:contraints
delegate:nil];
newConfig = peerConnection.configuration;
EXPECT_TRUE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:100000]
currentBitrateBps:[NSNumber numberWithInt:5000000]
maxBitrateBps:[NSNumber numberWithInt:500000000]]);
EXPECT_TRUE([peerConnection
setBweMinBitrateBps:[NSNumber numberWithInt:100000]
currentBitrateBps:[NSNumber numberWithInt:5000000]
maxBitrateBps:[NSNumber numberWithInt:500000000]]);
EXPECT_FALSE([peerConnection setBweMinBitrateBps:[NSNumber numberWithInt:2]
currentBitrateBps:[NSNumber numberWithInt:1]
maxBitrateBps:nil]);
@@ -95,12 +100,16 @@
EXPECT_EQ(config.rtcpMuxPolicy, newConfig.rtcpMuxPolicy);
EXPECT_EQ(config.tcpCandidatePolicy, newConfig.tcpCandidatePolicy);
EXPECT_EQ(config.candidateNetworkPolicy, newConfig.candidateNetworkPolicy);
EXPECT_EQ(config.audioJitterBufferMaxPackets, newConfig.audioJitterBufferMaxPackets);
EXPECT_EQ(config.audioJitterBufferFastAccelerate, newConfig.audioJitterBufferFastAccelerate);
EXPECT_EQ(config.iceConnectionReceivingTimeout, newConfig.iceConnectionReceivingTimeout);
EXPECT_EQ(config.audioJitterBufferMaxPackets,
newConfig.audioJitterBufferMaxPackets);
EXPECT_EQ(config.audioJitterBufferFastAccelerate,
newConfig.audioJitterBufferFastAccelerate);
EXPECT_EQ(config.iceConnectionReceivingTimeout,
newConfig.iceConnectionReceivingTimeout);
EXPECT_EQ(config.iceBackupCandidatePairPingInterval,
newConfig.iceBackupCandidatePairPingInterval);
EXPECT_EQ(config.continualGatheringPolicy, newConfig.continualGatheringPolicy);
EXPECT_EQ(config.continualGatheringPolicy,
newConfig.continualGatheringPolicy);
EXPECT_EQ(config.shouldPruneTurnPorts, newConfig.shouldPruneTurnPorts);
EXPECT_EQ(config.activeResetSrtpParams, newConfig.activeResetSrtpParams);
EXPECT_EQ(config.cryptoOptions.srtpEnableGcmCryptoSuites,
@@ -118,12 +127,14 @@
RTC_OBJC_TYPE(RTCIceServer) *server =
[[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:urlStrings];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
config.iceServers = @[ server ];
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
@@ -143,18 +154,22 @@
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
[factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
[factory peerConnectionWithConfiguration:config
constraints:contraints
delegate:nil];
dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
[peerConnection setRemoteDescription:[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
initWithType:RTCSdpTypeOffer
sdp:@"invalid"]
[peerConnection setRemoteDescription:[[RTC_OBJC_TYPE(RTCSessionDescription)
alloc] initWithType:RTCSdpTypeOffer
sdp:@"invalid"]
completionHandler:^(NSError *error) {
ASSERT_NE(error, nil);
if (error != nil) {
@@ -165,8 +180,9 @@
NSTimeInterval timeout = 5;
ASSERT_EQ(
0,
dispatch_semaphore_wait(negotiatedSem,
dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
dispatch_semaphore_wait(
negotiatedSem,
dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
[peerConnection close];
}
@@ -174,18 +190,23 @@
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCConfiguration) *config =
[[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
RTC_OBJC_TYPE(RTCMediaConstraints) *contraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{}
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
initWithMandatoryConstraints:@{}
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCPeerConnection) *peerConnection =
[factory peerConnectionWithConfiguration:config constraints:contraints delegate:nil];
[factory peerConnectionWithConfiguration:config
constraints:contraints
delegate:nil];
dispatch_semaphore_t negotiatedSem = dispatch_semaphore_create(0);
[peerConnection addIceCandidate:[[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:@"invalid"
sdpMLineIndex:-1
sdpMid:nil]
[peerConnection addIceCandidate:[[RTC_OBJC_TYPE(RTCIceCandidate) alloc]
initWithSdp:@"invalid"
sdpMLineIndex:-1
sdpMid:nil]
completionHandler:^(NSError *error) {
ASSERT_NE(error, nil);
if (error != nil) {
@@ -196,8 +217,9 @@
NSTimeInterval timeout = 5;
ASSERT_EQ(
0,
dispatch_semaphore_wait(negotiatedSem,
dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
dispatch_semaphore_wait(
negotiatedSem,
dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeout * NSEC_PER_SEC))));
[peerConnection close];
}

@@ -24,18 +24,22 @@
@implementation RTCSessionDescriptionTests
/**
* Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a native
* SessionDescriptionInterface (based on the types and SDP strings being equal).
* Test conversion of an Objective-C RTC_OBJC_TYPE(RTCSessionDescription) to a
* native SessionDescriptionInterface (based on the types and SDP strings being
* equal).
*/
- (void)testSessionDescriptionConversion {
RTC_OBJC_TYPE(RTCSessionDescription) *description =
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:[self sdp]];
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
initWithType:RTCSdpTypeAnswer
sdp:[self sdp]];
std::unique_ptr<webrtc::SessionDescriptionInterface> nativeDescription =
description.nativeDescription;
EXPECT_EQ(RTCSdpTypeAnswer,
[RTC_OBJC_TYPE(RTCSessionDescription) typeForStdString:nativeDescription->type()]);
[RTC_OBJC_TYPE(RTCSessionDescription)
typeForStdString:nativeDescription->type()]);
std::string sdp;
nativeDescription->ToString(&sdp);
@@ -43,13 +47,15 @@
}
- (void)testInitFromNativeSessionDescription {
const auto nativeDescription =
webrtc::CreateSessionDescription(webrtc::SdpType::kAnswer, [self sdp].stdString, nullptr);
const auto nativeDescription = webrtc::CreateSessionDescription(
webrtc::SdpType::kAnswer, [self sdp].stdString, nullptr);
RTC_OBJC_TYPE(RTCSessionDescription) *description = [[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
initWithNativeDescription:nativeDescription.get()];
EXPECT_EQ(webrtc::SessionDescriptionInterface::kAnswer,
[RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]);
RTC_OBJC_TYPE(RTCSessionDescription) *description =
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
initWithNativeDescription:nativeDescription.get()];
EXPECT_EQ(
webrtc::SessionDescriptionInterface::kAnswer,
[RTC_OBJC_TYPE(RTCSessionDescription) stdStringForType:description.type]);
EXPECT_TRUE([[self sdp] isEqualToString:description.sdp]);
}

@@ -25,10 +25,11 @@
- (NSString *)documentsFilePathForFileName:(NSString *)fileName {
NSParameterAssert(fileName.length);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSArray *paths = NSSearchPathForDirectoriesInDomains(
NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirPath = paths.firstObject;
NSString *filePath =
[documentsDirPath stringByAppendingPathComponent:fileName];
[documentsDirPath stringByAppendingPathComponent:fileName];
return filePath;
}

View File

@@ -33,8 +33,8 @@ static cricket::VideoFormat expectedFormat =
// CMVideoDescriptionRef mocking.
@interface AVCaptureDeviceFormatMock : NSObject
@property (nonatomic, assign) CMVideoFormatDescriptionRef format;
@property (nonatomic, strong) OCMockObject *rangeMock;
@property(nonatomic, assign) CMVideoFormatDescriptionRef format;
@property(nonatomic, strong) OCMockObject* rangeMock;
- (instancetype)initWithMediaSubtype:(FourCharCode)subtype
minFps:(float)minFps
@@ -55,8 +55,8 @@ static cricket::VideoFormat expectedFormat =
maxFps:(float)maxFps {
self = [super init];
if (self) {
CMVideoFormatDescriptionCreate(nil, subtype, kFormatWidth, kFormatHeight,
nil, &_format);
CMVideoFormatDescriptionCreate(
nil, subtype, kFormatWidth, kFormatHeight, nil, &_format);
// We can use OCMock for the range.
_rangeMock = [OCMockObject mockForClass:[AVFrameRateRange class]];
[[[_rangeMock stub] andReturnValue:@(minFps)] minFrameRate];
@@ -67,7 +67,7 @@ static cricket::VideoFormat expectedFormat =
}
+ (instancetype)validFormat {
AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
AVCaptureDeviceFormatMock* instance = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
minFps:0.0
maxFps:30.0];
@@ -75,7 +75,7 @@ static cricket::VideoFormat expectedFormat =
}
+ (instancetype)invalidFpsFormat {
AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
AVCaptureDeviceFormatMock* instance = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
minFps:0.0
maxFps:22.0];
@@ -83,7 +83,7 @@ static cricket::VideoFormat expectedFormat =
}
+ (instancetype)invalidMediaSubtypeFormat {
AVCaptureDeviceFormatMock *instance = [[AVCaptureDeviceFormatMock alloc]
AVCaptureDeviceFormatMock* instance = [[AVCaptureDeviceFormatMock alloc]
initWithMediaSubtype:kCVPixelFormatType_420YpCbCr8Planar
minFps:0.0
maxFps:60.0];
@@ -102,7 +102,7 @@ static cricket::VideoFormat expectedFormat =
return self.format;
}
- (NSArray *)videoSupportedFrameRateRanges {
- (NSArray*)videoSupportedFrameRateRanges {
return @[ self.rangeMock ];
}
@@ -202,8 +202,8 @@ TEST(AVFormatMapperTest, SetFormatWhenDeviceCannotLock) {
[[[mockDevice stub] andReturn:@[]] formats];
// when
bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
cricket::VideoFormat());
bool resultFormat = webrtc::SetFormatForCaptureDevice(
mockDevice, nil, cricket::VideoFormat());
// then
EXPECT_FALSE(resultFormat);
@@ -222,8 +222,8 @@ TEST(AVFormatMapperTest, SetFormatWhenFormatIsIncompatible) {
OCMExpect([mockDevice unlockForConfiguration]);
// when
bool resultFormat = webrtc::SetFormatForCaptureDevice(mockDevice, nil,
cricket::VideoFormat());
bool resultFormat = webrtc::SetFormatForCaptureDevice(
mockDevice, nil, cricket::VideoFormat());
// then
EXPECT_FALSE(resultFormat);
@@ -232,7 +232,7 @@ TEST(AVFormatMapperTest, SetFormatWhenFormatIsIncompatible) {
// https://github.com/erikdoe/ocmock/commit/de1419415581dc307045e54bfe9c98c86efea96b
// Without it, stubbed exceptions are being re-raised on [mock verify].
// More information here:
//https://github.com/erikdoe/ocmock/issues/241
// https://github.com/erikdoe/ocmock/issues/241
@try {
[mockDevice verify];
} @catch (NSException* exception) {

View File

@@ -18,22 +18,40 @@ void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) {
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
int byteOrder = CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32ARGB ?
int byteOrder = CVPixelBufferGetPixelFormatType(pixelBuffer) ==
kCVPixelFormatType_32ARGB ?
kCGBitmapByteOrder32Little :
0;
CGContextRef cgContext = CGBitmapContextCreate(baseAddr,
width,
height,
8,
CVPixelBufferGetBytesPerRow(pixelBuffer),
colorSpace,
byteOrder | kCGImageAlphaNoneSkipLast);
CGContextRef cgContext =
CGBitmapContextCreate(baseAddr,
width,
height,
8,
CVPixelBufferGetBytesPerRow(pixelBuffer),
colorSpace,
byteOrder | kCGImageAlphaNoneSkipLast);
// Create a gradient
CGFloat colors[] = {
1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0,
1.0,
};
CGGradientRef gradient = CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4);
CGGradientRef gradient =
CGGradientCreateWithColorComponents(colorSpace, colors, NULL, 4);
CGContextDrawLinearGradient(
cgContext, gradient, CGPointMake(0, 0), CGPointMake(width, height), 0);
@@ -47,36 +65,44 @@ void DrawGradientInRGBPixelBuffer(CVPixelBufferRef pixelBuffer) {
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width, int height) {
rtc::scoped_refptr<webrtc::I420Buffer> buffer(webrtc::I420Buffer::Create(width, height));
rtc::scoped_refptr<webrtc::I420Buffer> CreateI420Gradient(int width,
int height) {
rtc::scoped_refptr<webrtc::I420Buffer> buffer(
webrtc::I420Buffer::Create(width, height));
// Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
buffer->MutableDataY()[x + y * width] = 128 * (x * height + y * width) / (width * height);
buffer->MutableDataY()[x + y * width] =
128 * (x * height + y * width) / (width * height);
}
}
int chroma_width = buffer->ChromaWidth();
int chroma_height = buffer->ChromaHeight();
for (int x = 0; x < chroma_width; x++) {
for (int y = 0; y < chroma_height; y++) {
buffer->MutableDataU()[x + y * chroma_width] = 255 * x / (chroma_width - 1);
buffer->MutableDataV()[x + y * chroma_width] = 255 * y / (chroma_height - 1);
buffer->MutableDataU()[x + y * chroma_width] =
255 * x / (chroma_width - 1);
buffer->MutableDataV()[x + y * chroma_width] =
255 * y / (chroma_height - 1);
}
}
return buffer;
}
void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
CVPixelBufferRef pixelBuffer) {
void CopyI420BufferToCVPixelBuffer(
rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer,
CVPixelBufferRef pixelBuffer) {
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||
pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
// NV12
uint8_t* dstY = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
uint8_t* dstY = static_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
const int dstYStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
uint8_t* dstUV = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
uint8_t* dstUV = static_cast<uint8_t*>(
CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
const int dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
libyuv::I420ToNV12(i420Buffer->DataY(),
@@ -92,7 +118,8 @@ void CopyI420BufferToCVPixelBuffer(rtc::scoped_refptr<webrtc::I420Buffer> i420Bu
i420Buffer->width(),
i420Buffer->height());
} else {
uint8_t* dst = static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixelBuffer));
uint8_t* dst =
static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixelBuffer));
const int bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA) {

View File

@@ -40,7 +40,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
- (void)testCreateVideoFormatDescription {
CMVideoFormatDescriptionRef description =
webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER));
webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER,
arraysize(SPS_PPS_BUFFER));
XCTAssertTrue(description);
if (description) {
CFRelease(description);
@@ -58,8 +59,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
// PPS nalu.
0x00, 0x00, 0x01, 0x28, 0xCE, 0x3C, 0x30};
// clang-format on
description = webrtc::CreateVideoFormatDescription(sps_pps_not_at_start_buffer,
arraysize(sps_pps_not_at_start_buffer));
description = webrtc::CreateVideoFormatDescription(
sps_pps_not_at_start_buffer, arraysize(sps_pps_not_at_start_buffer));
XCTAssertTrue(description);
@@ -69,7 +70,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
}
const uint8_t other_buffer[] = {0x00, 0x00, 0x00, 0x01, 0x28};
XCTAssertFalse(webrtc::CreateVideoFormatDescription(other_buffer, arraysize(other_buffer)));
XCTAssertFalse(webrtc::CreateVideoFormatDescription(other_buffer,
arraysize(other_buffer)));
}
- (void)testReadEmptyInput {
@@ -85,7 +87,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
- (void)testReadSingleNalu {
const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x00, 0x01, 0xAA};
webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
webrtc::AnnexBBufferReader reader(annex_b_test_data,
arraysize(annex_b_test_data));
const uint8_t* nalu = nullptr;
size_t nalu_length = 0;
XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
@@ -100,7 +103,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
- (void)testReadSingleNalu3ByteHeader {
const uint8_t annex_b_test_data[] = {0x00, 0x00, 0x01, 0xAA};
webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
webrtc::AnnexBBufferReader reader(annex_b_test_data,
arraysize(annex_b_test_data));
const uint8_t* nalu = nullptr;
size_t nalu_length = 0;
XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
@@ -119,7 +123,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
0x00, 0x01,
0x00, 0x00, 0x00, 0xFF};
// clang-format on
webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
webrtc::AnnexBBufferReader reader(annex_b_test_data,
arraysize(annex_b_test_data));
const uint8_t* nalu = nullptr;
size_t nalu_length = 0;
XCTAssertEqual(0u, reader.BytesRemaining());
@@ -136,7 +141,8 @@ static const uint8_t SPS_PPS_BUFFER[] = {
0x00, 0x00, 0x00, 0xFF,
0x00, 0x00, 0x01, 0xAA, 0xBB};
// clang-format on
webrtc::AnnexBBufferReader reader(annex_b_test_data, arraysize(annex_b_test_data));
webrtc::AnnexBBufferReader reader(annex_b_test_data,
arraysize(annex_b_test_data));
const uint8_t* nalu = nullptr;
size_t nalu_length = 0;
XCTAssertEqual(arraysize(annex_b_test_data), reader.BytesRemaining());
@@ -160,22 +166,25 @@ static const uint8_t SPS_PPS_BUFFER[] = {
memset(buffer.get(), 0, buffer_size);
webrtc::AvccBufferWriter writer(buffer.get(), 0);
XCTAssertEqual(0u, writer.BytesRemaining());
XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
XCTAssertFalse(
writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertEqual(
0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
}
- (void)testWriteSingleNalu {
const uint8_t expected_buffer[] = {
0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC,
};
const uint8_t expected_buffer[] = {0x00, 0x00, 0x00, 0x03, 0xAA, 0xBB, 0xCC};
const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + 4;
std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
XCTAssertEqual(buffer_size, writer.BytesRemaining());
XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertTrue(
writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertEqual(0u, writer.BytesRemaining());
XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
XCTAssertFalse(
writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
XCTAssertEqual(
0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
}
- (void)testWriteMultipleNalus {
@@ -185,15 +194,20 @@ static const uint8_t SPS_PPS_BUFFER[] = {
0x00, 0x00, 0x00, 0x04, 0xDE, 0xAD, 0xBE, 0xEF
};
// clang-format on
const size_t buffer_size = arraysize(NALU_TEST_DATA_0) + arraysize(NALU_TEST_DATA_1) + 8;
const size_t buffer_size =
arraysize(NALU_TEST_DATA_0) + arraysize(NALU_TEST_DATA_1) + 8;
std::unique_ptr<uint8_t[]> buffer(new uint8_t[buffer_size]);
webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
XCTAssertEqual(buffer_size, writer.BytesRemaining());
XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertEqual(buffer_size - (arraysize(NALU_TEST_DATA_0) + 4), writer.BytesRemaining());
XCTAssertTrue(writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
XCTAssertTrue(
writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertEqual(buffer_size - (arraysize(NALU_TEST_DATA_0) + 4),
writer.BytesRemaining());
XCTAssertTrue(
writer.WriteNalu(NALU_TEST_DATA_1, arraysize(NALU_TEST_DATA_1)));
XCTAssertEqual(0u, writer.BytesRemaining());
XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
XCTAssertEqual(
0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
}
- (void)testOverflow {
@@ -203,9 +217,11 @@ static const uint8_t SPS_PPS_BUFFER[] = {
memset(buffer.get(), 0, buffer_size);
webrtc::AvccBufferWriter writer(buffer.get(), buffer_size);
XCTAssertEqual(buffer_size, writer.BytesRemaining());
XCTAssertFalse(writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertFalse(
writer.WriteNalu(NALU_TEST_DATA_0, arraysize(NALU_TEST_DATA_0)));
XCTAssertEqual(buffer_size, writer.BytesRemaining());
XCTAssertEqual(0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
XCTAssertEqual(
0, memcmp(expected_buffer, buffer.get(), arraysize(expected_buffer)));
}
- (void)testH264AnnexBBufferToCMSampleBuffer {
@@ -230,24 +246,27 @@ static const uint8_t SPS_PPS_BUFFER[] = {
CMSampleBufferRef out_sample_buffer = nil;
CMVideoFormatDescriptionRef description = [self createDescription];
Boolean result = webrtc::H264AnnexBBufferToCMSampleBuffer(annex_b_test_data,
arraysize(annex_b_test_data),
description,
&out_sample_buffer,
memory_pool);
Boolean result =
webrtc::H264AnnexBBufferToCMSampleBuffer(annex_b_test_data,
arraysize(annex_b_test_data),
description,
&out_sample_buffer,
memory_pool);
XCTAssertTrue(result);
XCTAssertEqual(description, CMSampleBufferGetFormatDescription(out_sample_buffer));
XCTAssertEqual(description,
CMSampleBufferGetFormatDescription(out_sample_buffer));
char* data_ptr = nullptr;
CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(out_sample_buffer);
CMBlockBufferRef block_buffer =
CMSampleBufferGetDataBuffer(out_sample_buffer);
size_t block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
CMBlockBufferGetDataPointer(block_buffer, 0, nullptr, nullptr, &data_ptr);
XCTAssertEqual(block_buffer_size, arraysize(annex_b_test_data));
int data_comparison_result =
memcmp(expected_cmsample_data, data_ptr, arraysize(expected_cmsample_data));
int data_comparison_result = memcmp(
expected_cmsample_data, data_ptr, arraysize(expected_cmsample_data));
XCTAssertEqual(0, data_comparison_result);
@@ -277,18 +296,21 @@ static const uint8_t SPS_PPS_BUFFER[] = {
rtc::Buffer annexb_buffer(arraysize(cmsample_data));
CMSampleBufferRef sample_buffer =
[self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
[self createCMSampleBufferRef:(void*)cmsample_data
cmsampleSize:arraysize(cmsample_data)];
Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
/* is_keyframe */ false,
&annexb_buffer);
Boolean result =
webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
/* is_keyframe */ false,
&annexb_buffer);
XCTAssertTrue(result);
XCTAssertEqual(arraysize(expected_annex_b_data), annexb_buffer.size());
int data_comparison_result =
memcmp(expected_annex_b_data, annexb_buffer.data(), arraysize(expected_annex_b_data));
int data_comparison_result = memcmp(expected_annex_b_data,
annexb_buffer.data(),
arraysize(expected_annex_b_data));
XCTAssertEqual(0, data_comparison_result);
}
@@ -310,18 +332,22 @@ static const uint8_t SPS_PPS_BUFFER[] = {
rtc::Buffer annexb_buffer(arraysize(cmsample_data));
CMSampleBufferRef sample_buffer =
[self createCMSampleBufferRef:(void*)cmsample_data cmsampleSize:arraysize(cmsample_data)];
[self createCMSampleBufferRef:(void*)cmsample_data
cmsampleSize:arraysize(cmsample_data)];
Boolean result = webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
/* is_keyframe */ true,
&annexb_buffer);
Boolean result =
webrtc::H264CMSampleBufferToAnnexBBuffer(sample_buffer,
/* is_keyframe */ true,
&annexb_buffer);
XCTAssertTrue(result);
XCTAssertEqual(arraysize(SPS_PPS_BUFFER) + arraysize(expected_annex_b_data),
annexb_buffer.size());
XCTAssertEqual(0, memcmp(SPS_PPS_BUFFER, annexb_buffer.data(), arraysize(SPS_PPS_BUFFER)));
XCTAssertEqual(
0,
memcmp(SPS_PPS_BUFFER, annexb_buffer.data(), arraysize(SPS_PPS_BUFFER)));
XCTAssertEqual(0,
memcmp(expected_annex_b_data,
@@ -331,12 +357,14 @@ static const uint8_t SPS_PPS_BUFFER[] = {
- (CMVideoFormatDescriptionRef)createDescription {
CMVideoFormatDescriptionRef description =
webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER, arraysize(SPS_PPS_BUFFER));
webrtc::CreateVideoFormatDescription(SPS_PPS_BUFFER,
arraysize(SPS_PPS_BUFFER));
XCTAssertTrue(description);
return description;
}
- (CMSampleBufferRef)createCMSampleBufferRef:(void*)cmsampleData cmsampleSize:(size_t)cmsampleSize {
- (CMSampleBufferRef)createCMSampleBufferRef:(void*)cmsampleData
cmsampleSize:(size_t)cmsampleSize {
CMSampleBufferRef sample_buffer = nil;
OSStatus status;

View File

@@ -23,7 +23,8 @@
#include "rtc_base/gunit.h"
#import "sdk/objc/base/RTCMacros.h"
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(int return_code) {
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(
int return_code) {
id decoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoder)));
OCMStub([decoderMock startDecodeWithNumberOfCores:1]).andReturn(return_code);
OCMStub([decoderMock decode:[OCMArg any]
@@ -33,11 +34,14 @@ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateDecoderFactoryReturning(int retu
.andReturn(return_code);
OCMStub([decoderMock releaseDecoder]).andReturn(return_code);
id decoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
id decoderFactoryMock =
OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoDecoderFactory)));
RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"
parameters:nil];
OCMStub([decoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
OCMStub([decoderFactoryMock createDecoder:[OCMArg any]]).andReturn(decoderMock);
OCMStub([decoderFactoryMock createDecoder:[OCMArg any]])
.andReturn(decoderMock);
return decoderFactoryMock;
}
@@ -52,7 +56,8 @@ id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> CreateErrorDecoderFactory() {
std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)> factory) {
webrtc::ObjCVideoDecoderFactory decoder_factory(factory);
return decoder_factory.Create(webrtc::CreateEnvironment(), webrtc::SdpVideoFormat::H264());
return decoder_factory.Create(webrtc::CreateEnvironment(),
webrtc::SdpVideoFormat::H264());
}
#pragma mark -
@@ -63,21 +68,24 @@ std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
@implementation ObjCVideoDecoderFactoryTests
- (void)testConfigureReturnsTrueOnSuccess {
std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
std::unique_ptr<webrtc::VideoDecoder> decoder =
GetObjCDecoder(CreateOKDecoderFactory());
webrtc::VideoDecoder::Settings settings;
EXPECT_TRUE(decoder->Configure(settings));
}
- (void)testConfigureReturnsFalseOnFail {
std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
std::unique_ptr<webrtc::VideoDecoder> decoder =
GetObjCDecoder(CreateErrorDecoderFactory());
webrtc::VideoDecoder::Settings settings;
EXPECT_FALSE(decoder->Configure(settings));
}
- (void)testDecodeReturnsOKOnSuccess {
std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
std::unique_ptr<webrtc::VideoDecoder> decoder =
GetObjCDecoder(CreateOKDecoderFactory());
webrtc::EncodedImage encoded_image;
encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
@@ -86,7 +94,8 @@ std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
}
- (void)testDecodeReturnsErrorOnFail {
std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
std::unique_ptr<webrtc::VideoDecoder> decoder =
GetObjCDecoder(CreateErrorDecoderFactory());
webrtc::EncodedImage encoded_image;
encoded_image.SetEncodedData(webrtc::EncodedImageBuffer::Create());
@@ -95,13 +104,15 @@ std::unique_ptr<webrtc::VideoDecoder> GetObjCDecoder(
}
- (void)testReleaseDecodeReturnsOKOnSuccess {
std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateOKDecoderFactory());
std::unique_ptr<webrtc::VideoDecoder> decoder =
GetObjCDecoder(CreateOKDecoderFactory());
EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_OK);
}
- (void)testReleaseDecodeReturnsErrorOnFail {
std::unique_ptr<webrtc::VideoDecoder> decoder = GetObjCDecoder(CreateErrorDecoderFactory());
std::unique_ptr<webrtc::VideoDecoder> decoder =
GetObjCDecoder(CreateErrorDecoderFactory());
EXPECT_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
}

View File

@@ -26,21 +26,27 @@
#include "rtc_base/gunit.h"
#include "sdk/objc/native/src/objc_frame_buffer.h"
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(int return_code) {
id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateEncoderFactoryReturning(
int return_code) {
id encoderMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoder)));
OCMStub([encoderMock startEncodeWithSettings:[OCMArg any] numberOfCores:1])
.andReturn(return_code);
OCMStub([encoderMock encode:[OCMArg any] codecSpecificInfo:[OCMArg any] frameTypes:[OCMArg any]])
OCMStub([encoderMock encode:[OCMArg any]
codecSpecificInfo:[OCMArg any]
frameTypes:[OCMArg any]])
.andReturn(return_code);
OCMStub([encoderMock releaseEncoder]).andReturn(return_code);
OCMStub([encoderMock setBitrate:0 framerate:0]).andReturn(return_code);
id encoderFactoryMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
RTC_OBJC_TYPE(RTCVideoCodecInfo)* supported =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264" parameters:nil];
id encoderFactoryMock =
OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoEncoderFactory)));
RTC_OBJC_TYPE(RTCVideoCodecInfo) *supported =
[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:@"H264"
parameters:nil];
OCMStub([encoderFactoryMock supportedCodecs]).andReturn(@[ supported ]);
OCMStub([encoderFactoryMock implementations]).andReturn(@[ supported ]);
OCMStub([encoderFactoryMock createEncoder:[OCMArg any]]).andReturn(encoderMock);
OCMStub([encoderFactoryMock createEncoder:[OCMArg any]])
.andReturn(encoderMock);
return encoderFactoryMock;
}
@@ -52,12 +58,14 @@ id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)> CreateErrorEncoderFactory() {
return CreateEncoderFactoryReturning(WEBRTC_VIDEO_CODEC_ERROR);
}
@interface RTCVideoEncoderFactoryFake : NSObject <RTC_OBJC_TYPE (RTCVideoEncoderFactory)>
@interface RTCVideoEncoderFactoryFake
: NSObject <RTC_OBJC_TYPE (RTCVideoEncoderFactory)>
- (instancetype)init NS_UNAVAILABLE;
- (instancetype)initWithScalabilityMode:(NSString *)scalabilityMode;
- (instancetype)initWithScalabilityMode:(NSString *)scalabilityMode
isPowerEfficient:(bool)isPowerEfficient NS_DESIGNATED_INITIALIZER;
isPowerEfficient:(bool)isPowerEfficient
NS_DESIGNATED_INITIALIZER;
@end
@implementation RTCVideoEncoderFactoryFake
@@ -92,10 +100,12 @@ bool _isPowerEfficient;
scalabilityMode:(nullable NSString *)scalabilityMode {
if (_scalabilityMode ? [_scalabilityMode isEqualToString:scalabilityMode] :
scalabilityMode == nil) {
return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc] initWithSupported:true
isPowerEfficient:_isPowerEfficient];
return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc]
initWithSupported:true
isPowerEfficient:_isPowerEfficient];
} else {
return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc] initWithSupported:false];
return [[RTC_OBJC_TYPE(RTCVideoEncoderCodecSupport) alloc]
initWithSupported:false];
}
}
@@ -116,31 +126,41 @@ std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
@implementation ObjCVideoEncoderFactoryTests
- (void)testInitEncodeReturnsOKOnSuccess {
std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
std::unique_ptr<webrtc::VideoEncoder> encoder =
GetObjCEncoder(CreateOKEncoderFactory());
auto* settings = new webrtc::VideoCodec();
auto *settings = new webrtc::VideoCodec();
const webrtc::VideoEncoder::Capabilities kCapabilities(false);
EXPECT_EQ(encoder->InitEncode(settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
EXPECT_EQ(encoder->InitEncode(
settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
WEBRTC_VIDEO_CODEC_OK);
}
- (void)testInitEncodeReturnsErrorOnFail {
std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
std::unique_ptr<webrtc::VideoEncoder> encoder =
GetObjCEncoder(CreateErrorEncoderFactory());
auto* settings = new webrtc::VideoCodec();
auto *settings = new webrtc::VideoCodec();
const webrtc::VideoEncoder::Capabilities kCapabilities(false);
EXPECT_EQ(encoder->InitEncode(settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
EXPECT_EQ(encoder->InitEncode(
settings, webrtc::VideoEncoder::Settings(kCapabilities, 1, 0)),
WEBRTC_VIDEO_CODEC_ERROR);
}
- (void)testEncodeReturnsOKOnSuccess {
std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
std::unique_ptr<webrtc::VideoEncoder> encoder =
GetObjCEncoder(CreateOKEncoderFactory());
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
CVPixelBufferCreate(kCFAllocatorDefault,
640,
480,
kCVPixelFormatType_32ARGB,
nil,
&pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
rtc::make_ref_counted<webrtc::ObjCFrameBuffer>([[RTC_OBJC_TYPE(
RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_rotation(webrtc::kVideoRotation_0)
@@ -152,13 +172,19 @@ std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
}
- (void)testEncodeReturnsErrorOnFail {
std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
std::unique_ptr<webrtc::VideoEncoder> encoder =
GetObjCEncoder(CreateErrorEncoderFactory());
CVPixelBufferRef pixel_buffer;
CVPixelBufferCreate(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_32ARGB, nil, &pixel_buffer);
CVPixelBufferCreate(kCFAllocatorDefault,
640,
480,
kCVPixelFormatType_32ARGB,
nil,
&pixel_buffer);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
rtc::make_ref_counted<webrtc::ObjCFrameBuffer>(
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
rtc::make_ref_counted<webrtc::ObjCFrameBuffer>([[RTC_OBJC_TYPE(
RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixel_buffer]);
webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_rotation(webrtc::kVideoRotation_0)
@@ -170,19 +196,22 @@ std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
}
- (void)testReleaseEncodeReturnsOKOnSuccess {
std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateOKEncoderFactory());
std::unique_ptr<webrtc::VideoEncoder> encoder =
GetObjCEncoder(CreateOKEncoderFactory());
EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_OK);
}
- (void)testReleaseEncodeReturnsErrorOnFail {
std::unique_ptr<webrtc::VideoEncoder> encoder = GetObjCEncoder(CreateErrorEncoderFactory());
std::unique_ptr<webrtc::VideoEncoder> encoder =
GetObjCEncoder(CreateErrorEncoderFactory());
EXPECT_EQ(encoder->Release(), WEBRTC_VIDEO_CODEC_ERROR);
}
- (void)testQueryCodecSupportDelegatesToObjcFactoryConvertsNulloptModeToNil {
id fakeEncoderFactory = [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:nil];
id fakeEncoderFactory =
[[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:nil];
webrtc::SdpVideoFormat codec("VP8");
webrtc::ObjCVideoEncoderFactory encoder_factory(fakeEncoderFactory);
@@ -193,7 +222,8 @@ std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
}
- (void)testQueryCodecSupportDelegatesToObjcFactoryMayReturnUnsupported {
id fakeEncoderFactory = [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"];
id fakeEncoderFactory =
[[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"];
webrtc::SdpVideoFormat codec("VP8");
webrtc::ObjCVideoEncoderFactory encoder_factory(fakeEncoderFactory);
@@ -201,26 +231,30 @@ std::unique_ptr<webrtc::VideoEncoder> GetObjCEncoder(
}
- (void)testQueryCodecSupportDelegatesToObjcFactoryIncludesPowerEfficientFlag {
id fakeEncoderFactory = [[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"
isPowerEfficient:true];
id fakeEncoderFactory =
[[RTCVideoEncoderFactoryFake alloc] initWithScalabilityMode:@"L1T2"
isPowerEfficient:true];
webrtc::SdpVideoFormat codec("VP8");
webrtc::ObjCVideoEncoderFactory encoder_factory(fakeEncoderFactory);
webrtc::VideoEncoderFactory::CodecSupport s = encoder_factory.QueryCodecSupport(codec, "L1T2");
webrtc::VideoEncoderFactory::CodecSupport s =
encoder_factory.QueryCodecSupport(codec, "L1T2");
EXPECT_TRUE(s.is_supported);
EXPECT_TRUE(s.is_power_efficient);
}
- (void)testGetSupportedFormats {
webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetSupportedFormats();
std::vector<webrtc::SdpVideoFormat> supportedFormats =
encoder_factory.GetSupportedFormats();
EXPECT_EQ(supportedFormats.size(), 1u);
EXPECT_EQ(supportedFormats[0].name, "H264");
}
- (void)testGetImplementations {
webrtc::ObjCVideoEncoderFactory encoder_factory(CreateOKEncoderFactory());
std::vector<webrtc::SdpVideoFormat> supportedFormats = encoder_factory.GetImplementations();
std::vector<webrtc::SdpVideoFormat> supportedFormats =
encoder_factory.GetImplementations();
EXPECT_EQ(supportedFormats.size(), 1u);
EXPECT_EQ(supportedFormats[0].name, "H264");
}

View File

@@ -33,7 +33,8 @@ struct TestTypeTraits {
};
} // namespace
using ScopedTestType = rtc::internal::ScopedTypeRef<TestTypeRef, TestTypeTraits>;
using ScopedTestType =
rtc::internal::ScopedTypeRef<TestTypeRef, TestTypeTraits>;
// In these tests we sometimes introduce variables just to
// observe side-effects. Ignore the compiler's complaints.