Use RTCAudioSessionDelegateAdapter in AudioDeviceIOS.

Part 3 of refactor. Also:
- better weak pointer delegate storage + tests
- we now ignore route changes when we're interrupted
- fixed bug where preferred sample rate wasn't set if audio session
   wasn't active

BUG=

Review URL: https://codereview.webrtc.org/1796983004

Cr-Commit-Position: refs/heads/master@{#12007}
This commit is contained in:
tkchin 2016-03-15 16:54:03 -07:00 committed by Commit bot
parent 4557d3333d
commit e54467f73e
15 changed files with 499 additions and 193 deletions

View File

@ -8,8 +8,19 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_OBJC_RTC_MACROS_H_
#define WEBRTC_BASE_OBJC_RTC_MACROS_H_
#if defined(__cplusplus)
#define RTC_EXPORT extern "C"
#define RTC_EXPORT extern "C"
#else
#define RTC_EXPORT extern
#define RTC_EXPORT extern
#endif
#ifdef __OBJC__
#define RTC_FWD_DECL_OBJC_CLASS(classname) @class classname
#else
#define RTC_FWD_DECL_OBJC_CLASS(classname) typedef struct objc_object classname
#endif
#endif // WEBRTC_BASE_OBJC_RTC_MACROS_H_

View File

@ -58,6 +58,7 @@
</array>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
<string>voip</string>
</array>
<key>UILaunchImages</key>

View File

@ -135,12 +135,15 @@ source_set("audio_device") {
"ios/audio_device_ios.h",
"ios/audio_device_ios.mm",
"ios/audio_device_not_implemented_ios.mm",
"ios/audio_session_observer.h",
"ios/objc/RTCAudioSession+Configuration.mm",
"ios/objc/RTCAudioSession+Private.h",
"ios/objc/RTCAudioSession.h",
"ios/objc/RTCAudioSession.mm",
"ios/objc/RTCAudioSessionConfiguration.h",
"ios/objc/RTCAudioSessionConfiguration.m",
"ios/objc/RTCAudioSessionDelegateAdapter.h",
"ios/objc/RTCAudioSessionDelegateAdapter.mm",
]
cflags += [ "-fobjc-arc" ] # CLANG_ENABLE_OBJC_ARC = YES.
libs = [

View File

@ -173,12 +173,15 @@
'ios/audio_device_ios.h',
'ios/audio_device_ios.mm',
'ios/audio_device_not_implemented_ios.mm',
'ios/audio_session_observer.h',
'ios/objc/RTCAudioSession+Configuration.mm',
'ios/objc/RTCAudioSession+Private.h',
'ios/objc/RTCAudioSession.h',
'ios/objc/RTCAudioSession.mm',
'ios/objc/RTCAudioSessionConfiguration.h',
'ios/objc/RTCAudioSessionConfiguration.m',
'ios/objc/RTCAudioSessionDelegateAdapter.h',
'ios/objc/RTCAudioSessionDelegateAdapter.mm',
],
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',

View File

@ -15,8 +15,14 @@
#include <AudioUnit/AudioUnit.h>
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/objc/RTCMacros.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/audio_device/ios/audio_session_observer.h"
RTC_FWD_DECL_OBJC_CLASS(RTCAudioSessionDelegateAdapter);
namespace webrtc {
@ -35,7 +41,8 @@ class FineAudioBuffer;
// Recorded audio will be delivered on a real-time internal I/O thread in the
// audio unit. The audio unit will also ask for audio data to play out on this
// same thread.
class AudioDeviceIOS : public AudioDeviceGeneric {
class AudioDeviceIOS : public AudioDeviceGeneric,
public AudioSessionObserver {
public:
AudioDeviceIOS();
~AudioDeviceIOS();
@ -151,16 +158,21 @@ class AudioDeviceIOS : public AudioDeviceGeneric {
void ClearRecordingWarning() override {}
void ClearRecordingError() override {}
// AudioSessionObserver methods. May be called from any thread.
void OnInterruptionBegin() override;
void OnInterruptionEnd() override;
void OnValidRouteChange() override;
private:
// Called by the relevant AudioSessionObserver methods on |thread_|.
void HandleInterruptionBegin();
void HandleInterruptionEnd();
void HandleValidRouteChange();
// Uses current |playout_parameters_| and |record_parameters_| to inform the
// audio device buffer (ADB) about our internal audio parameters.
void UpdateAudioDeviceBuffer();
// Registers observers for the AVAudioSessionRouteChangeNotification and
// AVAudioSessionInterruptionNotification notifications.
void RegisterNotificationObservers();
void UnregisterNotificationObservers();
// Since the preferred audio parameters are only hints to the OS, the actual
// values may be different once the AVAudioSession has been activated.
// This method asks for the current hardware parameters and takes actions
@ -218,6 +230,10 @@ class AudioDeviceIOS : public AudioDeviceGeneric {
// Ensures that methods are called from the same thread as this object is
// created on.
rtc::ThreadChecker thread_checker_;
// Thread that this object is created on.
rtc::Thread* thread_;
// Invoker used to execute methods on thread_.
std::unique_ptr<rtc::AsyncInvoker> async_invoker_;
// Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
// AudioDeviceModuleImpl class and called by AudioDeviceModuleImpl::Create().
@ -286,9 +302,11 @@ class AudioDeviceIOS : public AudioDeviceGeneric {
// Set to true after successful call to InitPlayout(), false otherwise.
bool play_is_initialized_;
// Set to true if audio session is interrupted, false otherwise.
bool is_interrupted_;
// Audio interruption observer instance.
void* audio_interruption_observer_;
void* route_change_observer_;
RTCAudioSessionDelegateAdapter* audio_session_observer_;
// Contains the audio data format specification for a stream of audio.
AudioStreamBasicDescription application_format_;

View File

@ -18,16 +18,20 @@
#include "webrtc/modules/audio_device/ios/audio_device_ios.h"
#include "webrtc/base/atomicops.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/audio_device/fine_audio_buffer.h"
#include "webrtc/modules/utility/include/helpers_ios.h"
#import "webrtc/base/objc/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionConfiguration.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.h"
namespace webrtc {
@ -106,20 +110,24 @@ static void LogDeviceInfo() {
#endif // !defined(NDEBUG)
AudioDeviceIOS::AudioDeviceIOS()
: audio_device_buffer_(nullptr),
vpio_unit_(nullptr),
recording_(0),
playing_(0),
initialized_(false),
rec_is_initialized_(false),
play_is_initialized_(false),
audio_interruption_observer_(nullptr),
route_change_observer_(nullptr) {
: async_invoker_(new rtc::AsyncInvoker()),
audio_device_buffer_(nullptr),
vpio_unit_(nullptr),
recording_(0),
playing_(0),
initialized_(false),
rec_is_initialized_(false),
play_is_initialized_(false),
is_interrupted_(false) {
LOGI() << "ctor" << ios::GetCurrentThreadDescription();
thread_ = rtc::Thread::Current();
audio_session_observer_ =
[[RTCAudioSessionDelegateAdapter alloc] initWithObserver:this];
}
AudioDeviceIOS::~AudioDeviceIOS() {
LOGI() << "~dtor" << ios::GetCurrentThreadDescription();
audio_session_observer_ = nil;
RTC_DCHECK(thread_checker_.CalledOnValidThread());
Terminate();
}
@ -332,6 +340,80 @@ int AudioDeviceIOS::GetRecordAudioParameters(AudioParameters* params) const {
return 0;
}
// AudioSessionObserver callback; may fire on any thread. Marshals the
// handling onto |thread_| so all interruption state is touched from one
// thread only.
void AudioDeviceIOS::OnInterruptionBegin() {
  RTC_DCHECK(async_invoker_);
  RTC_DCHECK(thread_);
  if (!thread_->IsCurrent()) {
    // Hop over to |thread_| and run the handler there.
    async_invoker_->AsyncInvoke<void>(
        thread_,
        rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionBegin, this));
    return;
  }
  HandleInterruptionBegin();
}
// AudioSessionObserver callback; may fire on any thread. Marshals the
// handling onto |thread_| so all interruption state is touched from one
// thread only.
void AudioDeviceIOS::OnInterruptionEnd() {
  RTC_DCHECK(async_invoker_);
  RTC_DCHECK(thread_);
  if (!thread_->IsCurrent()) {
    // Hop over to |thread_| and run the handler there.
    async_invoker_->AsyncInvoke<void>(
        thread_,
        rtc::Bind(&webrtc::AudioDeviceIOS::HandleInterruptionEnd, this));
    return;
  }
  HandleInterruptionEnd();
}
// AudioSessionObserver callback; may fire on any thread. Marshals the
// handling onto |thread_| so route-change processing is serialized with the
// interruption handlers.
void AudioDeviceIOS::OnValidRouteChange() {
  RTC_DCHECK(async_invoker_);
  RTC_DCHECK(thread_);
  if (!thread_->IsCurrent()) {
    // Hop over to |thread_| and run the handler there.
    async_invoker_->AsyncInvoke<void>(
        thread_,
        rtc::Bind(&webrtc::AudioDeviceIOS::HandleValidRouteChange, this));
    return;
  }
  HandleValidRouteChange();
}
// Stops the audio unit while the system owns the audio session, and flags
// the interruption so route changes are ignored until it ends. Must run on
// |thread_| (dispatched there by OnInterruptionBegin()).
void AudioDeviceIOS::HandleInterruptionBegin() {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  RTCLog(@"Stopping the audio unit due to interruption begin.");
  // Fixed duplicated word ("the the") in the error message below.
  LOG_IF_ERROR(AudioOutputUnitStop(vpio_unit_),
               "Failed to stop the Voice-Processing I/O unit");
  is_interrupted_ = true;
}
// Restarts the audio unit once the interruption is over and clears the
// interrupted flag so route changes are processed again. Must run on
// |thread_| (dispatched there by OnInterruptionEnd()).
void AudioDeviceIOS::HandleInterruptionEnd() {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  RTCLog(@"Starting the audio unit due to interruption end.");
  // Fixed duplicated word ("the the") in the error message below.
  LOG_IF_ERROR(AudioOutputUnitStart(vpio_unit_),
               "Failed to start the Voice-Processing I/O unit");
  is_interrupted_ = false;
}
// Reacts to a route change that may affect audio I/O. Must run on |thread_|
// (dispatched there by OnValidRouteChange()).
void AudioDeviceIOS::HandleValidRouteChange() {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  // While interrupted the audio unit is stopped; it will be restarted when
  // the interruption ends, so there is nothing to do here.
  if (is_interrupted_) {
    return;
  }
  // A route change only matters to us if it altered the hardware sample
  // rate; in that case the audio unit has to be rebuilt with the new format.
  RTCAudioSession* session = [RTCAudioSession sharedInstance];
  const double old_sample_rate = playout_parameters_.sample_rate();
  const double new_sample_rate = session.sampleRate;
  if (old_sample_rate == new_sample_rate) {
    return;
  }
  RTCLog(@"Route changed caused sample rate to change from %f to %f. "
          "Restarting audio unit.", old_sample_rate, new_sample_rate);
  if (!RestartAudioUnitWithNewFormat(new_sample_rate)) {
    RTCLogError(@"Audio restart failed.");
  }
}
void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
LOGI() << "UpdateAudioDevicebuffer";
// AttachAudioBuffer() is called at construction by the main class but check
@ -345,155 +427,14 @@ void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
}
// Installs block-based NSNotificationCenter observers for audio session
// interruptions and route changes. The opaque observer tokens are manually
// retained as void* (so this state can live in a C++-visible header) and are
// released again in UnregisterNotificationObservers().
void AudioDeviceIOS::RegisterNotificationObservers() {
  LOGI() << "RegisterNotificationObservers";
  // This code block will be called when AVAudioSessionInterruptionNotification
  // is observed.
  void (^interrupt_block)(NSNotification*) = ^(NSNotification* notification) {
    NSNumber* type_number =
        notification.userInfo[AVAudioSessionInterruptionTypeKey];
    AVAudioSessionInterruptionType type =
        (AVAudioSessionInterruptionType)type_number.unsignedIntegerValue;
    LOG(LS_INFO) << "Audio session interruption:";
    switch (type) {
      case AVAudioSessionInterruptionTypeBegan:
        // The system has deactivated our audio session.
        // Stop the active audio unit.
        LOG(LS_INFO) << " Began => stopping the audio unit";
        LOG_IF_ERROR(AudioOutputUnitStop(vpio_unit_),
                     "Failed to stop the the Voice-Processing I/O unit");
        break;
      case AVAudioSessionInterruptionTypeEnded:
        // The interruption has ended. Restart the audio session and start the
        // initialized audio unit again.
        LOG(LS_INFO) << " Ended => restarting audio session and audio unit";
        NSError* error = nil;
        BOOL success = NO;
        AVAudioSession* session = [AVAudioSession sharedInstance];
        success = [session setActive:YES error:&error];
        // Only restart the audio unit if reactivating the session succeeded.
        if (CheckAndLogError(success, error)) {
          LOG_IF_ERROR(AudioOutputUnitStart(vpio_unit_),
                       "Failed to start the the Voice-Processing I/O unit");
        }
        break;
    }
  };
  // This code block will be called when AVAudioSessionRouteChangeNotification
  // is observed.
  void (^route_change_block)(NSNotification*) =
      ^(NSNotification* notification) {
        // Get reason for current route change.
        NSNumber* reason_number =
            notification.userInfo[AVAudioSessionRouteChangeReasonKey];
        AVAudioSessionRouteChangeReason reason =
            (AVAudioSessionRouteChangeReason)reason_number.unsignedIntegerValue;
        // All reasons except RouteConfigurationChange are treated as "valid"
        // and may trigger an audio-unit restart below.
        bool valid_route_change = true;
        LOG(LS_INFO) << "Route change:";
        switch (reason) {
          case AVAudioSessionRouteChangeReasonUnknown:
            LOG(LS_INFO) << " ReasonUnknown";
            break;
          case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
            LOG(LS_INFO) << " NewDeviceAvailable";
            break;
          case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
            LOG(LS_INFO) << " OldDeviceUnavailable";
            break;
          case AVAudioSessionRouteChangeReasonCategoryChange:
            // It turns out that we see this notification (at least in iOS 9.2)
            // when making a switch from a BT device to e.g. Speaker using the
            // iOS Control Center and that we therefore must check if the sample
            // rate has changed. And if so is the case, restart the audio unit.
            LOG(LS_INFO) << " CategoryChange";
            LOG(LS_INFO) << " New category: " << ios::GetAudioSessionCategory();
            break;
          case AVAudioSessionRouteChangeReasonOverride:
            LOG(LS_INFO) << " Override";
            break;
          case AVAudioSessionRouteChangeReasonWakeFromSleep:
            LOG(LS_INFO) << " WakeFromSleep";
            break;
          case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
            LOG(LS_INFO) << " NoSuitableRouteForCategory";
            break;
          case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
            // The set of input and output ports has not changed, but their
            // configuration has, e.g., a ports selected data source has
            // changed. Ignore this type of route change since we are focusing
            // on detecting headset changes.
            LOG(LS_INFO) << " RouteConfigurationChange (ignored)";
            valid_route_change = false;
            break;
        }
        if (valid_route_change) {
          // Log previous route configuration.
          AVAudioSessionRouteDescription* prev_route =
              notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
          LOG(LS_INFO) << "Previous route:";
          LOG(LS_INFO) << ios::StdStringFromNSString(
              [NSString stringWithFormat:@"%@", prev_route]);
          // Only restart audio for a valid route change and if the
          // session sample rate has changed.
          RTCAudioSession* session = [RTCAudioSession sharedInstance];
          const double session_sample_rate = session.sampleRate;
          LOG(LS_INFO) << "session sample rate: " << session_sample_rate;
          if (playout_parameters_.sample_rate() != session_sample_rate) {
            if (!RestartAudioUnitWithNewFormat(session_sample_rate)) {
              LOG(LS_ERROR) << "Audio restart failed";
            }
          }
        }
      };
  // Get the default notification center of the current process.
  NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
  // Add AVAudioSessionInterruptionNotification observer.
  id interruption_observer =
      [center addObserverForName:AVAudioSessionInterruptionNotification
                          object:nil
                           queue:[NSOperationQueue mainQueue]
                      usingBlock:interrupt_block];
  // Add AVAudioSessionRouteChangeNotification observer.
  id route_change_observer =
      [center addObserverForName:AVAudioSessionRouteChangeNotification
                          object:nil
                           queue:[NSOperationQueue mainQueue]
                      usingBlock:route_change_block];
  // Increment refcount on observers using ARC bridge. Instance variable is a
  // void* instead of an id because header is included in other pure C++
  // files.
  audio_interruption_observer_ = (__bridge_retained void*)interruption_observer;
  route_change_observer_ = (__bridge_retained void*)route_change_observer;
}
// Removes and releases the notification-center observers installed by
// RegisterNotificationObservers(). Safe to call when no observers are set
// (both ivars are checked for null first).
void AudioDeviceIOS::UnregisterNotificationObservers() {
  LOGI() << "UnregisterNotificationObservers";
  // Transfer ownership of observer back to ARC, which will deallocate the
  // observer once it exits this scope.
  NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
  if (audio_interruption_observer_ != nullptr) {
    // __bridge_transfer balances the __bridge_retained cast done at
    // registration time; ARC releases |observer| at end of scope.
    id observer = (__bridge_transfer id)audio_interruption_observer_;
    [center removeObserver:observer];
    audio_interruption_observer_ = nullptr;
  }
  if (route_change_observer_ != nullptr) {
    id observer = (__bridge_transfer id)route_change_observer_;
    [center removeObserver:observer];
    route_change_observer_ = nullptr;
  }
}
void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
LOGI() << "SetupAudioBuffersForActiveAudioSession";
// Verify the current values once the audio session has been activated.
RTCAudioSession* session = [RTCAudioSession sharedInstance];
LOG(LS_INFO) << " sample rate: " << session.sampleRate;
LOG(LS_INFO) << " IO buffer duration: " << session.IOBufferDuration;
double sample_rate = session.sampleRate;
NSTimeInterval io_buffer_duration = session.IOBufferDuration;
LOG(LS_INFO) << " sample rate: " << sample_rate;
LOG(LS_INFO) << " IO buffer duration: " << io_buffer_duration;
LOG(LS_INFO) << " output channels: " << session.outputNumberOfChannels;
LOG(LS_INFO) << " input channels: " << session.inputNumberOfChannels;
LOG(LS_INFO) << " output latency: " << session.outputLatency;
@ -505,7 +446,7 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
// 16kHz.
RTCAudioSessionConfiguration* webRTCConfig =
[RTCAudioSessionConfiguration webRTCConfiguration];
if (session.sampleRate != webRTCConfig.sampleRate) {
if (sample_rate != webRTCConfig.sampleRate) {
LOG(LS_WARNING) << "Unable to set the preferred sample rate";
}
@ -514,11 +455,11 @@ void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
// number of audio frames.
// Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz.
// Hence, 128 is the size we expect to see in upcoming render callbacks.
playout_parameters_.reset(session.sampleRate, playout_parameters_.channels(),
session.IOBufferDuration);
playout_parameters_.reset(sample_rate, playout_parameters_.channels(),
io_buffer_duration);
RTC_DCHECK(playout_parameters_.is_complete());
record_parameters_.reset(session.sampleRate, record_parameters_.channels(),
session.IOBufferDuration);
record_parameters_.reset(sample_rate, record_parameters_.channels(),
io_buffer_duration);
RTC_DCHECK(record_parameters_.is_complete());
LOG(LS_INFO) << " frames per I/O buffer: "
<< playout_parameters_.frames_per_buffer();
@ -784,7 +725,7 @@ bool AudioDeviceIOS::InitPlayOrRecord() {
}
// Start observing audio session interruptions and route changes.
RegisterNotificationObservers();
[session pushDelegate:audio_session_observer_];
// Ensure that we got what we asked for in our active audio session.
SetupAudioBuffersForActiveAudioSession();
@ -816,11 +757,11 @@ void AudioDeviceIOS::ShutdownPlayOrRecord() {
}
// Remove audio session notification observers.
UnregisterNotificationObservers();
RTCAudioSession* session = [RTCAudioSession sharedInstance];
[session removeDelegate:audio_session_observer_];
// All I/O should be stopped or paused prior to deactivating the audio
// session, hence we deactivate as last action.
RTCAudioSession* session = [RTCAudioSession sharedInstance];
[session lockForConfiguration];
[session setActive:NO error:nil];
[session unlockForConfiguration];

View File

@ -0,0 +1,37 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_DEVICE_IOS_AUDIO_SESSION_OBSERVER_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_IOS_AUDIO_SESSION_OBSERVER_H_
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/thread.h"
namespace webrtc {
// Observer interface for listening to AVAudioSession events.
// Implemented by AudioDeviceIOS; calls are forwarded from
// RTCAudioSessionDelegateAdapter and may arrive on any thread.
class AudioSessionObserver {
 public:
  // Called when audio session interruption begins.
  virtual void OnInterruptionBegin() = 0;

  // Called when audio session interruption ends.
  virtual void OnInterruptionEnd() = 0;

  // Called when audio route changes.
  virtual void OnValidRouteChange() = 0;

 protected:
  // Non-virtual-public destructor: observers are never deleted through this
  // interface.
  virtual ~AudioSessionObserver() {}
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_DEVICE_IOS_AUDIO_SESSION_OBSERVER_H_

View File

@ -49,7 +49,8 @@
}
}
if (self.sampleRate != configuration.sampleRate) {
// self.sampleRate is accurate only if the audio session is active.
if (!self.isActive || self.sampleRate != configuration.sampleRate) {
NSError *sampleRateError = nil;
if (![self setPreferredSampleRate:configuration.sampleRate
error:&sampleRateError]) {
@ -59,7 +60,9 @@
}
}
if (self.IOBufferDuration != configuration.ioBufferDuration) {
// self.IOBufferDuration is accurate only if the audio session is active.
if (!self.isActive ||
self.IOBufferDuration != configuration.ioBufferDuration) {
NSError *bufferDurationError = nil;
if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
error:&bufferDurationError]) {

View File

@ -10,13 +10,12 @@
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
#include <vector>
NS_ASSUME_NONNULL_BEGIN
@interface RTCAudioSession ()
/** The delegates. */
@property(nonatomic, readonly) NSSet *delegates;
/** Number of times setActive:YES has succeeded without a balanced call to
* setActive:NO.
*/
@ -24,6 +23,23 @@ NS_ASSUME_NONNULL_BEGIN
- (BOOL)checkLock:(NSError **)outError;
/** Adds the delegate to the list of delegates, and places it at the front of
* the list. This delegate will be notified before other delegates of
* audio events.
*/
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate;
// Properties and methods for tests.
@property(nonatomic, readonly)
std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
- (void)notifyDidBeginInterruption;
- (void)notifyDidEndInterruptionWithShouldResumeSession:
(BOOL)shouldResumeSession;
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
- (void)notifyMediaServicesWereLost;
- (void)notifyMediaServicesWereReset;
@end
NS_ASSUME_NONNULL_END

View File

@ -122,10 +122,7 @@ extern NSInteger const kRTCAudioSessionErrorConfiguration;
/** Default constructor. Do not call init. */
+ (instancetype)sharedInstance;
/** Adds a delegate, which is held weakly. Even though it's held weakly, callers
* should still call |removeDelegate| when it's no longer required to ensure
* proper dealloc. This is due to internal use of an NSHashTable.
*/
/** Adds a delegate, which is held weakly. */
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate;
/** Removes an added delegate. */
- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate;

View File

@ -12,6 +12,7 @@
#include <algorithm>

#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/audio_device/ios/audio_device_ios.h"

#import "webrtc/base/objc/RTCLogging.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
@ -26,7 +27,6 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
@implementation RTCAudioSession {
rtc::CriticalSection _crit;
AVAudioSession *_session;
NSHashTable *_delegates;
NSInteger _activationCount;
NSInteger _lockRecursionCount;
BOOL _isActive;
@ -34,6 +34,7 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
@synthesize session = _session;
@synthesize delegates = _delegates;
+ (instancetype)sharedInstance {
static dispatch_once_t onceToken;
@ -47,7 +48,6 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
- (instancetype)init {
if (self = [super init]) {
_session = [AVAudioSession sharedInstance];
_delegates = [NSHashTable weakObjectsHashTable];
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center addObserver:self
@ -109,14 +109,24 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
// Appends |delegate| to the delegate list. No-op for nil. Delegates are held
// weakly; zeroed entries are pruned on each mutation.
// Fix: removed the leftover NSHashTable call ([_delegates addObject:]) that
// cannot compile now that _delegates is a std::vector, and which duplicated
// the insertion.
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.push_back(delegate);
    [self removeZeroedDelegates];
  }
}
// Removes all occurrences of |delegate| from the delegate list. No-op for
// nil or for a delegate that was never added.
// Fix: use the full erase-remove idiom. Calling erase() with only the
// iterator returned by std::remove is undefined behavior when the delegate
// is absent (the iterator is end()), and it also erased just a single
// element instead of the whole removed tail. Also dropped the leftover
// NSHashTable call ([_delegates removeObject:]).
- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.erase(
        std::remove(_delegates.begin(), _delegates.end(), delegate),
        _delegates.end());
    [self removeZeroedDelegates];
  }
}
@ -227,6 +237,8 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return self.session.IOBufferDuration;
}
// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
- (BOOL)setActive:(BOOL)active
error:(NSError **)outError {
if (![self checkLock:outError]) {
@ -459,9 +471,26 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
return error;
}
- (NSSet *)delegates {
// Returns a snapshot copy of the delegate list so callers can iterate
// without holding the lock. Entries may be nil if a weakly held delegate
// deallocated after it was added.
- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
  @synchronized(self) {
    // Note: this returns a copy.
    return _delegates;
  }
}
// Inserts |delegate| at the front of the delegate list so it is notified
// before previously added delegates.
// Fix: added the nil guard that addDelegate: and removeDelegate: already
// have, so a nil argument no longer inserts an empty slot.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
  if (!delegate) {
    return;
  }
  @synchronized(self) {
    _delegates.insert(_delegates.begin(), delegate);
  }
}
// Prunes entries whose weak references have been zeroed out by ARC.
// Fix: the original loop called _delegates.erase(it) and then incremented
// the now-invalidated iterator — undefined behavior — and it also skipped
// the element that slid into the erased slot. Use erase-remove_if instead.
- (void)removeZeroedDelegates {
  @synchronized(self) {
    _delegates.erase(
        std::remove_if(_delegates.begin(), _delegates.end(),
                       [](id<RTCAudioSessionDelegate> d) { return d == nil; }),
        _delegates.end());
  }
}
@ -513,14 +542,14 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
- (void)notifyDidBeginInterruption {
for (id<RTCAudioSessionDelegate> delegate in self.delegates) {
for (auto delegate : self.delegates) {
[delegate audioSessionDidBeginInterruption:self];
}
}
- (void)notifyDidEndInterruptionWithShouldResumeSession:
(BOOL)shouldResumeSession {
for (id<RTCAudioSessionDelegate> delegate in self.delegates) {
for (auto delegate : self.delegates) {
[delegate audioSessionDidEndInterruption:self
shouldResumeSession:shouldResumeSession];
}
@ -529,7 +558,7 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
for (id<RTCAudioSessionDelegate> delegate in self.delegates) {
for (auto delegate : self.delegates) {
[delegate audioSessionDidChangeRoute:self
reason:reason
previousRoute:previousRoute];
@ -537,13 +566,13 @@ NSInteger const kRTCAudioSessionErrorConfiguration = -2;
}
- (void)notifyMediaServicesWereLost {
for (id<RTCAudioSessionDelegate> delegate in self.delegates) {
for (auto delegate : self.delegates) {
[delegate audioSessionMediaServicesWereLost:self];
}
}
- (void)notifyMediaServicesWereReset {
for (id<RTCAudioSessionDelegate> delegate in self.delegates) {
for (auto delegate : self.delegates) {
[delegate audioSessionMediaServicesWereReset:self];
}
}

View File

@ -0,0 +1,30 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
namespace webrtc {
class AudioSessionObserver;
}
/** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
 *  methods on the AudioSessionObserver. Allows a C++ observer to receive
 *  Objective-C delegate callbacks without conforming to the protocol itself.
 */
@interface RTCAudioSessionDelegateAdapter : NSObject <RTCAudioSessionDelegate>

/** Use initWithObserver: instead. */
- (instancetype)init NS_UNAVAILABLE;

/** |observer| is a raw pointer and should be kept alive
 *  for this object's lifetime.
 */
- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer
    NS_DESIGNATED_INITIALIZER;

@end

View File

@ -0,0 +1,79 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSessionDelegateAdapter.h"
#include "webrtc/modules/audio_device/ios/audio_session_observer.h"
#import "webrtc/base/objc/RTCLogging.h"
@implementation RTCAudioSessionDelegateAdapter {
  // Raw pointer; the caller guarantees it outlives this adapter (see header).
  webrtc::AudioSessionObserver *_observer;
}

- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer {
  NSParameterAssert(observer);
  if (self = [super init]) {
    _observer = observer;
  }
  return self;
}

#pragma mark - RTCAudioSessionDelegate

- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
  _observer->OnInterruptionBegin();
}

- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
                   shouldResumeSession:(BOOL)shouldResumeSession {
  // Note: |shouldResumeSession| is not forwarded; AudioSessionObserver's
  // OnInterruptionEnd() takes no arguments.
  _observer->OnInterruptionEnd();
}

- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
           reason:(AVAudioSessionRouteChangeReason)reason
    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
  // Every reason except RouteConfigurationChange is forwarded as a "valid"
  // route change.
  switch (reason) {
    case AVAudioSessionRouteChangeReasonUnknown:
    case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
    case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
    case AVAudioSessionRouteChangeReasonCategoryChange:
      // It turns out that we see a category change (at least in iOS 9.2)
      // when making a switch from a BT device to e.g. Speaker using the
      // iOS Control Center and that we therefore must check if the sample
      // rate has changed. And if so is the case, restart the audio unit.
    case AVAudioSessionRouteChangeReasonOverride:
    case AVAudioSessionRouteChangeReasonWakeFromSleep:
    case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
      _observer->OnValidRouteChange();
      break;
    case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
      // The set of input and output ports has not changed, but their
      // configuration has, e.g., a ports selected data source has
      // changed. Ignore this type of route change since we are focusing
      // on detecting headset changes.
      RTCLog(@"Ignoring RouteConfigurationChange");
      break;
  }
}

// The remaining delegate callbacks are intentionally ignored:
// AudioSessionObserver declares no corresponding methods.

- (void)audioSessionMediaServicesWereLost:(RTCAudioSession *)session {
}

- (void)audioSessionMediaServicesWereReset:(RTCAudioSession *)session {
}

- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
}

- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
}

@end

View File

@ -13,6 +13,39 @@
#include "testing/gtest/include/gtest/gtest.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession.h"
#import "webrtc/modules/audio_device/ios/objc/RTCAudioSession+Private.h"
// Minimal no-op RTCAudioSessionDelegate used below to exercise the
// add/remove/push delegate behavior and weak-reference zeroing.
@interface RTCAudioSessionTestDelegate : NSObject <RTCAudioSessionDelegate>
@end

@implementation RTCAudioSessionTestDelegate

- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
}

- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
                   shouldResumeSession:(BOOL)shouldResumeSession {
}

- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
           reason:(AVAudioSessionRouteChangeReason)reason
    previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
}

- (void)audioSessionMediaServicesWereLost:(RTCAudioSession *)session {
}

- (void)audioSessionMediaServicesWereReset:(RTCAudioSession *)session {
}

- (void)audioSessionShouldConfigure:(RTCAudioSession *)session {
}

- (void)audioSessionShouldUnconfigure:(RTCAudioSession *)session {
}

@end
@interface RTCAudioSessionTest : NSObject
@ -36,9 +69,111 @@
EXPECT_FALSE(session.isLocked);
}
// Verifies that the delegate count grows as delegates are added and that
// removing each one empties the session's delegate list.
- (void)testAddAndRemoveDelegates {
  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  // Strong references keep the weakly held delegates alive for the test.
  NSMutableArray *delegates = [NSMutableArray array];
  const size_t count = 5;
  for (size_t i = 0; i < count; ++i) {
    RTCAudioSessionTestDelegate *delegate =
        [[RTCAudioSessionTestDelegate alloc] init];
    [session addDelegate:delegate];
    [delegates addObject:delegate];
    EXPECT_EQ(i + 1, session.delegates.size());
  }
  [delegates enumerateObjectsUsingBlock:^(RTCAudioSessionTestDelegate *obj,
                                          NSUInteger idx,
                                          BOOL *stop) {
    [session removeDelegate:obj];
  }];
  EXPECT_EQ(0u, session.delegates.size());
}
// Verifies that pushDelegate: places a delegate at the front of the list and
// that subsequent addDelegate: calls do not displace it.
- (void)testPushDelegate {
  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  // Strong references keep the weakly held delegates alive for the test.
  NSMutableArray *delegates = [NSMutableArray array];
  const size_t count = 2;
  for (size_t i = 0; i < count; ++i) {
    RTCAudioSessionTestDelegate *delegate =
        [[RTCAudioSessionTestDelegate alloc] init];
    [session addDelegate:delegate];
    [delegates addObject:delegate];
  }

  // Test that it gets added to the front of the list.
  RTCAudioSessionTestDelegate *pushedDelegate =
      [[RTCAudioSessionTestDelegate alloc] init];
  [session pushDelegate:pushedDelegate];
  EXPECT_TRUE(pushedDelegate == session.delegates[0]);

  // Test that it stays at the front of the list.
  for (size_t i = 0; i < count; ++i) {
    RTCAudioSessionTestDelegate *delegate =
        [[RTCAudioSessionTestDelegate alloc] init];
    [session addDelegate:delegate];
    [delegates addObject:delegate];
  }
  EXPECT_TRUE(pushedDelegate == session.delegates[0]);

  // Test that the next one goes to the front too.
  pushedDelegate = [[RTCAudioSessionTestDelegate alloc] init];
  [session pushDelegate:pushedDelegate];
  EXPECT_TRUE(pushedDelegate == session.delegates[0]);
}
// Tests that delegates added to the audio session properly zero out. This is
// checking an implementation detail (that vectors of __weak work as expected).
- (void)testZeroingWeakDelegate {
  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  @autoreleasepool {
    // Add a delegate to the session. There should be one delegate at this
    // point.
    RTCAudioSessionTestDelegate *delegate =
        [[RTCAudioSessionTestDelegate alloc] init];
    [session addDelegate:delegate];
    EXPECT_EQ(1u, session.delegates.size());
    EXPECT_TRUE(session.delegates[0]);
  }

  // The previously created delegate should've de-alloced, leaving a nil ptr.
  EXPECT_FALSE(session.delegates[0]);

  RTCAudioSessionTestDelegate *delegate =
      [[RTCAudioSessionTestDelegate alloc] init];
  [session addDelegate:delegate];
  // On adding a new delegate, nil ptrs should've been cleared.
  EXPECT_EQ(1u, session.delegates.size());
  EXPECT_TRUE(session.delegates[0]);
}
@end
TEST(RTCAudioSessionTest, LockForConfiguration) {
namespace webrtc {
// Test fixture that strips any delegates left on the shared RTCAudioSession
// singleton after each test so state does not leak between test cases.
// Fix: marked TearDown() with override (it overrides ::testing::Test's
// virtual), matching the override usage elsewhere in this change.
class AudioSessionTest : public ::testing::Test {
 protected:
  void TearDown() override {
    RTCAudioSession *session = [RTCAudioSession sharedInstance];
    // session.delegates returns a copy, so mutating the session's delegate
    // list while iterating here is safe.
    for (id<RTCAudioSessionDelegate> delegate : session.delegates) {
      [session removeDelegate:delegate];
    }
  }
};
// Each gtest case simply instantiates the Objective-C test object and
// forwards to the matching test method on RTCAudioSessionTest.
TEST_F(AudioSessionTest, LockForConfiguration) {
  RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
  [test testLockForConfiguration];
}

TEST_F(AudioSessionTest, AddAndRemoveDelegates) {
  RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
  [test testAddAndRemoveDelegates];
}

TEST_F(AudioSessionTest, PushDelegate) {
  RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
  [test testPushDelegate];
}

TEST_F(AudioSessionTest, ZeroingWeakDelegate) {
  RTCAudioSessionTest *test = [[RTCAudioSessionTest alloc] init];
  [test testZeroingWeakDelegate];
}
} // namespace webrtc

View File

@ -481,6 +481,9 @@
'audio_device/ios/audio_device_unittest_ios.cc',
'audio_device/ios/objc/RTCAudioSessionTest.mm',
],
'xcode_settings': {
'OTHER_LDFLAGS': ['-ObjC'],
},
# This needs to be kept in sync with modules_unittests.isolate.
'mac_bundle_resources': [
'<(DEPTH)/data/audio_processing/output_data_float.pb',