diff --git a/src/build/common.gypi b/src/build/common.gypi index e41cc26212..b02450f9fb 100644 --- a/src/build/common.gypi +++ b/src/build/common.gypi @@ -60,6 +60,7 @@ # Disable these to not build components which can be externally provided. 'build_libjpeg%': 1, 'build_libyuv%': 1, + 'build_libvpx%': 1, 'libyuv_dir%': '<(DEPTH)/third_party/libyuv', @@ -98,6 +99,14 @@ # http://code.google.com/p/webrtc/issues/detail?id=163 'clang_use_chrome_plugins%': 0, }], + ['OS=="ios"', { + 'enable_video%': 0, + 'enable_protobuf%': 0, + 'build_libjpeg%': 0, + 'build_libyuv%': 0, + 'build_libvpx%': 0, + 'include_tests%': 0, + }], ], # conditions }, 'target_defaults': { @@ -156,6 +165,14 @@ }], ], }], + ['OS=="ios"', { + 'defines': [ + 'WEBRTC_MAC', + 'MAC_IPHONE', # TODO(sjlee): This should be changed to WEBRTC_IOS. + 'WEBRTC_THREAD_RR', + 'WEBRTC_CLOCK_TYPE_REALTIME', + ], + }], ['OS=="linux"', { 'defines': [ 'WEBRTC_LINUX', diff --git a/src/common_audio/signal_processing/spl_sqrt_floor_arm.s b/src/common_audio/signal_processing/spl_sqrt_floor_arm.s index cfd9ed02d1..a2c5b7d0d2 100644 --- a/src/common_audio/signal_processing/spl_sqrt_floor_arm.s +++ b/src/common_audio/signal_processing/spl_sqrt_floor_arm.s @@ -12,7 +12,6 @@ .align 2 WebRtcSpl_SqrtFloor: -.fnstart mov r1, #3 << 30 mov r2, #1 << 30 @@ -85,4 +84,3 @@ WebRtcSpl_SqrtFloor: bic r0, r2, #3 << 30 @ for rounding add: cmp r0, r2 adc r2, #1 bx lr -.fnend diff --git a/src/modules/audio_device/main/source/audio_device.gypi b/src/modules/audio_device/main/source/audio_device.gypi index 8457737ef4..d5ed6b73c2 100644 --- a/src/modules/audio_device/main/source/audio_device.gypi +++ b/src/modules/audio_device/main/source/audio_device.gypi @@ -51,16 +51,21 @@ 'linux', ], }], # OS==linux + ['OS=="ios"', { + 'include_dirs': [ + 'ios', + ], + }], # OS==ios ['OS=="mac"', { - 'include_dirs': [ - 'mac', - ], + 'include_dirs': [ + 'mac', + ], }], # OS==mac ['OS=="win"', { - 'include_dirs': [ - 'win', - '../../../../../..', - ], + 
'include_dirs': [ + 'win', + '../../../../../..', + ], }], ['OS=="android"', { 'include_dirs': [ @@ -84,6 +89,10 @@ 'linux/audio_mixer_manager_alsa_linux.h', 'linux/latebindingsymboltable_linux.cc', 'linux/latebindingsymboltable_linux.h', + 'ios/audio_device_ios.cc', + 'ios/audio_device_ios.h', + 'ios/audio_device_utility_ios.cc', + 'ios/audio_device_utility_ios.h', 'mac/audio_device_mac.cc', 'mac/audio_device_mac.h', 'mac/audio_device_utility_mac.cc', @@ -140,7 +149,7 @@ }], ], }], - ['OS=="mac"', { + ['OS=="mac" or OS=="ios"', { 'link_settings': { 'libraries': [ '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework', diff --git a/src/modules/audio_device/main/source/audio_device_impl.cc b/src/modules/audio_device/main/source/audio_device_impl.cc index 455e234a45..496f6174c4 100644 --- a/src/modules/audio_device/main/source/audio_device_impl.cc +++ b/src/modules/audio_device/main/source/audio_device_impl.cc @@ -39,8 +39,8 @@ #include "audio_device_pulse_linux.h" #endif #elif defined(MAC_IPHONE) - #include "audio_device_utility_iphone.h" - #include "audio_device_iphone.h" + #include "audio_device_utility_ios.h" + #include "audio_device_ios.h" #elif defined(WEBRTC_MAC) #include "audio_device_utility_mac.h" #include "audio_device_mac.h" @@ -160,6 +160,9 @@ WebRtc_Word32 AudioDeviceModuleImpl::CheckPlatform() #elif defined(WEBRTC_LINUX) platform = kPlatformLinux; WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is LINUX"); +#elif defined(MAC_IPHONE) + platform = kPlatformIOS; + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is IOS"); #elif defined(WEBRTC_MAC) platform = kPlatformMac; WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is MAC"); diff --git a/src/modules/audio_device/main/source/audio_device_impl.h b/src/modules/audio_device/main/source/audio_device_impl.h index 559f8aae54..91a853466c 100644 --- a/src/modules/audio_device/main/source/audio_device_impl.h +++ 
b/src/modules/audio_device/main/source/audio_device_impl.h @@ -31,7 +31,8 @@ public: kPlatformWinCe = 2, kPlatformLinux = 3, kPlatformMac = 4, - kPlatformAndroid = 5 + kPlatformAndroid = 5, + kPlatformIOS = 6 }; WebRtc_Word32 CheckPlatform(); diff --git a/src/modules/audio_device/main/source/ios/audio_device_ios.cc b/src/modules/audio_device/main/source/ios/audio_device_ios.cc new file mode 100644 index 0000000000..b73edc24cf --- /dev/null +++ b/src/modules/audio_device/main/source/ios/audio_device_ios.cc @@ -0,0 +1,1895 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include // AudioSession + +#include "audio_device_ios.h" + +#include "trace.h" +#include "thread_wrapper.h" + +namespace webrtc { +AudioDeviceIPhone::AudioDeviceIPhone(const WebRtc_Word32 id) + : + _ptrAudioBuffer(NULL), + _critSect(*CriticalSectionWrapper::CreateCriticalSection()), + _captureWorkerThread(NULL), + _captureWorkerThreadId(0), + _id(id), + _auRemoteIO(NULL), + _initialized(false), + _isShutDown(false), + _recording(false), + _playing(false), + _recIsInitialized(false), + _playIsInitialized(false), + _recordingDeviceIsSpecified(false), + _playoutDeviceIsSpecified(false), + _micIsInitialized(false), + _speakerIsInitialized(false), + _AGC(false), + _adbSampFreq(0), + _recordingDelay(0), + _playoutDelay(0), + _playoutDelayMeasurementCounter(9999), + _recordingDelayHWAndOS(0), + _recordingDelayMeasurementCounter(9999), + _playWarning(0), + _playError(0), + _recWarning(0), + _recError(0), + _playoutBufferUsed(0), + _recordingCurrentSeq(0), + _recordingBufferTotalSize(0) { + WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, + 
"%s created", __FUNCTION__); + + memset(_playoutBuffer, 0, sizeof(_playoutBuffer)); + memset(_recordingBuffer, 0, sizeof(_recordingBuffer)); + memset(_recordingLength, 0, sizeof(_recordingLength)); + memset(_recordingSeqNumber, 0, sizeof(_recordingSeqNumber)); +} + +AudioDeviceIPhone::~AudioDeviceIPhone() { + WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, + "%s destroyed", __FUNCTION__); + + Terminate(); + + delete &_critSect; +} + + +// ============================================================================ +// API +// ============================================================================ + +void AudioDeviceIPhone::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + _ptrAudioBuffer = audioBuffer; + + // inform the AudioBuffer about default settings for this implementation + _ptrAudioBuffer->SetRecordingSampleRate(ENGINE_REC_BUF_SIZE_IN_SAMPLES); + _ptrAudioBuffer->SetPlayoutSampleRate(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES); + _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS); + _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS); +} + +WebRtc_Word32 AudioDeviceIPhone::ActiveAudioLayer( + AudioDeviceModule::AudioLayer& audioLayer) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + audioLayer = AudioDeviceModule::kPlatformDefaultAudio; + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::Init() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (_initialized) { + return 0; + } + + _isShutDown = false; + + // Create and start capture thread + if (_captureWorkerThread == NULL) { + _captureWorkerThread + = ThreadWrapper::CreateThread(RunCapture, this, kRealtimePriority, + "CaptureWorkerThread"); + + if (_captureWorkerThread == NULL) { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, + _id, "CreateThread() error"); 
+ return -1; + } + + unsigned int threadID(0); + bool res = _captureWorkerThread->Start(threadID); + _captureWorkerThreadId = static_cast(threadID); + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, + _id, "CaptureWorkerThread started (res=%d)", res); + } else { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, + _id, "Thread already created"); + } + + // Set preferred hardware sample rate to 16 kHz + Float64 sampleRate(16000.0); + OSStatus result = AudioSessionSetProperty( + kAudioSessionProperty_PreferredHardwareSampleRate, + sizeof(sampleRate), &sampleRate); + if (0 != result) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + "Could not set preferred sample rate (result=%d)", result); + } + + _playWarning = 0; + _playError = 0; + _recWarning = 0; + _recError = 0; + + _initialized = true; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::Terminate() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + if (!_initialized) { + return 0; + } + + + // Stop capture thread + if (_captureWorkerThread != NULL) { + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, + _id, "Stopping CaptureWorkerThread"); + bool res = _captureWorkerThread->Stop(); + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, + _id, "CaptureWorkerThread stopped (res=%d)", res); + delete _captureWorkerThread; + _captureWorkerThread = NULL; + } + + // Shut down AU Remote IO + ShutdownPlayOrRecord(); + + _isShutDown = true; + _initialized = false; + _speakerIsInitialized = false; + _micIsInitialized = false; + _playoutDeviceIsSpecified = false; + _recordingDeviceIsSpecified = false; + return 0; +} + +bool AudioDeviceIPhone::Initialized() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + return (_initialized); +} + +WebRtc_Word32 AudioDeviceIPhone::SpeakerIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + // speaker is always available in IOS + available = true; + return 0; +} 
+ +WebRtc_Word32 AudioDeviceIPhone::InitSpeaker() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_initialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, + _id, " Not initialized"); + return -1; + } + + if (_playing) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, + _id, " Cannot init speaker when playing"); + return -1; + } + + if (!_playoutDeviceIsSpecified) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, + _id, " Playout device is not specified"); + return -1; + } + + // Do nothing + _speakerIsInitialized = true; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::MicrophoneIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; + + OSStatus result = -1; + UInt32 channel = 0; + UInt32 size = sizeof(channel); + result = AudioSessionGetProperty(kAudioSessionProperty_AudioInputAvailable, + &size, &channel); + if (channel != 0) { + // API is not supported on this platform, we return available = true + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, + _id, " API call not supported on this version"); + available = true; + return 0; + } + + available = (channel == 0) ? 
false : true; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::InitMicrophone() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_initialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, + _id, " Not initialized"); + return -1; + } + + if (_recording) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, + _id, " Cannot init mic when recording"); + return -1; + } + + if (!_recordingDeviceIsSpecified) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, + _id, " Recording device is not specified"); + return -1; + } + + // Do nothing + + _micIsInitialized = true; + + return 0; +} + +bool AudioDeviceIPhone::SpeakerIsInitialized() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + return _speakerIsInitialized; +} + +bool AudioDeviceIPhone::MicrophoneIsInitialized() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + return _micIsInitialized; +} + +WebRtc_Word32 AudioDeviceIPhone::SpeakerVolumeIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; // Speaker volume not supported on iOS + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetSpeakerVolume(WebRtc_UWord32 volume) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetSpeakerVolume(volume=%u)", volume); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 AudioDeviceIPhone::SpeakerVolume(WebRtc_UWord32& volume) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, + WebRtc_UWord16 volumeRight) { + WEBRTC_TRACE( + kTraceModuleCall, 
+ kTraceAudioDevice, + _id, + "AudioDeviceIPhone::SetWaveOutVolume(volumeLeft=%u, volumeRight=%u)", + volumeLeft, volumeRight); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + + return -1; +} + +WebRtc_Word32 +AudioDeviceIPhone::WaveOutVolume(WebRtc_UWord16& /*volumeLeft*/, + WebRtc_UWord16& /*volumeRight*/) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 AudioDeviceIPhone::MinSpeakerVolume( + WebRtc_UWord32& minVolume) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 AudioDeviceIPhone::SpeakerMuteIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; // Speaker mute not supported on iOS + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetSpeakerMute(bool enable) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 AudioDeviceIPhone::SpeakerMute(bool& 
enabled) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 AudioDeviceIPhone::MicrophoneMuteIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; // Mic mute not supported on iOS + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetMicrophoneMute(bool enable) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 AudioDeviceIPhone::MicrophoneMute(bool& enabled) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 AudioDeviceIPhone::MicrophoneBoostIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; // Mic boost not supported on iOS + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetMicrophoneBoost(bool enable) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetMicrophoneBoost(enable=%u)", enable); + + if (!_micIsInitialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Microphone not initialized"); + return -1; + } + + if (enable) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " SetMicrophoneBoost cannot be enabled on this platform"); + return -1; + } + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::MicrophoneBoost(bool& enabled) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + if (!_micIsInitialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Microphone not initialized"); + return -1; 
+ } + + enabled = false; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::StereoRecordingIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; // Stereo recording not supported on iOS + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetStereoRecording(bool enable) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetStereoRecording(enable=%u)", enable); + + if (enable) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Stereo recording is not supported on this platform"); + return -1; + } + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::StereoRecording(bool& enabled) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + enabled = false; + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::StereoPlayoutIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; // Stereo playout not supported on iOS + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetStereoPlayout(bool enable) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetStereoPlayout(enable=%u)", enable); + + if (enable) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Stereo playout is not supported on this platform"); + return -1; + } + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::StereoPlayout(bool& enabled) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + enabled = false; + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetAGC(bool enable) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetAGC(enable=%d)", enable); + + _AGC = enable; + + return 0; +} + +bool AudioDeviceIPhone::AGC() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + return _AGC; +} + +WebRtc_Word32 
AudioDeviceIPhone::MicrophoneVolumeIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + available = false; // Mic volume not supported on IOS + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::SetMicrophoneVolume(WebRtc_UWord32 volume) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetMicrophoneVolume(volume=%u)", volume); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::MicrophoneVolume(WebRtc_UWord32& volume) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::MicrophoneVolumeStepSize( + WebRtc_UWord16& stepSize) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word16 AudioDeviceIPhone::PlayoutDevices() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + return (WebRtc_Word16)1; +} + +WebRtc_Word32 AudioDeviceIPhone::SetPlayoutDevice(WebRtc_UWord16 index) { + WEBRTC_TRACE(kTraceModuleCall, 
kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetPlayoutDevice(index=%u)", index); + + if (_playIsInitialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Playout already initialized"); + return -1; + } + + if (index !=0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " SetPlayoutDevice invalid index"); + return -1; + } + _playoutDeviceIsSpecified = true; + + return 0; +} + +WebRtc_Word32 + AudioDeviceIPhone::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "WindowsDeviceType not supported"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::PlayoutDeviceName(WebRtc_UWord16 index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::PlayoutDeviceName(index=%u)", index); + + if (index != 0) { + return -1; + } + // return empty strings + memset(name, 0, kAdmMaxDeviceNameSize); + if (guid != NULL) { + memset(guid, 0, kAdmMaxGuidSize); + } + + return 0; +} + +WebRtc_Word32 + AudioDeviceIPhone::RecordingDeviceName(WebRtc_UWord16 index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::RecordingDeviceName(index=%u)", index); + + if (index != 0) { + return -1; + } + // return empty strings + memset(name, 0, kAdmMaxDeviceNameSize); + if (guid != NULL) { + memset(guid, 0, kAdmMaxGuidSize); + } + + return 0; +} + +WebRtc_Word16 AudioDeviceIPhone::RecordingDevices() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + return (WebRtc_Word16)1; +} + +WebRtc_Word32 AudioDeviceIPhone::SetRecordingDevice(WebRtc_UWord16 index) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetRecordingDevice(index=%u)", index); + + if (_recIsInitialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Recording already initialized"); + return -1; 
+ } + + if (index !=0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " SetRecordingDevice invalid index"); + return -1; + } + + _recordingDeviceIsSpecified = true; + + return 0; +} + +WebRtc_Word32 + AudioDeviceIPhone::SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "WindowsDeviceType not supported"); + return -1; +} + +// ---------------------------------------------------------------------------- +// SetLoudspeakerStatus +// +// Overrides the receiver playout route to speaker instead. See +// kAudioSessionProperty_OverrideCategoryDefaultToSpeaker in CoreAudio +// documentation. +// ---------------------------------------------------------------------------- + +WebRtc_Word32 AudioDeviceIPhone::SetLoudspeakerStatus(bool enable) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetLoudspeakerStatus(enable=%d)", enable); + + UInt32 doChangeDefaultRoute = enable ? 1 : 0; + OSStatus err = AudioSessionSetProperty( + kAudioSessionProperty_OverrideCategoryDefaultToSpeaker, + sizeof(doChangeDefaultRoute), &doChangeDefaultRoute); + + if (err != noErr) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "Error changing default output route " \ + "(only available on iOS 3.1 or later)"); + return -1; + } + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::GetLoudspeakerStatus(bool &enabled) const { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetLoudspeakerStatus(enabled=?)"); + + UInt32 route(0); + UInt32 size = sizeof(route); + OSStatus err = AudioSessionGetProperty( + kAudioSessionProperty_OverrideCategoryDefaultToSpeaker, + &size, &route); + if (err != noErr) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "Error changing default output route " \ + "(only available on iOS 3.1 or later)"); + return -1; + } + + enabled = route == 1 ? 
true: false; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::PlayoutIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + available = false; + + // Try to initialize the playout side + WebRtc_Word32 res = InitPlayout(); + + // Cancel effect of initialization + StopPlayout(); + + if (res != -1) { + available = true; + } + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::RecordingIsAvailable(bool& available) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + available = false; + + // Try to initialize the recording side + WebRtc_Word32 res = InitRecording(); + + // Cancel effect of initialization + StopRecording(); + + if (res != -1) { + available = true; + } + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::InitPlayout() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_initialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, " Not initialized"); + return -1; + } + + if (_playing) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Playout already started"); + return -1; + } + + if (_playIsInitialized) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playout already initialized"); + return 0; + } + + if (!_playoutDeviceIsSpecified) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Playout device is not specified"); + return -1; + } + + // Initialize the speaker + if (InitSpeaker() == -1) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " InitSpeaker() failed"); + } + + _playIsInitialized = true; + + if (!_recIsInitialized) { + // Audio init + if (InitPlayOrRecord() == -1) { + // todo: Handle error + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " InitPlayOrRecord() failed"); + } + } else { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Recording already initialized - InitPlayOrRecord() not called"); + } + + return 0; +} + 
+bool AudioDeviceIPhone::PlayoutIsInitialized() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + return (_playIsInitialized); +} + +WebRtc_Word32 AudioDeviceIPhone::InitRecording() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_initialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Not initialized"); + return -1; + } + + if (_recording) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Recording already started"); + return -1; + } + + if (_recIsInitialized) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Recording already initialized"); + return 0; + } + + if (!_recordingDeviceIsSpecified) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Recording device is not specified"); + return -1; + } + + // Initialize the microphone + if (InitMicrophone() == -1) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " InitMicrophone() failed"); + } + + _recIsInitialized = true; + + if (!_playIsInitialized) { + // Audio init + if (InitPlayOrRecord() == -1) { + // todo: Handle error + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " InitPlayOrRecord() failed"); + } + } else { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playout already initialized - InitPlayOrRecord() " \ + "not called"); + } + + return 0; +} + +bool AudioDeviceIPhone::RecordingIsInitialized() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + return (_recIsInitialized); +} + +WebRtc_Word32 AudioDeviceIPhone::StartRecording() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_recIsInitialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Recording not initialized"); + return -1; + } + + if (_recording) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Recording already started"); + return 
0; + } + + // Reset recording buffer + memset(_recordingBuffer, 0, sizeof(_recordingBuffer)); + memset(_recordingLength, 0, sizeof(_recordingLength)); + memset(_recordingSeqNumber, 0, sizeof(_recordingSeqNumber)); + _recordingCurrentSeq = 0; + _recordingBufferTotalSize = 0; + _recordingDelay = 0; + _recordingDelayHWAndOS = 0; + // Make sure first call to update delay function will update delay + _recordingDelayMeasurementCounter = 9999; + _recWarning = 0; + _recError = 0; + + if (!_playing) { + // Start AU Remote IO + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + " Starting AU Remote IO"); + OSStatus result = AudioOutputUnitStart(_auRemoteIO); + if (0 != result) { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, + " Error starting AU Remote IO (result=%d)", result); + return -1; + } + } + + _recording = true; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::StopRecording() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_recIsInitialized) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Recording is not initialized"); + return 0; + } + + _recording = false; + + if (!_playing) { + // Both playout and recording has stopped, shutdown the device + ShutdownPlayOrRecord(); + } + + _recIsInitialized = false; + _micIsInitialized = false; + + return 0; +} + +bool AudioDeviceIPhone::Recording() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + return (_recording); +} + +WebRtc_Word32 AudioDeviceIPhone::StartPlayout() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + // This lock is (among other things) needed to avoid concurrency issues + // with capture thread + // shutting down AU Remote IO + CriticalSectionScoped lock(_critSect); + + if (!_playIsInitialized) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Playout not initialized"); + return -1; + } + + if (_playing) { + 
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playing already started"); + return 0; + } + + // Reset playout buffer + memset(_playoutBuffer, 0, sizeof(_playoutBuffer)); + _playoutBufferUsed = 0; + _playoutDelay = 0; + // Make sure first call to update delay function will update delay + _playoutDelayMeasurementCounter = 9999; + _playWarning = 0; + _playError = 0; + + if (!_recording) { + // Start AU Remote IO + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + " Starting AU Remote IO"); + OSStatus result = AudioOutputUnitStart(_auRemoteIO); + if (0 != result) { + WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, + " Error starting AU Remote IO (result=%d)", result); + return -1; + } + } + + _playing = true; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::StopPlayout() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_playIsInitialized) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playout is not initialized"); + return 0; + } + + _playing = false; + + if (!_recording) { + // Both playout and recording has stopped, signal shutdown the device + ShutdownPlayOrRecord(); + } + + _playIsInitialized = false; + _speakerIsInitialized = false; + + return 0; +} + +bool AudioDeviceIPhone::Playing() const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + return (_playing); +} + +// ---------------------------------------------------------------------------- +// ResetAudioDevice +// +// Disable playout and recording, signal to capture thread to shutdown, +// and set enable states after shutdown to same as current. +// In capture thread audio device will be shutdown, then started again. 
+// ---------------------------------------------------------------------------- +WebRtc_Word32 AudioDeviceIPhone::ResetAudioDevice() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + CriticalSectionScoped lock(_critSect); + + if (!_playIsInitialized && !_recIsInitialized) { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Playout or recording not initialized, doing nothing"); + return 0; // Nothing to reset + } + + // Store the states we have before stopping to restart below + bool initPlay = _playIsInitialized; + bool play = _playing; + bool initRec = _recIsInitialized; + bool rec = _recording; + + int res(0); + + // Stop playout and recording + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + " Stopping playout and recording"); + res += StopPlayout(); + res += StopRecording(); + + // Restart + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + " Restarting playout and recording (%d, %d, %d, %d)", + initPlay, play, initRec, rec); + if (initPlay) res += InitPlayout(); + if (initRec) res += InitRecording(); + if (play) res += StartPlayout(); + if (rec) res += StartRecording(); + + if (0 != res) { + // Logging is done in init/start/stop calls above + return -1; + } + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::PlayoutDelay(WebRtc_UWord16& delayMS) const { + delayMS = _playoutDelay; + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::RecordingDelay(WebRtc_UWord16& delayMS) const { + delayMS = _recordingDelay; + return 0; +} + +WebRtc_Word32 + AudioDeviceIPhone::SetPlayoutBuffer( + const AudioDeviceModule::BufferType type, + WebRtc_UWord16 sizeMS) { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "AudioDeviceIPhone::SetPlayoutBuffer(type=%u, sizeMS=%u)", + type, sizeMS); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +WebRtc_Word32 + AudioDeviceIPhone::PlayoutBuffer(AudioDeviceModule::BufferType& type, + WebRtc_UWord16& sizeMS) const { 
+ WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + type = AudioDeviceModule::kAdaptiveBufferSize; + + sizeMS = _playoutDelay; + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::CPULoad(WebRtc_UWord16& /*load*/) const { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " API call not supported on this platform"); + return -1; +} + +bool AudioDeviceIPhone::PlayoutWarning() const { + return (_playWarning > 0); +} + +bool AudioDeviceIPhone::PlayoutError() const { + return (_playError > 0); +} + +bool AudioDeviceIPhone::RecordingWarning() const { + return (_recWarning > 0); +} + +bool AudioDeviceIPhone::RecordingError() const { + return (_recError > 0); +} + +void AudioDeviceIPhone::ClearPlayoutWarning() { + _playWarning = 0; +} + +void AudioDeviceIPhone::ClearPlayoutError() { + _playError = 0; +} + +void AudioDeviceIPhone::ClearRecordingWarning() { + _recWarning = 0; +} + +void AudioDeviceIPhone::ClearRecordingError() { + _recError = 0; +} + +// ============================================================================ +// Private Methods +// ============================================================================ + +WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + OSStatus result = -1; + + // Check if already initialized + if (NULL != _auRemoteIO) { + // We already have initialized before and created any of the audio unit, + // check that all exist + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " Already initialized"); + // todo: Call AudioUnitReset() here and empty all buffers? 
+ return 0; + } + + // Create AU Remote IO + AudioComponentDescription desc; + AudioComponent comp; + + desc.componentType = kAudioUnitType_Output; + desc.componentSubType = kAudioUnitSubType_RemoteIO; + desc.componentManufacturer = kAudioUnitManufacturer_Apple; + desc.componentFlags = 0; + desc.componentFlagsMask = 0; + + comp = AudioComponentFindNext(NULL, &desc); + if (NULL == comp) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not find audio component for AU Remote IO"); + return -1; + } + + result = AudioComponentInstanceNew(comp, &_auRemoteIO); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not create AU Remote IO instance (result=%d)", + result); + return -1; + } + + ////////////////////// + // Setup AU remote IO + + // Note: For AU Remote IO element 0 is output bus, element 1 is input bus + // for global scope element is irrelevant (always use element 0) + + // Enable IO on both elements + + // todo: Below we just log and continue upon error. We might want + // to close AU and return error for some cases. + // todo: Log info about setup. 
+ + UInt32 enableIO = 1; + result = AudioUnitSetProperty(_auRemoteIO, + kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, + 1, // input bus + &enableIO, + sizeof(enableIO)); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not enable IO on input (result=%d)", result); + } + + result = AudioUnitSetProperty(_auRemoteIO, + kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Output, + 0, // output bus + &enableIO, + sizeof(enableIO)); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not enable IO on output (result=%d)", result); + } + + // Disable AU buffer allocation for the recorder, we allocate our own + UInt32 flag = 0; + result = AudioUnitSetProperty( + _auRemoteIO, kAudioUnitProperty_ShouldAllocateBuffer, + kAudioUnitScope_Output, 1, &flag, sizeof(flag)); + if (0 != result) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not disable AU buffer allocation (result=%d)", + result); + // Should work anyway + } + + // Initialize here already to be able to get/set stream properties. + result = AudioUnitInitialize(_auRemoteIO); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not init AU Remote IO (result=%d)", result); + } + + // Get stream format for out/0 + AudioStreamBasicDescription playoutDesc; + UInt32 size = sizeof(playoutDesc); + result = AudioUnitGetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, 0, &playoutDesc, + &size); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not get stream format AU Remote IO out/0 (result=%d)", + result); + } + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " AU Remote IO playout opened in sampling rate %f", + playoutDesc.mSampleRate); + + // Store the sampling frequency to use towards the Audio Device Buffer + // todo: Add 48 kHz (increase buffer sizes). Other fs? 
+ if ((playoutDesc.mSampleRate > 44090.0) + && (playoutDesc.mSampleRate < 44110.0)) { + _adbSampFreq = 44000; + } else if ((playoutDesc.mSampleRate > 15990.0) + && (playoutDesc.mSampleRate < 16010.0)) { + _adbSampFreq = 16000; + } else if ((playoutDesc.mSampleRate > 7990.0) + && (playoutDesc.mSampleRate < 8010.0)) { + _adbSampFreq = 8000; + } else { + _adbSampFreq = 0; + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " AU Remote IO out/0 opened in unknown sampling rate (%f)", + playoutDesc.mSampleRate); + // todo: We should bail out here. + } + + // Set the audio device buffer sampling rate, + // we assume we get the same for play and record + if (_ptrAudioBuffer->SetRecordingSampleRate(_adbSampFreq) < 0) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not set audio device buffer recording sampling rate (%d)", + _adbSampFreq); + } + + if (_ptrAudioBuffer->SetPlayoutSampleRate(_adbSampFreq) < 0) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not set audio device buffer playout sampling rate (%d)", + _adbSampFreq); + } + + // Set stream format for in/0 (use same sampling frequency as for out/0) + playoutDesc.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger + | kLinearPCMFormatFlagIsPacked + | kLinearPCMFormatFlagIsNonInterleaved; + playoutDesc.mBytesPerPacket = 2; + playoutDesc.mFramesPerPacket = 1; + playoutDesc.mBytesPerFrame = 2; + playoutDesc.mChannelsPerFrame = 1; + playoutDesc.mBitsPerChannel = 16; + result = AudioUnitSetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, 0, &playoutDesc, size); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not set stream format AU Remote IO in/0 (result=%d)", + result); + } + + // Get stream format for in/1 + AudioStreamBasicDescription recordingDesc; + size = sizeof(recordingDesc); + result = AudioUnitGetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, 1, &recordingDesc, + &size); + if (0 != 
result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not get stream format AU Remote IO in/1 (result=%d)", + result); + } + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, + " AU Remote IO recording opened in sampling rate %f", + recordingDesc.mSampleRate); + + if (static_cast(playoutDesc.mSampleRate) + != static_cast(recordingDesc.mSampleRate)) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " AU Remote IO recording and playout opened " \ + "in different sampling rates"); + // todo: Bail out if rec and play sampling rates are not the same? + // Add handling of different sampling rates? + } + + // Set stream format for out/1 (use same sampling frequency as for in/1) + recordingDesc.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger + | kLinearPCMFormatFlagIsPacked + | kLinearPCMFormatFlagIsNonInterleaved; + + recordingDesc.mBytesPerPacket = 2; + recordingDesc.mFramesPerPacket = 1; + recordingDesc.mBytesPerFrame = 2; + recordingDesc.mChannelsPerFrame = 1; + recordingDesc.mBitsPerChannel = 16; + result = AudioUnitSetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, 1, &recordingDesc, + size); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not set stream format AU Remote IO out/1 (result=%d)", + result); + } + + // Set recording callback + AURenderCallbackStruct auCbS; + memset(&auCbS, 0, sizeof(auCbS)); + auCbS.inputProc = RecordProcess; + auCbS.inputProcRefCon = this; + result = AudioUnitSetProperty(_auRemoteIO, + kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, + &auCbS, sizeof(auCbS)); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not set record callback for AU Remote IO (result=%d)", + result); + } + + // Set playout callback + memset(&auCbS, 0, sizeof(auCbS)); + auCbS.inputProc = PlayoutProcess; + auCbS.inputProcRefCon = this; + result = AudioUnitSetProperty(_auRemoteIO, + kAudioUnitProperty_SetRenderCallback, 
kAudioUnitScope_Global, 0, + &auCbS, sizeof(auCbS)); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + " Could not set play callback for AU Remote IO (result=%d)", + result); + } + + // Get hardware sample rate for logging (see if we get what we asked for) + Float64 sampleRate(0.0); + size = sizeof(sampleRate); + result = AudioSessionGetProperty( + kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate); + if (0 != result) { + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + " Could not get current HW sample rate (result=%d)", result); + } + WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, + " Current HW sample rate is %f, ADB sample rate is %d", + sampleRate, _adbSampFreq); + + return 0; +} + +WebRtc_Word32 AudioDeviceIPhone::ShutdownPlayOrRecord() { + WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__); + + // Close and delete AU + OSStatus result = -1; + if (NULL != _auRemoteIO) { + result = AudioOutputUnitStop(_auRemoteIO); + if (0 != result) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Error stopping AU Remote IO (result=%d)", result); + } + result = AudioUnitUninitialize(_auRemoteIO); + if (0 != result) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Error uninitializing AU Remote IO (result=%d)", result); + } + result = AudioComponentInstanceDispose(_auRemoteIO); + if (0 != result) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Error disposing AU Remote IO (result=%d)", result); + } + _auRemoteIO = NULL; + } + + return 0; +} + +// ============================================================================ +// Thread Methods +// ============================================================================ + +OSStatus + AudioDeviceIPhone::RecordProcess(void *inRefCon, + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData) { + AudioDeviceIPhone* ptrThis = 
static_cast(inRefCon); + + return ptrThis->RecordProcessImpl(ioActionFlags, + inTimeStamp, + inBusNumber, + inNumberFrames); +} + + +OSStatus + AudioDeviceIPhone::RecordProcessImpl( + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + WebRtc_UWord32 inBusNumber, + WebRtc_UWord32 inNumberFrames) { + // Setup some basic stuff + // Use temp buffer not to lock up recording buffer more than necessary + // todo: Make dataTmp a member variable with static size that holds + // max possible frames? + WebRtc_Word16* dataTmp = new WebRtc_Word16[inNumberFrames]; + memset(dataTmp, 0, 2*inNumberFrames); + + AudioBufferList abList; + abList.mNumberBuffers = 1; + abList.mBuffers[0].mData = dataTmp; + abList.mBuffers[0].mDataByteSize = 2*inNumberFrames; // 2 bytes/sample + abList.mBuffers[0].mNumberChannels = 1; + + // Get data from mic + OSStatus res = AudioUnitRender(_auRemoteIO, ioActionFlags, inTimeStamp, + inBusNumber, inNumberFrames, &abList); + if (res != 0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Error getting rec data, error = %d", res); + + if (_recWarning > 0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Pending rec warning exists"); + } + _recWarning = 1; + + delete [] dataTmp; + return 0; + } + + if (_recording) { + // Insert all data in temp buffer into recording buffers + // There is zero or one buffer partially full at any given time, + // all others are full or empty + // Full means filled with noSamp10ms samples. 
+ + const unsigned int noSamp10ms = _adbSampFreq / 100; + unsigned int dataPos = 0; + WebRtc_UWord16 bufPos = 0; + WebRtc_Word16 insertPos = -1; + unsigned int nCopy = 0; // Number of samples to copy + + while (dataPos < inNumberFrames) { + // Loop over all recording buffers or + // until we find the partially full buffer + // First choice is to insert into partially full buffer, + // second choice is to insert into empty buffer + bufPos = 0; + insertPos = -1; + nCopy = 0; + while (bufPos < N_REC_BUFFERS) { + if ((_recordingLength[bufPos] > 0) + && (_recordingLength[bufPos] < noSamp10ms)) { + // Found the partially full buffer + insertPos = static_cast(bufPos); + // Don't need to search more, quit loop + bufPos = N_REC_BUFFERS; + } else if ((-1 == insertPos) + && (0 == _recordingLength[bufPos])) { + // Found an empty buffer + insertPos = static_cast(bufPos); + } + ++bufPos; + } + + // Insert data into buffer + if (insertPos > -1) { + // We found a non-full buffer, copy data to it + unsigned int dataToCopy = inNumberFrames - dataPos; + unsigned int currentRecLen = _recordingLength[insertPos]; + unsigned int roomInBuffer = noSamp10ms - currentRecLen; + nCopy = (dataToCopy < roomInBuffer ? 
dataToCopy : roomInBuffer); + + memcpy(&_recordingBuffer[insertPos][currentRecLen], + &dataTmp[dataPos], nCopy*sizeof(WebRtc_Word16)); + if (0 == currentRecLen) { + _recordingSeqNumber[insertPos] = _recordingCurrentSeq; + ++_recordingCurrentSeq; + } + _recordingBufferTotalSize += nCopy; + // Has to be done last to avoid interrupt problems + // between threads + _recordingLength[insertPos] += nCopy; + dataPos += nCopy; + } else { + // Didn't find a non-full buffer + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Could not insert into recording buffer"); + if (_recWarning > 0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Pending rec warning exists"); + } + _recWarning = 1; + dataPos = inNumberFrames; // Don't try to insert more + } + } + } + + delete [] dataTmp; + + return 0; +} + +OSStatus + AudioDeviceIPhone::PlayoutProcess(void *inRefCon, + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData) { + AudioDeviceIPhone* ptrThis = static_cast(inRefCon); + + return ptrThis->PlayoutProcessImpl(inNumberFrames, ioData); +} + +OSStatus + AudioDeviceIPhone::PlayoutProcessImpl(WebRtc_UWord32 inNumberFrames, + AudioBufferList *ioData) { + // Setup some basic stuff +// assert(sizeof(short) == 2); // Assumption for implementation + + WebRtc_Word16* data = + static_cast(ioData->mBuffers[0].mData); + unsigned int dataSizeBytes = ioData->mBuffers[0].mDataByteSize; + unsigned int dataSize = dataSizeBytes/2; // Number of samples + if (dataSize != inNumberFrames) { // Should always be the same + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "dataSize (%u) != inNumberFrames (%u)", + dataSize, (unsigned int)inNumberFrames); + if (_playWarning > 0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Pending play warning exists"); + } + _playWarning = 1; + } + memset(data, 0, dataSizeBytes); // Start with empty buffer + + + // Get playout data from 
Audio Device Buffer + + if (_playing) { + unsigned int noSamp10ms = _adbSampFreq / 100; + // todo: Member variable and allocate when samp freq is determined + WebRtc_Word16* dataTmp = new WebRtc_Word16[noSamp10ms]; + memset(dataTmp, 0, 2*noSamp10ms); + unsigned int dataPos = 0; + int noSamplesOut = 0; + unsigned int nCopy = 0; + + // First insert data from playout buffer if any + if (_playoutBufferUsed > 0) { + nCopy = (dataSize < _playoutBufferUsed) ? + dataSize : _playoutBufferUsed; + if (nCopy != _playoutBufferUsed) { + // todo: If dataSize < _playoutBufferUsed + // (should normally never be) + // we must move the remaining data + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "nCopy (%u) != _playoutBufferUsed (%u)", + nCopy, _playoutBufferUsed); + if (_playWarning > 0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Pending play warning exists"); + } + _playWarning = 1; + } + memcpy(data, _playoutBuffer, 2*nCopy); + dataPos = nCopy; + memset(_playoutBuffer, 0, sizeof(_playoutBuffer)); + _playoutBufferUsed = 0; + } + + // Now get the rest from Audio Device Buffer + while (dataPos < dataSize) { + // Update playout delay + UpdatePlayoutDelay(); + + // Ask for new PCM data to be played out using the AudioDeviceBuffer + noSamplesOut = _ptrAudioBuffer->RequestPlayoutData(noSamp10ms); + + // Get data from Audio Device Buffer + noSamplesOut = + _ptrAudioBuffer->GetPlayoutData( + reinterpret_cast(dataTmp)); + // Cast OK since only equality comparison + if (noSamp10ms != (unsigned int)noSamplesOut) { + // Should never happen + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + "noSamp10ms (%u) != noSamplesOut (%d)", + noSamp10ms, noSamplesOut); + + if (_playWarning > 0) { + WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, + " Pending play warning exists"); + } + _playWarning = 1; + } + + // Insert as much as fits in data buffer + nCopy = (dataSize-dataPos) > noSamp10ms ? 
+ noSamp10ms : (dataSize-dataPos); + memcpy(&data[dataPos], dataTmp, 2*nCopy); + + // Save rest in playout buffer if any + if (nCopy < noSamp10ms) { + memcpy(_playoutBuffer, &dataTmp[nCopy], 2*(noSamp10ms-nCopy)); + _playoutBufferUsed = noSamp10ms - nCopy; + } + + // Update loop/index counter, if we copied less than noSamp10ms + // samples we shall quit loop anyway + dataPos += noSamp10ms; + } + + delete [] dataTmp; + } + + return 0; +} + +void AudioDeviceIPhone::UpdatePlayoutDelay() { + ++_playoutDelayMeasurementCounter; + + if (_playoutDelayMeasurementCounter >= 100) { + // Update HW and OS delay every second, unlikely to change + + _playoutDelay = 0; + + // HW output latency + Float32 f32(0); + UInt32 size = sizeof(f32); + OSStatus result = AudioSessionGetProperty( + kAudioSessionProperty_CurrentHardwareOutputLatency, &size, &f32); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "error HW latency (result=%d)", result); + } + _playoutDelay += static_cast(f32 * 1000000); + + // HW buffer duration + f32 = 0; + result = AudioSessionGetProperty( + kAudioSessionProperty_CurrentHardwareIOBufferDuration, &size, &f32); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "error HW buffer duration (result=%d)", result); + } + _playoutDelay += static_cast(f32 * 1000000); + + // AU latency + Float64 f64(0); + size = sizeof(f64); + result = AudioUnitGetProperty(_auRemoteIO, + kAudioUnitProperty_Latency, kAudioUnitScope_Global, 0, &f64, &size); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "error AU latency (result=%d)", result); + } + _playoutDelay += static_cast(f64 * 1000000); + + // To ms + _playoutDelay = (_playoutDelay - 500) / 1000; + + // Reset counter + _playoutDelayMeasurementCounter = 0; + } + + // todo: Add playout buffer? 
(Only used for 44.1 kHz) +} + +void AudioDeviceIPhone::UpdateRecordingDelay() { + ++_recordingDelayMeasurementCounter; + + if (_recordingDelayMeasurementCounter >= 100) { + // Update HW and OS delay every second, unlikely to change + + _recordingDelayHWAndOS = 0; + + // HW input latency + Float32 f32(0); + UInt32 size = sizeof(f32); + OSStatus result = AudioSessionGetProperty( + kAudioSessionProperty_CurrentHardwareInputLatency, &size, &f32); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "error HW latency (result=%d)", result); + } + _recordingDelayHWAndOS += static_cast(f32 * 1000000); + + // HW buffer duration + f32 = 0; + result = AudioSessionGetProperty( + kAudioSessionProperty_CurrentHardwareIOBufferDuration, &size, &f32); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "error HW buffer duration (result=%d)", result); + } + _recordingDelayHWAndOS += static_cast(f32 * 1000000); + + // AU latency + Float64 f64(0); + size = sizeof(f64); + result = AudioUnitGetProperty(_auRemoteIO, kAudioUnitProperty_Latency, + kAudioUnitScope_Global, 0, &f64, &size); + if (0 != result) { + WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, + "error AU latency (result=%d)", result); + } + _recordingDelayHWAndOS += static_cast(f64 * 1000000); + + // To ms + _recordingDelayHWAndOS = (_recordingDelayHWAndOS - 500) / 1000; + + // Reset counter + _recordingDelayMeasurementCounter = 0; + } + + _recordingDelay = _recordingDelayHWAndOS; + + // ADB recording buffer size, update every time + // Don't count the one next 10 ms to be sent, then convert samples => ms + const WebRtc_UWord32 noSamp10ms = _adbSampFreq / 100; + if (_recordingBufferTotalSize > noSamp10ms) { + _recordingDelay += + (_recordingBufferTotalSize - noSamp10ms) / (_adbSampFreq / 1000); + } +} + +bool AudioDeviceIPhone::RunCapture(void* ptrThis) { + return static_cast(ptrThis)->CaptureWorkerThread(); +} + +bool AudioDeviceIPhone::CaptureWorkerThread() { + if (_recording) 
{ + int bufPos = 0; + unsigned int lowestSeq = 0; + int lowestSeqBufPos = 0; + bool foundBuf = true; + const unsigned int noSamp10ms = _adbSampFreq / 100; + + while (foundBuf) { + // Check if we have any buffer with data to insert + // into the Audio Device Buffer, + // and find the one with the lowest seq number + foundBuf = false; + for (bufPos = 0; bufPos < N_REC_BUFFERS; ++bufPos) { + if (noSamp10ms == _recordingLength[bufPos]) { + if (!foundBuf) { + lowestSeq = _recordingSeqNumber[bufPos]; + lowestSeqBufPos = bufPos; + foundBuf = true; + } else if (_recordingSeqNumber[bufPos] < lowestSeq) { + lowestSeq = _recordingSeqNumber[bufPos]; + lowestSeqBufPos = bufPos; + } + } + } // for + + // Insert data into the Audio Device Buffer if found any + if (foundBuf) { + // Update recording delay + UpdateRecordingDelay(); + + // Set the recorded buffer + _ptrAudioBuffer->SetRecordedBuffer( + reinterpret_cast( + _recordingBuffer[lowestSeqBufPos]), + _recordingLength[lowestSeqBufPos]); + + // Don't need to set the current mic level in ADB since we only + // support digital AGC, + // and besides we cannot get or set the IOS mic level anyway. + + // Set VQE info, use clockdrift == 0 + _ptrAudioBuffer->SetVQEData(_playoutDelay, _recordingDelay, 0); + + // Deliver recorded samples at specified sample rate, mic level + // etc. to the observer using callback + _ptrAudioBuffer->DeliverRecordedData(); + + // Make buffer available + _recordingSeqNumber[lowestSeqBufPos] = 0; + _recordingBufferTotalSize -= _recordingLength[lowestSeqBufPos]; + // Must be done last to avoid interrupt problems between threads + _recordingLength[lowestSeqBufPos] = 0; + } + } // while (foundBuf) + } // if (_recording) + + { + // Normal case + // Sleep thread (5ms) to let other threads get to work + // todo: Is 5 ms optimal? Sleep shorter if inserted into the Audio + // Device Buffer? 
+ timespec t; + t.tv_sec = 0; + t.tv_nsec = 5*1000*1000; + nanosleep(&t, NULL); + } + + return true; +} + +} // namespace webrtc + diff --git a/src/modules/audio_device/main/source/ios/audio_device_ios.h b/src/modules/audio_device/main/source/ios/audio_device_ios.h new file mode 100644 index 0000000000..d494603c16 --- /dev/null +++ b/src/modules/audio_device/main/source/ios/audio_device_ios.h @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_IPHONE_H +#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_IPHONE_H + +#include + +#include "audio_device_generic.h" +#include "critical_section_wrapper.h" + +namespace webrtc { +class ThreadWrapper; + +const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 44000; +const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 44000; + +const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording +const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout +const WebRtc_UWord32 N_DEVICE_CHANNELS = 8; + +const WebRtc_UWord32 ENGINE_REC_BUF_SIZE_IN_SAMPLES = (N_REC_SAMPLES_PER_SEC + / 100); +const WebRtc_UWord32 ENGINE_PLAY_BUF_SIZE_IN_SAMPLES = (N_PLAY_SAMPLES_PER_SEC + / 100); + +// Number of 10 ms recording blocks in recording buffer +const WebRtc_UWord16 N_REC_BUFFERS = 20; + +class AudioDeviceIPhone : public AudioDeviceGeneric { +public: + AudioDeviceIPhone(const WebRtc_Word32 id); + ~AudioDeviceIPhone(); + + // Retrieve the currently utilized audio layer + virtual WebRtc_Word32 + ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const; + + // Main initializaton and termination + virtual WebRtc_Word32 Init(); + 
virtual WebRtc_Word32 Terminate(); + virtual bool Initialized() const; + + // Device enumeration + virtual WebRtc_Word16 PlayoutDevices(); + virtual WebRtc_Word16 RecordingDevices(); + virtual WebRtc_Word32 PlayoutDeviceName(WebRtc_UWord16 index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]); + virtual WebRtc_Word32 RecordingDeviceName(WebRtc_UWord16 index, + char name[kAdmMaxDeviceNameSize], + char guid[kAdmMaxGuidSize]); + + // Device selection + virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index); + virtual WebRtc_Word32 + SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device); + virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index); + virtual WebRtc_Word32 SetRecordingDevice( + AudioDeviceModule::WindowsDeviceType device); + + // Audio transport initialization + virtual WebRtc_Word32 PlayoutIsAvailable(bool& available); + virtual WebRtc_Word32 InitPlayout(); + virtual bool PlayoutIsInitialized() const; + virtual WebRtc_Word32 RecordingIsAvailable(bool& available); + virtual WebRtc_Word32 InitRecording(); + virtual bool RecordingIsInitialized() const; + + // Audio transport control + virtual WebRtc_Word32 StartPlayout(); + virtual WebRtc_Word32 StopPlayout(); + virtual bool Playing() const; + virtual WebRtc_Word32 StartRecording(); + virtual WebRtc_Word32 StopRecording(); + virtual bool Recording() const; + + // Microphone Automatic Gain Control (AGC) + virtual WebRtc_Word32 SetAGC(bool enable); + virtual bool AGC() const; + + // Volume control based on the Windows Wave API (Windows only) + virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft, + WebRtc_UWord16 volumeRight); + virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft, + WebRtc_UWord16& volumeRight) const; + + // Audio mixer initialization + virtual WebRtc_Word32 SpeakerIsAvailable(bool& available); + virtual WebRtc_Word32 InitSpeaker(); + virtual bool SpeakerIsInitialized() const; + virtual WebRtc_Word32 MicrophoneIsAvailable(bool& 
available); + virtual WebRtc_Word32 InitMicrophone(); + virtual bool MicrophoneIsInitialized() const; + + // Speaker volume controls + virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available); + virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume); + virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const; + virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const; + virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const; + virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const; + + // Microphone volume controls + virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available); + virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume); + virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const; + virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const; + virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const; + virtual WebRtc_Word32 + MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const; + + // Microphone mute control + virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available); + virtual WebRtc_Word32 SetMicrophoneMute(bool enable); + virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const; + + // Speaker mute control + virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available); + virtual WebRtc_Word32 SetSpeakerMute(bool enable); + virtual WebRtc_Word32 SpeakerMute(bool& enabled) const; + + // Microphone boost control + virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available); + virtual WebRtc_Word32 SetMicrophoneBoost(bool enable); + virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const; + + // Stereo support + virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available); + virtual WebRtc_Word32 SetStereoPlayout(bool enable); + virtual WebRtc_Word32 StereoPlayout(bool& enabled) const; + virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available); + virtual WebRtc_Word32 
SetStereoRecording(bool enable); + virtual WebRtc_Word32 StereoRecording(bool& enabled) const; + + // Delay information and control + virtual WebRtc_Word32 + SetPlayoutBuffer(const AudioDeviceModule::BufferType type, + WebRtc_UWord16 sizeMS); + virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type, + WebRtc_UWord16& sizeMS) const; + virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const; + virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const; + + // CPU load + virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const; + +public: + virtual bool PlayoutWarning() const; + virtual bool PlayoutError() const; + virtual bool RecordingWarning() const; + virtual bool RecordingError() const; + virtual void ClearPlayoutWarning(); + virtual void ClearPlayoutError(); + virtual void ClearRecordingWarning(); + virtual void ClearRecordingError(); + +public: + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + + // Reset Audio Deivce (for mobile devices only) + virtual WebRtc_Word32 ResetAudioDevice(); + + // enable or disable loud speaker (for iphone only) + virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable); + virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enabled) const; + +private: + void Lock() { + _critSect.Enter(); + } + + void UnLock() { + _critSect.Leave(); + } + + WebRtc_Word32 Id() { + return _id; + } + + // Init and shutdown + WebRtc_Word32 InitPlayOrRecord(); + WebRtc_Word32 ShutdownPlayOrRecord(); + + void UpdateRecordingDelay(); + void UpdatePlayoutDelay(); + + static OSStatus RecordProcess(void *inRefCon, + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *WebRtc_Word32imeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData); + + static OSStatus PlayoutProcess(void *inRefCon, + AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *WebRtc_Word32imeStamp, + UInt32 inBusNumber, + UInt32 inNumberFrames, + AudioBufferList *ioData); + + OSStatus 
RecordProcessImpl(AudioUnitRenderActionFlags *ioActionFlags, + const AudioTimeStamp *inTimeStamp, + WebRtc_UWord32 inBusNumber, + WebRtc_UWord32 inNumberFrames); + + OSStatus PlayoutProcessImpl(WebRtc_UWord32 inNumberFrames, + AudioBufferList *ioData); + + static bool RunCapture(void* ptrThis); + bool CaptureWorkerThread(); + +private: + AudioDeviceBuffer* _ptrAudioBuffer; + + CriticalSectionWrapper& _critSect; + + ThreadWrapper* _captureWorkerThread; + WebRtc_UWord32 _captureWorkerThreadId; + + WebRtc_Word32 _id; + + AudioUnit _auRemoteIO; + +private: + bool _initialized; + bool _isShutDown; + bool _recording; + bool _playing; + bool _recIsInitialized; + bool _playIsInitialized; + + bool _recordingDeviceIsSpecified; + bool _playoutDeviceIsSpecified; + bool _micIsInitialized; + bool _speakerIsInitialized; + + bool _AGC; + + // The sampling rate to use with Audio Device Buffer + WebRtc_UWord32 _adbSampFreq; + + // Delay calculation + WebRtc_UWord32 _recordingDelay; + WebRtc_UWord32 _playoutDelay; + WebRtc_UWord32 _playoutDelayMeasurementCounter; + WebRtc_UWord32 _recordingDelayHWAndOS; + WebRtc_UWord32 _recordingDelayMeasurementCounter; + + // Errors and warnings count + WebRtc_UWord16 _playWarning; + WebRtc_UWord16 _playError; + WebRtc_UWord16 _recWarning; + WebRtc_UWord16 _recError; + + // Playout buffer, needed for 44.0 / 44.1 kHz mismatch + WebRtc_Word16 _playoutBuffer[ENGINE_PLAY_BUF_SIZE_IN_SAMPLES]; + WebRtc_UWord32 _playoutBufferUsed; // How much is filled + + // Recording buffers + WebRtc_Word16 + _recordingBuffer[N_REC_BUFFERS][ENGINE_REC_BUF_SIZE_IN_SAMPLES]; + WebRtc_UWord32 _recordingLength[N_REC_BUFFERS]; + WebRtc_UWord32 _recordingSeqNumber[N_REC_BUFFERS]; + WebRtc_UWord32 _recordingCurrentSeq; + + // Current total size of all data in buffers, used for delay estimate + WebRtc_UWord32 _recordingBufferTotalSize; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_DEVICE_MAIN_SOURCE_MAC_AUDIO_DEVICE_IPHONE_H_ diff --git 
a/src/modules/audio_device/main/source/ios/audio_device_utility_ios.cc b/src/modules/audio_device/main/source/ios/audio_device_utility_ios.cc new file mode 100644 index 0000000000..87c7a50297 --- /dev/null +++ b/src/modules/audio_device/main/source/ios/audio_device_utility_ios.cc @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "audio_device_utility_ios.h" +#include "audio_device_config.h" + +#include "critical_section_wrapper.h" +#include "trace.h" + +namespace webrtc { +AudioDeviceUtilityIPhone::AudioDeviceUtilityIPhone(const WebRtc_Word32 id) +: + _critSect(*CriticalSectionWrapper::CreateCriticalSection()), + _id(id), + _lastError(AudioDeviceModule::kAdmErrNone) { + WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, + "%s created", __FUNCTION__); +} + +AudioDeviceUtilityIPhone::~AudioDeviceUtilityIPhone() { + WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, + "%s destroyed", __FUNCTION__); + CriticalSectionScoped lock(_critSect); + + delete &_critSect; +} + +WebRtc_Word32 AudioDeviceUtilityIPhone::Init() { + WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, + "%s", __FUNCTION__); + + WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, + " OS info: %s", "iOS"); + + return 0; +} + +} // namespace webrtc diff --git a/src/modules/audio_device/main/source/ios/audio_device_utility_ios.h b/src/modules/audio_device/main/source/ios/audio_device_utility_ios.h new file mode 100644 index 0000000000..663b1f771a --- /dev/null +++ b/src/modules/audio_device/main/source/ios/audio_device_utility_ios.h @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_IPHONE_H +#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_IPHONE_H + +#include "audio_device_utility.h" +#include "audio_device.h" + +namespace webrtc { +class CriticalSectionWrapper; + +class AudioDeviceUtilityIPhone: public AudioDeviceUtility { +public: + AudioDeviceUtilityIPhone(const WebRtc_Word32 id); + AudioDeviceUtilityIPhone(); + virtual ~AudioDeviceUtilityIPhone(); + + virtual WebRtc_Word32 Init(); + +private: + CriticalSectionWrapper& _critSect; + WebRtc_Word32 _id; + AudioDeviceModule::ErrorCode _lastError; +}; + +} // namespace webrtc + +#endif // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_IPHONE_H diff --git a/src/modules/video_coding/codecs/vp8/vp8.gyp b/src/modules/video_coding/codecs/vp8/vp8.gyp index 18b50ab665..33574b4e7f 100644 --- a/src/modules/video_coding/codecs/vp8/vp8.gyp +++ b/src/modules/video_coding/codecs/vp8/vp8.gyp @@ -16,7 +16,6 @@ 'target_name': 'webrtc_vp8', 'type': '<(library)', 'dependencies': [ - '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx', '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers', '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv', ], @@ -27,6 +26,11 @@ '<(webrtc_root)/modules/interface', ], 'conditions': [ + ['build_libvpx==1', { + 'dependencies': [ + '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx', + ], + }], # TODO(mikhal): Investigate this mechanism for handling differences # between the Chromium and standalone builds. 
# http://code.google.com/p/webrtc/issues/detail?id=201 @@ -34,7 +38,7 @@ 'defines': [ 'WEBRTC_LIBVPX_VERSION=960' # Bali ], - },{ + }, { 'defines': [ 'WEBRTC_LIBVPX_VERSION=971' # Cayuga ], @@ -100,7 +104,6 @@ 'dependencies': [ 'webrtc_vp8', '<(DEPTH)/testing/gtest.gyp:gtest', - '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx', '<(webrtc_root)/test/test.gyp:test_support_main', ], 'include_dirs': [ @@ -110,6 +113,13 @@ 'reference_picture_selection_unittest.cc', 'temporal_layers_unittest.cc', ], + 'conditions': [ + ['build_libvpx==1', { + 'dependencies': [ + '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx', + ], + }], + ], }, ], # targets }], # include_tests diff --git a/src/system_wrappers/interface/tick_util.h b/src/system_wrappers/interface/tick_util.h index 0cd85d0050..0ce3d90777 100644 --- a/src/system_wrappers/interface/tick_util.h +++ b/src/system_wrappers/interface/tick_util.h @@ -184,7 +184,11 @@ inline TickTime TickTime::Now() if (retval != KERN_SUCCESS) { // TODO(wu): Implement CHECK similar to chrome for all the platforms. // Then replace this with a CHECK(retval == KERN_SUCCESS); +#ifndef MAC_IPHONE asm("int3"); +#else + __builtin_trap(); +#endif // MAC_IPHONE } } // Use timebase to convert absolute time tick units into nanoseconds. 
diff --git a/src/system_wrappers/source/event.cc b/src/system_wrappers/source/event.cc index 71a81932b5..adc33dad33 100644 --- a/src/system_wrappers/source/event.cc +++ b/src/system_wrappers/source/event.cc @@ -13,7 +13,7 @@ #if defined(_WIN32) #include #include "event_win.h" -#elif defined(WEBRTC_MAC) +#elif defined(WEBRTC_MAC) && !defined(MAC_IPHONE) #include #include #include "event_posix.h" @@ -49,7 +49,7 @@ int EventWrapper::KeyPressed() { return 0; } -#elif defined(WEBRTC_MAC) +#elif defined(WEBRTC_MAC) && !defined(MAC_IPHONE) bool keyDown = false; // loop through all Mac virtual key constant values for(int keyIndex = 0; keyIndex <= 0x5C; keyIndex++) diff --git a/src/system_wrappers/source/system_wrappers.gyp b/src/system_wrappers/source/system_wrappers.gyp index b530d6f483..8da94026c7 100644 --- a/src/system_wrappers/source/system_wrappers.gyp +++ b/src/system_wrappers/source/system_wrappers.gyp @@ -151,7 +151,22 @@ 'trace_impl_no_op.cc', ], }] - ] # conditions + ], # conditions + 'target_conditions': [ + # We need to do this in a target_conditions block to override the + # filename_rules filters. + ['OS=="ios"', { + # Pull in specific Mac files for iOS (which have been filtered out + # by file name rules). 
+ 'sources/': [ + ['include', '^atomic32_mac\\.'], + ['include', '^cpu_mac\\.'], + ], + 'sources!': [ + 'atomic32_posix.cc', + ], + }], + ], }, ], # targets 'conditions': [ diff --git a/src/voice_engine/voe_audio_processing_impl.cc b/src/voice_engine/voe_audio_processing_impl.cc index 3012687785..cabecc9dcd 100644 --- a/src/voice_engine/voe_audio_processing_impl.cc +++ b/src/voice_engine/voe_audio_processing_impl.cc @@ -788,7 +788,7 @@ int VoEAudioProcessingImpl::RegisterRxVadObserver( WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "RegisterRxVadObserver()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError); @@ -808,7 +808,7 @@ int VoEAudioProcessingImpl::DeRegisterRxVadObserver(int channel) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "DeRegisterRxVadObserver()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError); @@ -850,7 +850,7 @@ int VoEAudioProcessingImpl::SetEcMetricsStatus(bool enable) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetEcMetricsStatus(enable=%d)", enable); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_VOICE_ENGINE_ECHO if (!_shared->statistics().Initialized()) { @@ -878,7 +878,7 @@ int VoEAudioProcessingImpl::GetEcMetricsStatus(bool& enabled) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetEcMetricsStatus(enabled=?)"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_VOICE_ENGINE_ECHO if (!_shared->statistics().Initialized()) { 
@@ -916,7 +916,7 @@ int VoEAudioProcessingImpl::GetEchoMetrics(int& ERL, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetEchoMetrics(ERL=?, ERLE=?, RERL=?, A_NLP=?)"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_VOICE_ENGINE_ECHO if (!_shared->statistics().Initialized()) { @@ -960,7 +960,7 @@ int VoEAudioProcessingImpl::GetEcDelayMetrics(int& delay_median, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetEcDelayMetrics(median=?, std=?)"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_VOICE_ENGINE_ECHO if (!_shared->statistics().Initialized()) { @@ -1026,7 +1026,7 @@ int VoEAudioProcessingImpl::SetTypingDetectionStatus(bool enable) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetTypingDetectionStatus()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError); @@ -1060,7 +1060,7 @@ int VoEAudioProcessingImpl::GetTypingDetectionStatus(bool& enabled) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetTypingDetectionStatus()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION if (!_shared->statistics().Initialized()) { @@ -1085,7 +1085,7 @@ int VoEAudioProcessingImpl::TimeSinceLastTyping(int &seconds) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "TimeSinceLastTyping()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef 
WEBRTC_VOICE_ENGINE_TYPING_DETECTION if (!_shared->statistics().Initialized()) { @@ -1120,7 +1120,7 @@ int VoEAudioProcessingImpl::SetTypingDetectionParameters(int timeWindow, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetTypingDetectionParameters()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION if (!_shared->statistics().Initialized()) { diff --git a/src/voice_engine/voe_call_report_impl.cc b/src/voice_engine/voe_call_report_impl.cc index ef4c39c29b..778cb816bf 100644 --- a/src/voice_engine/voe_call_report_impl.cc +++ b/src/voice_engine/voe_call_report_impl.cc @@ -57,7 +57,7 @@ int VoECallReportImpl::ResetCallReportStatistics(int channel) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "ResetCallReportStatistics(channel=%d)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -129,7 +129,7 @@ int VoECallReportImpl::GetEchoMetricSummary(EchoStatistics& stats) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetEchoMetricSummary()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -230,7 +230,7 @@ int VoECallReportImpl::GetRoundTripTimeSummary(int channel, StatVal& delaysMs) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetRoundTripTimeSummary()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -256,7 +256,7 @@ int VoECallReportImpl::GetDeadOrAliveSummary(int channel, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), 
"GetDeadOrAliveSummary(channel=%d)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -298,7 +298,7 @@ int VoECallReportImpl::WriteReportToFile(const char* fileNameUTF8) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "WriteReportToFile(fileNameUTF8=%s)", fileNameUTF8); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { diff --git a/src/voice_engine/voe_codec_impl.cc b/src/voice_engine/voe_codec_impl.cc index b1879bb613..588ffe6ee9 100644 --- a/src/voice_engine/voe_codec_impl.cc +++ b/src/voice_engine/voe_codec_impl.cc @@ -270,7 +270,7 @@ int VoECodecImpl::SetAMRWbEncFormat(int channel, AmrMode mode) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetAMRWbEncFormat(channel=%d, mode=%d)", channel, mode); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_CODEC_GSMAMRWB if (!_shared->statistics().Initialized()) { @@ -298,7 +298,7 @@ int VoECodecImpl::SetAMRWbDecFormat(int channel, AmrMode mode) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetAMRWbDecFormat(channel=%i, mode=%i)", channel, mode); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_CODEC_GSMAMRWB if (!_shared->statistics().Initialized()) { @@ -410,7 +410,7 @@ int VoECodecImpl::SetISACInitTargetRate(int channel, int rateBps, "SetISACInitTargetRate(channel=%d, rateBps=%d, " "useFixedFrameSize=%d)", channel, rateBps, useFixedFrameSize); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_CODEC_ISAC if (!_shared->statistics().Initialized()) { @@ 
-438,7 +438,7 @@ int VoECodecImpl::SetISACMaxRate(int channel, int rateBps) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetISACMaxRate(channel=%d, rateBps=%d)", channel, rateBps); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_CODEC_ISAC if (!_shared->statistics().Initialized()) { @@ -467,7 +467,7 @@ int VoECodecImpl::SetISACMaxPayloadSize(int channel, int sizeBytes) "SetISACMaxPayloadSize(channel=%d, sizeBytes=%d)", channel, sizeBytes); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifdef WEBRTC_CODEC_ISAC if (!_shared->statistics().Initialized()) { diff --git a/src/voice_engine/voe_dtmf_impl.cc b/src/voice_engine/voe_dtmf_impl.cc index e7c22bf5b9..05817378ff 100644 --- a/src/voice_engine/voe_dtmf_impl.cc +++ b/src/voice_engine/voe_dtmf_impl.cc @@ -395,7 +395,7 @@ int VoEDtmfImpl::SetDtmfPlayoutStatus(int channel, bool enable) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetDtmfPlayoutStatus(channel=%d, enable=%d)", channel, enable); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -417,7 +417,7 @@ int VoEDtmfImpl::GetDtmfPlayoutStatus(int channel, bool& enabled) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetDtmfPlayoutStatus(channel=%d, enabled=?)", channel); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError); diff --git a/src/voice_engine/voe_external_media_impl.cc b/src/voice_engine/voe_external_media_impl.cc index f62fa2455f..0158c3d979 100644 --- a/src/voice_engine/voe_external_media_impl.cc +++ b/src/voice_engine/voe_external_media_impl.cc @@ -59,7 +59,7 @@ int VoEExternalMediaImpl::RegisterExternalMediaProcessing( 
"RegisterExternalMediaProcessing(channel=%d, type=%d, " "processObject=0x%x)", channel, type, &processObject); ANDROID_NOT_SUPPORTED(shared_->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(shared_->statistics()); if (!shared_->statistics().Initialized()) { shared_->SetLastError(VE_NOT_INITED, kTraceError); @@ -103,7 +103,7 @@ int VoEExternalMediaImpl::DeRegisterExternalMediaProcessing( WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1), "DeRegisterExternalMediaProcessing(channel=%d)", channel); ANDROID_NOT_SUPPORTED(shared_->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(shared_->statistics()); if (!shared_->statistics().Initialized()) { shared_->SetLastError(VE_NOT_INITED, kTraceError); @@ -144,7 +144,7 @@ int VoEExternalMediaImpl::SetExternalRecordingStatus(bool enable) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1), "SetExternalRecordingStatus(enable=%d)", enable); ANDROID_NOT_SUPPORTED(shared_->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(shared_->statistics()); #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT if (shared_->audio_device()->Recording()) { @@ -173,7 +173,7 @@ int VoEExternalMediaImpl::ExternalRecordingInsertData( &speechData10ms[0], lengthSamples, samplingFreqHz, current_delay_ms); ANDROID_NOT_SUPPORTED(shared_->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(shared_->statistics()); #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT if (!shared_->statistics().Initialized()) @@ -265,7 +265,7 @@ int VoEExternalMediaImpl::SetExternalPlayoutStatus(bool enable) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1), "SetExternalPlayoutStatus(enable=%d)", enable); ANDROID_NOT_SUPPORTED(shared_->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(shared_->statistics()); #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT if (shared_->audio_device()->Playing()) { @@ -293,7 +293,7 @@ int 
VoEExternalMediaImpl::ExternalPlayoutGetData( ", current_delay_ms=%d)", &speechData10ms[0], samplingFreqHz, current_delay_ms); ANDROID_NOT_SUPPORTED(shared_->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(shared_->statistics()); #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT if (!shared_->statistics().Initialized()) { diff --git a/src/voice_engine/voe_hardware_impl.cc b/src/voice_engine/voe_hardware_impl.cc index 851967e405..6bec665b13 100644 --- a/src/voice_engine/voe_hardware_impl.cc +++ b/src/voice_engine/voe_hardware_impl.cc @@ -163,7 +163,7 @@ int VoEHardwareImpl::GetNumOfRecordingDevices(int& devices) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetNumOfRecordingDevices(devices=?)"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -184,7 +184,7 @@ int VoEHardwareImpl::GetNumOfPlayoutDevices(int& devices) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetNumOfPlayoutDevices(devices=?)"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -208,7 +208,7 @@ int VoEHardwareImpl::GetRecordingDeviceName(int index, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetRecordingDeviceName(index=%d)", index); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -266,7 +266,7 @@ int VoEHardwareImpl::GetPlayoutDeviceName(int index, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetPlayoutDeviceName(index=%d)", index); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -325,7 +325,7 @@ int 
VoEHardwareImpl::SetRecordingDevice(int index, index, (int) recordingChannel); CriticalSectionScoped cs(_shared->crit_sec()); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -455,7 +455,7 @@ int VoEHardwareImpl::SetPlayoutDevice(int index) "SetPlayoutDevice(index=%d)", index); CriticalSectionScoped cs(_shared->crit_sec()); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -559,7 +559,7 @@ int VoEHardwareImpl::GetRecordingDeviceStatus(bool& isAvailable) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetRecordingDeviceStatus()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -593,7 +593,7 @@ int VoEHardwareImpl::GetPlayoutDeviceStatus(bool& isAvailable) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetPlayoutDeviceStatus()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -656,7 +656,7 @@ int VoEHardwareImpl::AudioDeviceControl(unsigned int par1, unsigned int par2, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "AudioDeviceControl(%i, %i, %i)", par1, par2, par3); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError); @@ -671,7 +671,7 @@ int VoEHardwareImpl::SetLoudspeakerStatus(bool enable) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetLoudspeakerStatus(enable=%i)", (int) enable); - IPHONE_NOT_SUPPORTED(); + 
IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -698,7 +698,7 @@ int VoEHardwareImpl::GetLoudspeakerStatus(bool& enabled) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetLoudspeakerStatus()"); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #if defined(WEBRTC_ANDROID) if (!_shared->statistics().Initialized()) @@ -727,7 +727,7 @@ int VoEHardwareImpl::GetCPULoad(int& loadPercent) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetCPULoad()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -758,7 +758,7 @@ int VoEHardwareImpl::GetSystemCPULoad(int& loadPercent) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetSystemCPULoad(loadPercent=?)"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { diff --git a/src/voice_engine/voe_neteq_stats_impl.cc b/src/voice_engine/voe_neteq_stats_impl.cc index c82f4143fa..50f2dfbd77 100644 --- a/src/voice_engine/voe_neteq_stats_impl.cc +++ b/src/voice_engine/voe_neteq_stats_impl.cc @@ -55,7 +55,7 @@ int VoENetEqStatsImpl::GetNetworkStatistics(int channel, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetNetworkStatistics(channel=%d, stats=?)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { diff --git a/src/voice_engine/voe_network_impl.cc b/src/voice_engine/voe_network_impl.cc index 174abcab22..d0b9895c70 100644 --- a/src/voice_engine/voe_network_impl.cc +++ b/src/voice_engine/voe_network_impl.cc @@ -216,7 +216,7 @@ int VoENetworkImpl::GetLocalIP(char ipAddr[64], bool ipv6) { 
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetLocalIP(ipAddr[]=?, ipv6=%d)", ipv6); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifndef WEBRTC_EXTERNAL_TRANSPORT if (!_shared->statistics().Initialized()) { @@ -304,7 +304,7 @@ int VoENetworkImpl::EnableIPv6(int channel) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "EnableIPv6(channel=%d)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #ifndef WEBRTC_EXTERNAL_TRANSPORT if (!_shared->statistics().Initialized()) { @@ -594,7 +594,7 @@ int VoENetworkImpl::SetSendGQoS(int channel, " overrideDSCP=%d)", channel, (int) enable, serviceType, overrideDSCP); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #if !defined(_WIN32) _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceWarning, "SetSendGQOS() is not supported on this platform"); @@ -635,7 +635,7 @@ int VoENetworkImpl::GetSendGQoS(int channel, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetSendGQOS(channel=%d)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); #if !defined(_WIN32) _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceWarning, "GetSendGQOS() is not supported on this platform"); diff --git a/src/voice_engine/voe_video_sync_impl.cc b/src/voice_engine/voe_video_sync_impl.cc index a509f70252..2a7ff7d896 100644 --- a/src/voice_engine/voe_video_sync_impl.cc +++ b/src/voice_engine/voe_video_sync_impl.cc @@ -52,7 +52,7 @@ int VoEVideoSyncImpl::GetPlayoutTimestamp(int channel, unsigned int& timestamp) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetPlayoutTimestamp(channel=%d, timestamp=?)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + 
IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -77,7 +77,7 @@ int VoEVideoSyncImpl::SetInitTimestamp(int channel, "SetInitTimestamp(channel=%d, timestamp=%lu)", channel, timestamp); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -102,7 +102,7 @@ int VoEVideoSyncImpl::SetInitSequenceNumber(int channel, "SetInitSequenceNumber(channel=%d, sequenceNumber=%hd)", channel, sequenceNumber); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -126,7 +126,7 @@ int VoEVideoSyncImpl::SetMinimumPlayoutDelay(int channel,int delayMs) "SetMinimumPlayoutDelay(channel=%d, delayMs=%d)", channel, delayMs); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -149,7 +149,7 @@ int VoEVideoSyncImpl::GetDelayEstimate(int channel, int& delayMs) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetDelayEstimate(channel=%d, delayMs=?)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -172,7 +172,7 @@ int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetPlayoutBufferSize(bufferMs=?)"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { diff --git a/src/voice_engine/voe_volume_control_impl.cc b/src/voice_engine/voe_volume_control_impl.cc index f821ab30f0..f0d165217a 100644 --- a/src/voice_engine/voe_volume_control_impl.cc +++ 
b/src/voice_engine/voe_volume_control_impl.cc @@ -54,7 +54,7 @@ int VoEVolumeControlImpl::SetSpeakerVolume(unsigned int volume) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetSpeakerVolume(volume=%u)", volume); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -96,7 +96,7 @@ int VoEVolumeControlImpl::GetSpeakerVolume(unsigned int& volume) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetSpeakerVolume()"); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -180,7 +180,7 @@ int VoEVolumeControlImpl::SetMicVolume(unsigned int volume) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetMicVolume(volume=%u)", volume); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -239,7 +239,7 @@ int VoEVolumeControlImpl::GetMicVolume(unsigned int& volume) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetMicVolume()"); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -491,7 +491,7 @@ int VoEVolumeControlImpl::SetChannelOutputVolumeScaling(int channel, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetChannelOutputVolumeScaling(channel=%d, scaling=%3.2f)", channel, scaling); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError); @@ -520,7 +520,7 @@ int VoEVolumeControlImpl::GetChannelOutputVolumeScaling(int channel, { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetChannelOutputVolumeScaling(channel=%d, scaling=?)", 
channel); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError); @@ -545,7 +545,7 @@ int VoEVolumeControlImpl::SetOutputVolumePan(int channel, "SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)", channel, left, right); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { @@ -599,7 +599,7 @@ int VoEVolumeControlImpl::GetOutputVolumePan(int channel, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetOutputVolumePan(channel=%d, left=?, right=?)", channel); ANDROID_NOT_SUPPORTED(_shared->statistics()); - IPHONE_NOT_SUPPORTED(); + IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) { diff --git a/src/voice_engine/voice_engine_defines.h b/src/voice_engine/voice_engine_defines.h index 7d4c7299cd..c16e0fe35f 100644 --- a/src/voice_engine/voice_engine_defines.h +++ b/src/voice_engine/voice_engine_defines.h @@ -553,9 +553,9 @@ namespace webrtc #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE \ GainControl::kAdaptiveDigital - #define IPHONE_NOT_SUPPORTED() \ - _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, \ - "API call not supported"); \ + #define IPHONE_NOT_SUPPORTED(stat) \ + stat.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, \ + "API call not supported"); \ return -1; #else // Non-iPhone @@ -576,11 +576,11 @@ namespace webrtc // Defines // ---------------------------------------------------------------------------- - #define IPHONE_NOT_SUPPORTED() + #define IPHONE_NOT_SUPPORTED(stat) #endif #else -#define IPHONE_NOT_SUPPORTED() +#define IPHONE_NOT_SUPPORTED(stat) #endif // #ifdef WEBRTC_MAC