The VoE functionality to apply receive-side processing to VoE channels is unused. I'm removing it so we can avoid instantiating a full APM per channel (and thus also for webrtc::AudioSendStream and webrtc::AudioReceiveStream), and then never use it.

The following APIs are removed from VoEAudioProcessing:

  virtual int SetRxNsStatus(int channel,
                            bool enable,
                            NsModes mode = kNsUnchanged) = 0;
  virtual int GetRxNsStatus(int channel, bool& enabled, NsModes& mode) = 0;
  virtual int SetRxAgcStatus(int channel,
                             bool enable,
                             AgcModes mode = kAgcUnchanged) = 0;
  virtual int GetRxAgcStatus(int channel, bool& enabled, AgcModes& mode) = 0;
  virtual int SetRxAgcConfig(int channel, AgcConfig config) = 0;
  virtual int GetRxAgcConfig(int channel, AgcConfig& config) = 0;
  virtual int RegisterRxVadObserver(int channel,
                                    VoERxVadCallback& observer) = 0;
  virtual int DeRegisterRxVadObserver(int channel) = 0;

BUG=webrtc:4690

Review-Url: https://codereview.webrtc.org/2295113002
Cr-Commit-Position: refs/heads/master@{#14227}
This commit is contained in:
solenberg 2016-09-15 04:29:13 -07:00 committed by Commit bot
parent 70d01242f8
commit 11ace15c19
10 changed files with 7 additions and 749 deletions

View File

@@ -488,18 +488,6 @@ class FakeWebRtcVoiceEngine
enabledCNG = cng_enabled_;
return 0;
}
WEBRTC_STUB(SetRxNsStatus, (int channel, bool enable, webrtc::NsModes mode));
WEBRTC_STUB(GetRxNsStatus, (int channel, bool& enabled,
webrtc::NsModes& mode));
WEBRTC_STUB(SetRxAgcStatus, (int channel, bool enable,
webrtc::AgcModes mode));
WEBRTC_STUB(GetRxAgcStatus, (int channel, bool& enabled,
webrtc::AgcModes& mode));
WEBRTC_STUB(SetRxAgcConfig, (int channel, webrtc::AgcConfig config));
WEBRTC_STUB(GetRxAgcConfig, (int channel, webrtc::AgcConfig& config));
WEBRTC_STUB(RegisterRxVadObserver, (int, webrtc::VoERxVadCallback&));
WEBRTC_STUB(DeRegisterRxVadObserver, (int channel));
WEBRTC_STUB(VoiceActivityIndicator, (int channel));
WEBRTC_FUNC(SetEcMetricsStatus, (bool enable)) {
ec_metrics_enabled_ = enable;

View File

@@ -79,15 +79,6 @@ class MockVoiceEngine : public VoiceEngineImpl {
MOCK_METHOD2(GetAecmMode, int(AecmModes& mode, bool& enabledCNG));
MOCK_METHOD1(EnableHighPassFilter, int(bool enable));
MOCK_METHOD0(IsHighPassFilterEnabled, bool());
MOCK_METHOD3(SetRxNsStatus, int(int channel, bool enable, NsModes mode));
MOCK_METHOD3(GetRxNsStatus, int(int channel, bool& enabled, NsModes& mode));
MOCK_METHOD3(SetRxAgcStatus, int(int channel, bool enable, AgcModes mode));
MOCK_METHOD3(GetRxAgcStatus, int(int channel, bool& enabled, AgcModes& mode));
MOCK_METHOD2(SetRxAgcConfig, int(int channel, AgcConfig config));
MOCK_METHOD2(GetRxAgcConfig, int(int channel, AgcConfig& config));
MOCK_METHOD2(RegisterRxVadObserver,
int(int channel, VoERxVadCallback& observer));
MOCK_METHOD1(DeRegisterRxVadObserver, int(int channel));
MOCK_METHOD1(VoiceActivityIndicator, int(int channel));
MOCK_METHOD1(SetEcMetricsStatus, int(bool enable));
MOCK_METHOD1(GetEcMetricsStatus, int(bool& enabled));

View File

@@ -387,15 +387,6 @@ int32_t Channel::InFrameType(FrameType frame_type) {
return 0;
}
int32_t Channel::OnRxVadDetected(int vadDecision) {
rtc::CritScope cs(&_callbackCritSect);
if (_rxVadObserverPtr) {
_rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
}
return 0;
}
bool Channel::SendRtp(const uint8_t* data,
size_t len,
const PacketOptions& options) {
@@ -585,10 +576,6 @@ MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
audioFrame->Mute();
}
if (_RxVadDetection) {
UpdateRxVadDetection(*audioFrame);
}
// Convert module ID to internal VoE channel ID
audioFrame->id_ = VoEChannelId(audioFrame->id_);
// Store speech type for dead-or-alive detection
@@ -596,14 +583,6 @@ MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
ChannelState::State state = channel_state_.Get();
if (state.rx_apm_is_enabled) {
int err = rx_audioproc_->ProcessStream(audioFrame);
if (err) {
LOG(LS_ERROR) << "ProcessStream() error: " << err;
assert(false);
}
}
{
// Pass the audio buffers to an optional sink callback, before applying
// scaling/panning, as that applies to the mix operation.
@@ -861,8 +840,6 @@ Channel::Channel(int32_t channelId,
_voiceEngineObserverPtr(NULL),
_callbackCritSectPtr(NULL),
_transportPtr(NULL),
_rxVadObserverPtr(NULL),
_oldVadDecision(-1),
_sendFrameType(0),
_externalMixing(false),
_mixFileWithMicrophone(false),
@@ -875,9 +852,6 @@ Channel::Channel(int32_t channelId,
_lastPayloadType(0),
_includeAudioLevelIndication(false),
_outputSpeechType(AudioFrame::kNormalSpeech),
_RxVadDetection(false),
_rxAgcIsEnabled(false),
_rxNsIsEnabled(false),
restored_packet_in_use_(false),
rtcp_observer_(new VoERtcpObserver(this)),
network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
@@ -919,10 +893,6 @@ Channel::Channel(int32_t channelId,
statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
statistics_proxy_.get());
Config audioproc_config;
audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
}
Channel::~Channel() {
@@ -1078,15 +1048,6 @@ int32_t Channel::Init() {
}
}
if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
LOG(LS_ERROR) << "noise_suppression()->set_level(kDefaultNsMode) failed.";
return -1;
}
if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
LOG(LS_ERROR) << "gain_control()->set_mode(kDefaultRxAgcMode) failed.";
return -1;
}
return 0;
}
@@ -2309,243 +2270,11 @@ int Channel::SetSendTelephoneEventPayloadType(int payload_type) {
return 0;
}
int Channel::UpdateRxVadDetection(AudioFrame& audioFrame) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::UpdateRxVadDetection()");
int vadDecision = 1;
vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr) {
OnRxVadDetected(vadDecision);
_oldVadDecision = vadDecision;
}
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::UpdateRxVadDetection() => vadDecision=%d",
vadDecision);
return 0;
}
int Channel::RegisterRxVadObserver(VoERxVadCallback& observer) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::RegisterRxVadObserver()");
rtc::CritScope cs(&_callbackCritSect);
if (_rxVadObserverPtr) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceError,
"RegisterRxVadObserver() observer already enabled");
return -1;
}
_rxVadObserverPtr = &observer;
_RxVadDetection = true;
return 0;
}
int Channel::DeRegisterRxVadObserver() {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::DeRegisterRxVadObserver()");
rtc::CritScope cs(&_callbackCritSect);
if (!_rxVadObserverPtr) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"DeRegisterRxVadObserver() observer already disabled");
return 0;
}
_rxVadObserverPtr = NULL;
_RxVadDetection = false;
return 0;
}
int Channel::VoiceActivityIndicator(int& activity) {
activity = _sendFrameType;
return 0;
}
#ifdef WEBRTC_VOICE_ENGINE_AGC
int Channel::SetRxAgcStatus(bool enable, AgcModes mode) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetRxAgcStatus(enable=%d, mode=%d)", (int)enable,
(int)mode);
GainControl::Mode agcMode = kDefaultRxAgcMode;
switch (mode) {
case kAgcDefault:
break;
case kAgcUnchanged:
agcMode = rx_audioproc_->gain_control()->mode();
break;
case kAgcFixedDigital:
agcMode = GainControl::kFixedDigital;
break;
case kAgcAdaptiveDigital:
agcMode = GainControl::kAdaptiveDigital;
break;
default:
_engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
"SetRxAgcStatus() invalid Agc mode");
return -1;
}
if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0) {
_engineStatisticsPtr->SetLastError(
VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc mode");
return -1;
}
if (rx_audioproc_->gain_control()->Enable(enable) != 0) {
_engineStatisticsPtr->SetLastError(
VE_APM_ERROR, kTraceError, "SetRxAgcStatus() failed to set Agc state");
return -1;
}
_rxAgcIsEnabled = enable;
channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
return 0;
}
int Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode) {
bool enable = rx_audioproc_->gain_control()->is_enabled();
GainControl::Mode agcMode = rx_audioproc_->gain_control()->mode();
enabled = enable;
switch (agcMode) {
case GainControl::kFixedDigital:
mode = kAgcFixedDigital;
break;
case GainControl::kAdaptiveDigital:
mode = kAgcAdaptiveDigital;
break;
default:
_engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
"GetRxAgcStatus() invalid Agc mode");
return -1;
}
return 0;
}
int Channel::SetRxAgcConfig(AgcConfig config) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetRxAgcConfig()");
if (rx_audioproc_->gain_control()->set_target_level_dbfs(
config.targetLeveldBOv) != 0) {
_engineStatisticsPtr->SetLastError(
VE_APM_ERROR, kTraceError,
"SetRxAgcConfig() failed to set target peak |level|"
"(or envelope) of the Agc");
return -1;
}
if (rx_audioproc_->gain_control()->set_compression_gain_db(
config.digitalCompressionGaindB) != 0) {
_engineStatisticsPtr->SetLastError(
VE_APM_ERROR, kTraceError,
"SetRxAgcConfig() failed to set the range in |gain| the"
" digital compression stage may apply");
return -1;
}
if (rx_audioproc_->gain_control()->enable_limiter(config.limiterEnable) !=
0) {
_engineStatisticsPtr->SetLastError(
VE_APM_ERROR, kTraceError,
"SetRxAgcConfig() failed to set hard limiter to the signal");
return -1;
}
return 0;
}
int Channel::GetRxAgcConfig(AgcConfig& config) {
config.targetLeveldBOv = rx_audioproc_->gain_control()->target_level_dbfs();
config.digitalCompressionGaindB =
rx_audioproc_->gain_control()->compression_gain_db();
config.limiterEnable = rx_audioproc_->gain_control()->is_limiter_enabled();
return 0;
}
#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
#ifdef WEBRTC_VOICE_ENGINE_NR
int Channel::SetRxNsStatus(bool enable, NsModes mode) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetRxNsStatus(enable=%d, mode=%d)", (int)enable,
(int)mode);
NoiseSuppression::Level nsLevel = kDefaultNsMode;
switch (mode) {
case kNsDefault:
break;
case kNsUnchanged:
nsLevel = rx_audioproc_->noise_suppression()->level();
break;
case kNsConference:
nsLevel = NoiseSuppression::kHigh;
break;
case kNsLowSuppression:
nsLevel = NoiseSuppression::kLow;
break;
case kNsModerateSuppression:
nsLevel = NoiseSuppression::kModerate;
break;
case kNsHighSuppression:
nsLevel = NoiseSuppression::kHigh;
break;
case kNsVeryHighSuppression:
nsLevel = NoiseSuppression::kVeryHigh;
break;
}
if (rx_audioproc_->noise_suppression()->set_level(nsLevel) != 0) {
_engineStatisticsPtr->SetLastError(
VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS level");
return -1;
}
if (rx_audioproc_->noise_suppression()->Enable(enable) != 0) {
_engineStatisticsPtr->SetLastError(
VE_APM_ERROR, kTraceError, "SetRxNsStatus() failed to set NS state");
return -1;
}
_rxNsIsEnabled = enable;
channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
return 0;
}
int Channel::GetRxNsStatus(bool& enabled, NsModes& mode) {
bool enable = rx_audioproc_->noise_suppression()->is_enabled();
NoiseSuppression::Level ncLevel = rx_audioproc_->noise_suppression()->level();
enabled = enable;
switch (ncLevel) {
case NoiseSuppression::kLow:
mode = kNsLowSuppression;
break;
case NoiseSuppression::kModerate:
mode = kNsModerateSuppression;
break;
case NoiseSuppression::kHigh:
mode = kNsHighSuppression;
break;
case NoiseSuppression::kVeryHigh:
mode = kNsVeryHighSuppression;
break;
}
return 0;
}
#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
int Channel::SetLocalSSRC(unsigned int ssrc) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetLocalSSRC()");

View File

@@ -83,22 +83,12 @@ class VoERtcpObserver;
class ChannelState {
public:
struct State {
State()
: rx_apm_is_enabled(false),
input_external_media(false),
output_file_playing(false),
input_file_playing(false),
playing(false),
sending(false),
receiving(false) {}
bool rx_apm_is_enabled;
bool input_external_media;
bool output_file_playing;
bool input_file_playing;
bool playing;
bool sending;
bool receiving;
bool input_external_media = false;
bool output_file_playing = false;
bool input_file_playing = false;
bool playing = false;
bool sending = false;
bool receiving = false;
};
ChannelState() {}
@@ -114,11 +104,6 @@ class ChannelState {
return state_;
}
void SetRxApmIsEnabled(bool enable) {
rtc::CritScope lock(&lock_);
state_.rx_apm_is_enabled = enable;
}
void SetInputExternalMedia(bool enable) {
rtc::CritScope lock(&lock_);
state_.input_external_media = enable;
@@ -308,20 +293,7 @@ class Channel
int SetSendTelephoneEventPayloadType(int payload_type);
// VoEAudioProcessingImpl
int UpdateRxVadDetection(AudioFrame& audioFrame);
int RegisterRxVadObserver(VoERxVadCallback& observer);
int DeRegisterRxVadObserver();
int VoiceActivityIndicator(int& activity);
#ifdef WEBRTC_VOICE_ENGINE_AGC
int SetRxAgcStatus(bool enable, AgcModes mode);
int GetRxAgcStatus(bool& enabled, AgcModes& mode);
int SetRxAgcConfig(AgcConfig config);
int GetRxAgcConfig(AgcConfig& config);
#endif
#ifdef WEBRTC_VOICE_ENGINE_NR
int SetRxNsStatus(bool enable, NsModes mode);
int GetRxNsStatus(bool& enabled, NsModes& mode);
#endif
// VoERTP_RTCP
int SetLocalSSRC(unsigned int ssrc);
@@ -375,8 +347,6 @@ class Channel
// From ACMVADCallback in the ACM
int32_t InFrameType(FrameType frame_type) override;
int32_t OnRxVadDetected(int vadDecision);
// From RtpData in the RTP/RTCP module
int32_t OnReceivedPayloadData(const uint8_t* payloadData,
size_t payloadSize,
@@ -541,9 +511,6 @@ class Channel
rtc::CriticalSection* _callbackCritSectPtr; // owned by base
Transport* _transportPtr; // WebRtc socket or external transport
RMSLevel rms_level_;
std::unique_ptr<AudioProcessing> rx_audioproc_; // far end AudioProcessing
VoERxVadCallback* _rxVadObserverPtr;
int32_t _oldVadDecision;
int32_t _sendFrameType; // Send data is voice, 1-voice, 0-otherwise
// VoEBase
bool _externalMixing;
@@ -563,9 +530,6 @@ class Channel
// VoEVideoSync
rtc::CriticalSection video_sync_lock_;
// VoEAudioProcessing
bool _RxVadDetection;
bool _rxAgcIsEnabled;
bool _rxNsIsEnabled;
bool restored_packet_in_use_;
// RtcpBandwidthObserver
std::unique_ptr<VoERtcpObserver> rtcp_observer_;

View File

@@ -43,15 +43,6 @@ namespace webrtc {
class VoiceEngine;
// VoERxVadCallback
class WEBRTC_DLLEXPORT VoERxVadCallback {
public:
virtual void OnRxVad(int channel, int vadDecision) = 0;
protected:
virtual ~VoERxVadCallback() {}
};
// VoEAudioProcessing
class WEBRTC_DLLEXPORT VoEAudioProcessing {
public:
@@ -126,42 +117,6 @@ class WEBRTC_DLLEXPORT VoEAudioProcessing {
virtual int EnableHighPassFilter(bool enable) = 0;
virtual bool IsHighPassFilterEnabled() = 0;
// Sets status and mode of the receiving-side (Rx) NS.
// The Rx NS reduces noise in the received signal for the specified
// |channel|. Intended for advanced usage only.
virtual int SetRxNsStatus(int channel,
bool enable,
NsModes mode = kNsUnchanged) = 0;
// Gets status and mode of the receiving-side NS.
virtual int GetRxNsStatus(int channel, bool& enabled, NsModes& mode) = 0;
// Sets status and mode of the receiving-side (Rx) AGC.
// The Rx AGC adjusts the received signal to an appropriate level
// for the specified |channel|. Intended for advanced usage only.
virtual int SetRxAgcStatus(int channel,
bool enable,
AgcModes mode = kAgcUnchanged) = 0;
// Gets status and mode of the receiving-side AGC.
virtual int GetRxAgcStatus(int channel, bool& enabled, AgcModes& mode) = 0;
// Modifies the AGC configuration on the receiving side for the
// specified |channel|.
virtual int SetRxAgcConfig(int channel, AgcConfig config) = 0;
// Gets the AGC configuration on the receiving side.
virtual int GetRxAgcConfig(int channel, AgcConfig& config) = 0;
// Registers a VoERxVadCallback |observer| instance and enables Rx VAD
// notifications for the specified |channel|.
virtual int RegisterRxVadObserver(int channel,
VoERxVadCallback& observer) = 0;
// Deregisters the VoERxVadCallback |observer| and disables Rx VAD
// notifications for the specified |channel|.
virtual int DeRegisterRxVadObserver(int channel) = 0;
// Gets the VAD/DTX activity for the specified |channel|.
// The returned value is 1 if frames of audio contains speech
// and 0 if silence. The output is always 1 if VAD is disabled.

View File

@@ -64,46 +64,3 @@ TEST_F(AgcConfigTest, CanGetAndSetAgcStatus) {
EXPECT_EQ(agc_config.targetLeveldBOv,
actual_config.targetLeveldBOv);
}
TEST_F(AgcConfigTest, HasCorrectDefaultRxConfiguration) {
webrtc::AgcConfig agc_config;
EXPECT_EQ(0, voe_apm_->GetRxAgcConfig(channel_, agc_config));
EXPECT_EQ(default_agc_config_.targetLeveldBOv, agc_config.targetLeveldBOv);
EXPECT_EQ(default_agc_config_.digitalCompressionGaindB,
agc_config.digitalCompressionGaindB);
EXPECT_EQ(default_agc_config_.limiterEnable, agc_config.limiterEnable);
}
TEST_F(AgcConfigTest, DealsWithInvalidRxParameters) {
webrtc::AgcConfig agc_config = default_agc_config_;
agc_config.digitalCompressionGaindB = 91;
EXPECT_EQ(-1, voe_apm_->SetRxAgcConfig(channel_, agc_config)) <<
"Should not be able to set RX gain to more than 90 dB.";
EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
agc_config = default_agc_config_;
agc_config.targetLeveldBOv = 32;
EXPECT_EQ(-1, voe_apm_->SetRxAgcConfig(channel_, agc_config)) <<
"Should not be able to set target level to more than 31.";
EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
}
TEST_F(AgcConfigTest, CanGetAndSetRxAgcStatus) {
webrtc::AgcConfig agc_config;
agc_config.digitalCompressionGaindB = 17;
agc_config.targetLeveldBOv = 11;
agc_config.limiterEnable = false;
webrtc::AgcConfig actual_config;
EXPECT_EQ(0, voe_apm_->SetRxAgcConfig(channel_, agc_config));
EXPECT_EQ(0, voe_apm_->GetRxAgcConfig(channel_, actual_config));
EXPECT_EQ(agc_config.digitalCompressionGaindB,
actual_config.digitalCompressionGaindB);
EXPECT_EQ(agc_config.limiterEnable,
actual_config.limiterEnable);
EXPECT_EQ(agc_config.targetLeveldBOv,
actual_config.targetLeveldBOv);
}

View File

@@ -12,22 +12,6 @@
#include "webrtc/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
#include "webrtc/voice_engine/test/auto_test/voe_standard_test.h"
class RxCallback : public webrtc::VoERxVadCallback {
public:
RxCallback() :
vad_decision(-1) {
}
virtual void OnRxVad(int, int vadDecision) {
char msg[128];
sprintf(msg, "RX VAD detected decision %d \n", vadDecision);
TEST_LOG("%s", msg);
vad_decision = vadDecision;
}
int vad_decision;
};
class AudioProcessingTest : public AfterStreamingFixture {
protected:
// Note: Be careful with this one, it is used in the
@@ -43,17 +27,6 @@ class AudioProcessingTest : public AfterStreamingFixture {
EXPECT_EQ(agc_mode_to_set, agc_mode);
}
void TryEnablingRxAgcWithMode(webrtc::AgcModes agc_mode_to_set) {
EXPECT_EQ(0, voe_apm_->SetRxAgcStatus(channel_, true, agc_mode_to_set));
bool rx_agc_enabled = false;
webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
EXPECT_EQ(0, voe_apm_->GetRxAgcStatus(channel_, rx_agc_enabled, agc_mode));
EXPECT_TRUE(rx_agc_enabled);
EXPECT_EQ(agc_mode_to_set, agc_mode);
}
// EC modes can map to other EC modes, so we have a separate parameter
// for what we expect the EC mode to be set to.
void TryEnablingEcWithMode(webrtc::EcModes ec_mode_to_set,
@@ -94,18 +67,6 @@ class AudioProcessingTest : public AfterStreamingFixture {
EXPECT_EQ(expected_ns_mode, ns_mode);
}
void TryEnablingRxNsWithMode(webrtc::NsModes ns_mode_to_set,
webrtc::NsModes expected_ns_mode) {
EXPECT_EQ(0, voe_apm_->SetRxNsStatus(channel_, true, ns_mode_to_set));
bool ns_status = true;
webrtc::NsModes ns_mode = webrtc::kNsDefault;
EXPECT_EQ(0, voe_apm_->GetRxNsStatus(channel_, ns_status, ns_mode));
EXPECT_TRUE(ns_status);
EXPECT_EQ(expected_ns_mode, ns_mode);
}
void TryDetectingSilence() {
// Here, speech is running. Shut down speech.
EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, true));
@@ -199,26 +160,6 @@ TEST_F(AudioProcessingTest, CanEnableAndDisableEcModeSeveralTimesInARow) {
EXPECT_EQ(webrtc::kEcAec, ec_mode);
}
// TODO(phoglund): Reenable below test when it's no longer flaky.
TEST_F(AudioProcessingTest, DISABLED_TestVoiceActivityDetectionWithObserver) {
RxCallback rx_callback;
EXPECT_EQ(0, voe_apm_->RegisterRxVadObserver(channel_, rx_callback));
// The extra sleeps are to allow decisions some time to propagate to the
// observer.
TryDetectingSilence();
Sleep(100);
EXPECT_EQ(0, rx_callback.vad_decision);
TryDetectingSpeechAfterSilence();
Sleep(100);
EXPECT_EQ(1, rx_callback.vad_decision);
EXPECT_EQ(0, voe_apm_->DeRegisterRxVadObserver(channel_));
}
#endif // !WEBRTC_IOS && !WEBRTC_ANDROID
TEST_F(AudioProcessingTest, EnablingEcAecmShouldEnableEcAecm) {
@@ -248,25 +189,6 @@ TEST_F(AudioProcessingTest, CanSetAecmMode) {
TryEnablingAecmWithMode(webrtc::kAecmSpeakerphone, false);
}
TEST_F(AudioProcessingTest, RxAgcShouldBeOffByDefault) {
bool rx_agc_enabled = true;
webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
EXPECT_EQ(0, voe_apm_->GetRxAgcStatus(channel_, rx_agc_enabled, agc_mode));
EXPECT_FALSE(rx_agc_enabled);
EXPECT_EQ(webrtc::kAgcAdaptiveDigital, agc_mode);
}
TEST_F(AudioProcessingTest, CanTurnOnDigitalRxAcg) {
TryEnablingRxAgcWithMode(webrtc::kAgcAdaptiveDigital);
TryEnablingRxAgcWithMode(webrtc::kAgcFixedDigital);
}
TEST_F(AudioProcessingTest, CannotTurnOnAdaptiveAnalogRxAgc) {
EXPECT_EQ(-1, voe_apm_->SetRxAgcStatus(
channel_, true, webrtc::kAgcAdaptiveAnalog));
}
TEST_F(AudioProcessingTest, NsIsOffWithModerateSuppressionByDefault) {
bool ns_status = true;
webrtc::NsModes ns_mode = webrtc::kNsDefault;
@@ -294,33 +216,6 @@ TEST_F(AudioProcessingTest, CanSetNsMode) {
webrtc::kNsModerateSuppression);
}
TEST_F(AudioProcessingTest, RxNsIsOffWithModerateSuppressionByDefault) {
bool ns_status = true;
webrtc::NsModes ns_mode = webrtc::kNsDefault;
EXPECT_EQ(0, voe_apm_->GetRxNsStatus(channel_, ns_status, ns_mode));
EXPECT_FALSE(ns_status);
EXPECT_EQ(webrtc::kNsModerateSuppression, ns_mode);
}
TEST_F(AudioProcessingTest, CanSetRxNsMode) {
EXPECT_EQ(0, voe_apm_->SetRxNsStatus(channel_, true));
// See comments on the regular NS test above.
TryEnablingRxNsWithMode(webrtc::kNsHighSuppression,
webrtc::kNsHighSuppression);
TryEnablingRxNsWithMode(webrtc::kNsLowSuppression,
webrtc::kNsLowSuppression);
TryEnablingRxNsWithMode(webrtc::kNsModerateSuppression,
webrtc::kNsModerateSuppression);
TryEnablingRxNsWithMode(webrtc::kNsVeryHighSuppression,
webrtc::kNsVeryHighSuppression);
TryEnablingRxNsWithMode(webrtc::kNsConference,
webrtc::kNsHighSuppression);
TryEnablingRxNsWithMode(webrtc::kNsDefault,
webrtc::kNsModerateSuppression);
}
TEST_F(AudioProcessingTest, VadIsDisabledByDefault) {
bool vad_enabled;
bool disabled_dtx;

View File

@@ -220,10 +220,8 @@ void RunTest(std::string out_path) {
CodecInst cinst;
bool enable_aec = false;
bool enable_agc = false;
bool enable_rx_agc = false;
bool enable_cng = false;
bool enable_ns = false;
bool enable_rx_ns = false;
bool typing_detection = false;
bool muted = false;
bool opus_stereo = false;
@@ -586,24 +584,6 @@ void RunTest(std::string out_path) {
// Will use plughw for hardware devices
res = hardware->SetRecordingDevice(num_rd);
VALIDATE;
} else if (option_selection == option_index++) {
// Remote AGC
enable_rx_agc = !enable_rx_agc;
res = apm->SetRxAgcStatus(chan, enable_rx_agc);
VALIDATE;
if (enable_rx_agc)
printf("\n Receive-side AGC is now on! \n");
else
printf("\n Receive-side AGC is now off! \n");
} else if (option_selection == option_index++) {
// Remote NS
enable_rx_ns = !enable_rx_ns;
res = apm->SetRxNsStatus(chan, enable_rx_ns);
VALIDATE;
if (enable_rx_ns)
printf("\n Receive-side NS is now on! \n");
else
printf("\n Receive-side NS is now off! \n");
} else if (option_selection == option_index++) {
AgcModes agcmode;
bool enable;

View File

@@ -313,151 +313,6 @@ int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig& config) {
#endif
}
int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
bool enable,
NsModes mode) {
#ifdef WEBRTC_VOICE_ENGINE_NR
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"SetRxNsStatus() failed to locate channel");
return -1;
}
return channelPtr->SetRxNsStatus(enable, mode);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
"SetRxNsStatus() NS is not supported");
return -1;
#endif
}
int VoEAudioProcessingImpl::GetRxNsStatus(int channel,
bool& enabled,
NsModes& mode) {
#ifdef WEBRTC_VOICE_ENGINE_NR
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetRxNsStatus() failed to locate channel");
return -1;
}
return channelPtr->GetRxNsStatus(enabled, mode);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
"GetRxNsStatus() NS is not supported");
return -1;
#endif
}
int VoEAudioProcessingImpl::SetRxAgcStatus(int channel,
bool enable,
AgcModes mode) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetRxAgcStatus(channel=%d, enable=%d, mode=%d)", channel,
(int)enable, (int)mode);
#ifdef WEBRTC_VOICE_ENGINE_AGC
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"SetRxAgcStatus() failed to locate channel");
return -1;
}
return channelPtr->SetRxAgcStatus(enable, mode);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
"SetRxAgcStatus() Agc is not supported");
return -1;
#endif
}
int VoEAudioProcessingImpl::GetRxAgcStatus(int channel,
bool& enabled,
AgcModes& mode) {
#ifdef WEBRTC_VOICE_ENGINE_AGC
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetRxAgcStatus() failed to locate channel");
return -1;
}
return channelPtr->GetRxAgcStatus(enabled, mode);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
"GetRxAgcStatus() Agc is not supported");
return -1;
#endif
}
int VoEAudioProcessingImpl::SetRxAgcConfig(int channel, AgcConfig config) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetRxAgcConfig(channel=%d)", channel);
#ifdef WEBRTC_VOICE_ENGINE_AGC
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"SetRxAgcConfig() failed to locate channel");
return -1;
}
return channelPtr->SetRxAgcConfig(config);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
"SetRxAgcConfig() Agc is not supported");
return -1;
#endif
}
int VoEAudioProcessingImpl::GetRxAgcConfig(int channel, AgcConfig& config) {
#ifdef WEBRTC_VOICE_ENGINE_AGC
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetRxAgcConfig() failed to locate channel");
return -1;
}
return channelPtr->GetRxAgcConfig(config);
#else
_shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
"GetRxAgcConfig() Agc is not supported");
return -1;
#endif
}
bool VoEAudioProcessing::DriftCompensationSupported() {
#if defined(WEBRTC_DRIFT_COMPENSATION_SUPPORTED)
return true;
@@ -724,42 +579,6 @@ bool VoEAudioProcessingImpl::IsHighPassFilterEnabled() {
return _shared->audio_processing()->high_pass_filter()->is_enabled();
}
int VoEAudioProcessingImpl::RegisterRxVadObserver(int channel,
VoERxVadCallback& observer) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"RegisterRxVadObserver()");
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"RegisterRxVadObserver() failed to locate channel");
return -1;
}
return channelPtr->RegisterRxVadObserver(observer);
}
int VoEAudioProcessingImpl::DeRegisterRxVadObserver(int channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"DeRegisterRxVadObserver()");
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"DeRegisterRxVadObserver() failed to locate channel");
return -1;
}
return channelPtr->DeRegisterRxVadObserver();
}
int VoEAudioProcessingImpl::VoiceActivityIndicator(int channel) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"VoiceActivityIndicator(channel=%d)", channel);
@@ -772,7 +591,7 @@ int VoEAudioProcessingImpl::VoiceActivityIndicator(int channel) {
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"DeRegisterRxVadObserver() failed to locate channel");
"VoiceActivityIndicator() failed to locate channel");
return -1;
}
int activity(-1);

View File

@@ -31,22 +31,6 @@ class VoEAudioProcessingImpl : public VoEAudioProcessing {
int GetAgcConfig(AgcConfig& config) override;
int SetRxNsStatus(int channel,
bool enable,
NsModes mode = kNsUnchanged) override;
int GetRxNsStatus(int channel, bool& enabled, NsModes& mode) override;
int SetRxAgcStatus(int channel,
bool enable,
AgcModes mode = kAgcUnchanged) override;
int GetRxAgcStatus(int channel, bool& enabled, AgcModes& mode) override;
int SetRxAgcConfig(int channel, AgcConfig config) override;
int GetRxAgcConfig(int channel, AgcConfig& config) override;
int SetEcStatus(bool enable, EcModes mode = kEcUnchanged) override;
int GetEcStatus(bool& enabled, EcModes& mode) override;
int EnableDriftCompensation(bool enable) override;
@@ -63,10 +47,6 @@ class VoEAudioProcessingImpl : public VoEAudioProcessing {
int EnableHighPassFilter(bool enable) override;
bool IsHighPassFilterEnabled() override;
int RegisterRxVadObserver(int channel, VoERxVadCallback& observer) override;
int DeRegisterRxVadObserver(int channel) override;
int VoiceActivityIndicator(int channel) override;
int SetEcMetricsStatus(bool enable) override;