Propagate muted info from VoE Channel to AudioConferenceMixer

Required updating of a few related classes and tests.

BUG=webrtc:5609
NOTRY=True

Review-Url: https://codereview.webrtc.org/1986093002
Cr-Commit-Position: refs/heads/master@{#12794}
This commit is contained in:
henrik.lundin 2016-05-18 05:36:01 -07:00 committed by Commit bot
parent 84f8df71af
commit 42dda50860
5 changed files with 55 additions and 17 deletions

View File

@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INCLUDE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
#include "webrtc/base/checks.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/typedefs.h"
@ -25,8 +26,34 @@ public:
// audio every time it's called.
//
// If it returns -1, the frame will not be added to the mix.
//
// NOTE: This function should not be called. It will remain for a short
// time so that subclasses can override it without getting warnings.
// TODO(henrik.lundin) Remove this function.
virtual int32_t GetAudioFrame(int32_t id,
AudioFrame* audioFrame) = 0;
AudioFrame* audioFrame) {
RTC_CHECK(false);
return -1;
}
// The implementation of GetAudioFrameWithMuted should update audio_frame
// with new audio every time it's called. The return value will be
// interpreted as follows.
enum class AudioFrameInfo {
kNormal, // The samples in audio_frame are valid and should be used.
kMuted, // The samples in audio_frame should not be used, but should be
// implicitly interpreted as zero. Other fields in audio_frame
// may be read and should contain meaningful values.
kError // audio_frame will not be used; the mixer skips this
// participant for the current mix iteration.
};
// Default implementation: bridges to the legacy GetAudioFrame() callback
// so existing subclasses keep working until they override this method.
// A legacy return of -1 maps to kError; anything else maps to kNormal
// (the legacy API had no way to report a muted frame).
virtual AudioFrameInfo GetAudioFrameWithMuted(int32_t id,
                                              AudioFrame* audio_frame) {
  if (GetAudioFrame(id, audio_frame) == -1) {
    return AudioFrameInfo::kError;
  }
  return AudioFrameInfo::kNormal;
}
// Returns true if the participant was mixed this mix iteration.
bool IsMixed() const;

View File

@ -555,13 +555,14 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
audioFrame->sample_rate_hz_ = _outputFrequency;
bool muted = false;
if((*participant)->GetAudioFrame(_id, audioFrame) != 0) {
auto ret = (*participant)->GetAudioFrameWithMuted(_id, audioFrame);
if (ret == MixerParticipant::AudioFrameInfo::kError) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"failed to GetAudioFrame() from participant");
"failed to GetAudioFrameWithMuted() from participant");
_audioFramePool->PushMemory(audioFrame);
continue;
}
const bool muted = (ret == MixerParticipant::AudioFrameInfo::kMuted);
if (_participantList.size() != 1) {
// TODO(wu): Issue 3390, add support for multiple participants case.
audioFrame->ntp_time_ms_ = -1;
@ -720,10 +721,10 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
AudioFrameList* additionalFramesList) const {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"GetAdditionalAudio(additionalFramesList)");
// The GetAudioFrame() callback may result in the participant being removed
// from additionalParticipantList_. If that happens it will invalidate any
// iterators. Create a copy of the participants list such that the list of
// participants can be traversed safely.
// The GetAudioFrameWithMuted() callback may result in the participant being
// removed from additionalParticipantList_. If that happens it will
// invalidate any iterators. Create a copy of the participants list such
// that the list of participants can be traversed safely.
MixerParticipantList additionalParticipantList;
additionalParticipantList.insert(additionalParticipantList.begin(),
_additionalParticipantList.begin(),
@ -741,10 +742,10 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
return;
}
audioFrame->sample_rate_hz_ = _outputFrequency;
bool muted = false;
if((*participant)->GetAudioFrame(_id, audioFrame) != 0) {
auto ret = (*participant)->GetAudioFrameWithMuted(_id, audioFrame);
if (ret == MixerParticipant::AudioFrameInfo::kError) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"failed to GetAudioFrame() from participant");
"failed to GetAudioFrameWithMuted() from participant");
_audioFramePool->PushMemory(audioFrame);
continue;
}
@ -753,7 +754,8 @@ void AudioConferenceMixerImpl::GetAdditionalAudio(
_audioFramePool->PushMemory(audioFrame);
continue;
}
additionalFramesList->push_back(FrameAndMuteInfo(audioFrame, muted));
additionalFramesList->push_back(FrameAndMuteInfo(
audioFrame, ret == MixerParticipant::AudioFrameInfo::kMuted));
}
}

View File

@ -476,7 +476,9 @@ bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}
int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
int32_t id,
AudioFrame* audioFrame) {
if (event_log_) {
unsigned int ssrc;
RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
@ -492,7 +494,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
// error so that the audio mixer module doesn't add it to the mix. As
// a result, it won't be played out and the actions skipped here are
// irrelevant.
return -1;
return MixerParticipant::AudioFrameInfo::kError;
}
RTC_DCHECK(!muted);
@ -621,7 +623,8 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame) {
}
}
return 0;
return muted ? MixerParticipant::AudioFrameInfo::kMuted
: MixerParticipant::AudioFrameInfo::kNormal;
}
int32_t Channel::NeededFrequency(int32_t id) const {

View File

@ -394,7 +394,9 @@ class Channel
bool SendRtcp(const uint8_t* data, size_t len) override;
// From MixerParticipant
int32_t GetAudioFrame(int32_t id, AudioFrame* audioFrame) override;
MixerParticipant::AudioFrameInfo GetAudioFrameWithMuted(
int32_t id,
AudioFrame* audioFrame) override;
int32_t NeededFrequency(int32_t id) const override;
// From FileCallback

View File

@ -153,7 +153,11 @@ int VoEExternalMediaImpl::GetAudioFrame(int channel, int desired_sample_rate_hz,
}
frame->sample_rate_hz_ =
desired_sample_rate_hz == 0 ? -1 : desired_sample_rate_hz;
return channelPtr->GetAudioFrame(channel, frame);
auto ret = channelPtr->GetAudioFrameWithMuted(channel, frame);
if (ret == MixerParticipant::AudioFrameInfo::kMuted) {
frame->Mute();
}
return ret == MixerParticipant::AudioFrameInfo::kError ? -1 : 0;
}
int VoEExternalMediaImpl::SetExternalMixing(int channel, bool enable) {