Remove VoEExternalMedia interface.

BUG=webrtc:4690
Review-Url: https://codereview.webrtc.org/2645033002
Cr-Commit-Position: refs/heads/master@{#16608}

parent 4555036ce0
commit e374e0139b
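For context, the interface removed here let an application install a VoEMediaProcess callback on a channel's capture or playback path. Below is a minimal usage sketch written against the tree as it looked before this CL; |voe| and |channel| are assumed to be a valid VoiceEngine* and channel id created elsewhere, and HalfGainProcessor / InstallHalfGain are illustrative names, not part of WebRTC.

// Sketch only: compiles against webrtc/voice_engine/include/voe_external_media.h
// as it existed before this CL.
#include "webrtc/voice_engine/include/voe_external_media.h"

class HalfGainProcessor : public webrtc::VoEMediaProcess {
 public:
  void Process(int channel, webrtc::ProcessingTypes type,
               int16_t audio10ms[], size_t length,
               int samplingFreq, bool isStereo) override {
    // Modify the 10 ms block in place; samples are interleaved L/R when stereo.
    const size_t samples = isStereo ? 2 * length : length;
    for (size_t i = 0; i < samples; ++i)
      audio10ms[i] = static_cast<int16_t>(audio10ms[i] / 2);
  }
};

void InstallHalfGain(webrtc::VoiceEngine* voe, int channel,
                     HalfGainProcessor* processor) {
  webrtc::VoEExternalMedia* xmedia =
      webrtc::VoEExternalMedia::GetInterface(voe);
  xmedia->RegisterExternalMediaProcessing(
      channel, webrtc::kRecordingPerChannel, *processor);
  // ... run the call; Process() is invoked for every 10 ms capture block ...
  xmedia->DeRegisterExternalMediaProcessing(channel,
                                            webrtc::kRecordingPerChannel);
  xmedia->Release();
}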
@@ -142,14 +142,6 @@ enum FileFormats {
kFileFormatPcm32kHzFile = 9
};

enum ProcessingTypes {
kPlaybackPerChannel = 0,
kPlaybackAllChannelsMixed,
kRecordingPerChannel,
kRecordingAllChannelsMixed,
kRecordingPreprocessing
};

enum FrameType {
kEmptyFrame = 0,
kAudioFrameSpeech = 1,
@@ -159,17 +159,6 @@ class MockVoiceEngine : public VoiceEngineImpl {
MOCK_METHOD2(SetOpusMaxPlaybackRate, int(int channel, int frequency_hz));
MOCK_METHOD2(SetOpusDtx, int(int channel, bool enable_dtx));

// VoEExternalMedia
MOCK_METHOD3(RegisterExternalMediaProcessing,
int(int channel,
ProcessingTypes type,
VoEMediaProcess& processObject));
MOCK_METHOD2(DeRegisterExternalMediaProcessing,
int(int channel, ProcessingTypes type));
MOCK_METHOD3(GetAudioFrame,
int(int channel, int desired_sample_rate_hz, AudioFrame* frame));
MOCK_METHOD2(SetExternalMixing, int(int channel, bool enable));

// VoEFile
MOCK_METHOD7(StartPlayingFileLocally,
int(int channel,
@@ -78,7 +78,6 @@ rtc_static_library("voice_engine") {
"include/voe_base.h",
"include/voe_codec.h",
"include/voe_errors.h",
"include/voe_external_media.h",
"include/voe_file.h",
"include/voe_hardware.h",
"include/voe_neteq_stats.h",
@@ -106,8 +105,6 @@ rtc_static_library("voice_engine") {
"voe_base_impl.h",
"voe_codec_impl.cc",
"voe_codec_impl.h",
"voe_external_media_impl.cc",
"voe_external_media_impl.h",
"voe_file_impl.cc",
"voe_file_impl.h",
"voe_hardware_impl.cc",
@@ -277,7 +274,6 @@ if (rtc_include_tests) {
"test/channel_transport/udp_socket_manager_unittest.cc",
"test/channel_transport/udp_socket_wrapper_unittest.cc",
"test/channel_transport/udp_transport_unittest.cc",
"transmit_mixer_unittest.cc",
"transport_feedback_packet_loss_tracker_unittest.cc",
"utility_unittest.cc",
"voe_audio_processing_unittest.cc",
@@ -371,7 +367,6 @@ if (rtc_include_tests) {
"test/auto_test/standard/codec_before_streaming_test.cc",
"test/auto_test/standard/codec_test.cc",
"test/auto_test/standard/dtmf_test.cc",
"test/auto_test/standard/external_media_test.cc",
"test/auto_test/standard/file_before_streaming_test.cc",
"test/auto_test/standard/file_test.cc",
"test/auto_test/standard/hardware_before_initializing_test.cc",
@@ -35,7 +35,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
@@ -658,18 +657,6 @@ MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
muted = false; // We may have added non-zero samples.
}

// External media
if (_outputExternalMedia) {
rtc::CritScope cs(&_callbackCritSect);
const bool isStereo = (audioFrame->num_channels_ == 2);
if (_outputExternalMediaCallbackPtr) {
_outputExternalMediaCallbackPtr->Process(
_channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
isStereo);
}
}

// Record playout if enabled
{
rtc::CritScope cs(&_fileCritSect);
@@ -863,9 +850,6 @@ Channel::Channel(int32_t channelId,
_outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
_outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
_outputFileRecording(false),
_outputExternalMedia(false),
_inputExternalMediaCallbackPtr(NULL),
_outputExternalMediaCallbackPtr(NULL),
_timeStamp(0), // This is just an offset, RTP module will add it's own
// random offset
ntp_estimator_(Clock::GetRealTimeClock()),
@@ -884,7 +868,6 @@ Channel::Channel(int32_t channelId,
_callbackCritSectPtr(NULL),
_transportPtr(NULL),
_sendFrameType(0),
_externalMixing(false),
_mixFileWithMicrophone(false),
input_mute_(false),
previous_frame_muted_(false),
@@ -942,12 +925,6 @@ Channel::~Channel() {
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::~Channel() - dtor");

if (_outputExternalMedia) {
DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
}
if (channel_state_.Get().input_external_media) {
DeRegisterExternalMediaProcessing(kRecordingPerChannel);
}
StopSend();
StopPlayout();

@@ -1134,14 +1111,12 @@ int32_t Channel::StartPlayout() {
return 0;
}

if (!_externalMixing) {
// Add participant as candidates for mixing.
if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
_engineStatisticsPtr->SetLastError(
VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
"StartPlayout() failed to add participant to mixer");
return -1;
}
// Add participant as candidates for mixing.
if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
_engineStatisticsPtr->SetLastError(
VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
"StartPlayout() failed to add participant to mixer");
return -1;
}

channel_state_.SetPlaying(true);
@@ -1158,14 +1133,12 @@ int32_t Channel::StopPlayout() {
return 0;
}

if (!_externalMixing) {
// Remove participant as candidates for mixing
if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
_engineStatisticsPtr->SetLastError(
VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
"StopPlayout() failed to remove participant from mixer");
return -1;
}
// Remove participant as candidates for mixing
if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
_engineStatisticsPtr->SetLastError(
VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
"StopPlayout() failed to remove participant from mixer");
return -1;
}

channel_state_.SetPlaying(false);
@@ -2668,17 +2641,6 @@ uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
bool is_muted = InputMute(); // Cache locally as InputMute() takes a lock.
AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);

if (channel_state_.Get().input_external_media) {
rtc::CritScope cs(&_callbackCritSect);
const bool isStereo = (_audioFrame.num_channels_ == 2);
if (_inputExternalMediaCallbackPtr) {
_inputExternalMediaCallbackPtr->Process(
_channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
_audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
isStereo);
}
}

if (_includeAudioLevelIndication) {
size_t length =
_audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
@@ -2770,85 +2732,6 @@ void Channel::OnOverheadChanged(size_t overhead_bytes_per_packet) {
UpdateOverheadForEncoder();
}

int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
VoEMediaProcess& processObject) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::RegisterExternalMediaProcessing()");

rtc::CritScope cs(&_callbackCritSect);

if (kPlaybackPerChannel == type) {
if (_outputExternalMediaCallbackPtr) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceError,
"Channel::RegisterExternalMediaProcessing() "
"output external media already enabled");
return -1;
}
_outputExternalMediaCallbackPtr = &processObject;
_outputExternalMedia = true;
} else if (kRecordingPerChannel == type) {
if (_inputExternalMediaCallbackPtr) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceError,
"Channel::RegisterExternalMediaProcessing() "
"output external media already enabled");
return -1;
}
_inputExternalMediaCallbackPtr = &processObject;
channel_state_.SetInputExternalMedia(true);
}
return 0;
}

int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::DeRegisterExternalMediaProcessing()");

rtc::CritScope cs(&_callbackCritSect);

if (kPlaybackPerChannel == type) {
if (!_outputExternalMediaCallbackPtr) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"Channel::DeRegisterExternalMediaProcessing() "
"output external media already disabled");
return 0;
}
_outputExternalMedia = false;
_outputExternalMediaCallbackPtr = NULL;
} else if (kRecordingPerChannel == type) {
if (!_inputExternalMediaCallbackPtr) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceWarning,
"Channel::DeRegisterExternalMediaProcessing() "
"input external media already disabled");
return 0;
}
channel_state_.SetInputExternalMedia(false);
_inputExternalMediaCallbackPtr = NULL;
}

return 0;
}

int Channel::SetExternalMixing(bool enabled) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::SetExternalMixing(enabled=%d)", enabled);

if (channel_state_.Get().playing) {
_engineStatisticsPtr->SetLastError(
VE_INVALID_OPERATION, kTraceError,
"Channel::SetExternalMixing() "
"external mixing cannot be changed while playing.");
return -1;
}

_externalMixing = enabled;

return 0;
}

int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
return audio_coding_->GetNetworkStatistics(&stats);
}
@@ -55,7 +55,6 @@ class RtpReceiver;
class RTPReceiverAudio;
class RtpRtcp;
class TelephoneEventHandler;
class VoEMediaProcess;
class VoERTPObserver;
class VoiceEngineObserver;

@@ -83,7 +82,6 @@ class VoERtcpObserver;
class ChannelState {
public:
struct State {
bool input_external_media = false;
bool output_file_playing = false;
bool input_file_playing = false;
bool playing = false;
@@ -103,11 +101,6 @@ class ChannelState {
return state_;
}

void SetInputExternalMedia(bool enable) {
rtc::CritScope lock(&lock_);
state_.input_external_media = enable;
}

void SetOutputFilePlaying(bool enable) {
rtc::CritScope lock(&lock_);
state_.output_file_playing = enable;
@@ -251,12 +244,6 @@ class Channel

void SetMixWithMicStatus(bool mix);

// VoEExternalMediaProcessing
int RegisterExternalMediaProcessing(ProcessingTypes type,
VoEMediaProcess& processObject);
int DeRegisterExternalMediaProcessing(ProcessingTypes type);
int SetExternalMixing(bool enabled);

// VoEVolumeControl
int GetSpeechOutputLevel(uint32_t& level) const;
int GetSpeechOutputLevelFullRange(uint32_t& level) const;
@@ -379,7 +366,6 @@ class Channel
rtc::CritScope cs(&_callbackCritSect);
return _externalTransport;
}
bool ExternalMixing() const { return _externalMixing; }
RtpRtcp* RtpRtcpModulePtr() const { return _rtpRtcpModule.get(); }
int8_t OutputEnergyLevel() const { return _outputAudioLevel.Level(); }
uint32_t Demultiplex(const AudioFrame& audioFrame);
@@ -469,9 +455,6 @@ class Channel
int _outputFilePlayerId;
int _outputFileRecorderId;
bool _outputFileRecording;
bool _outputExternalMedia;
VoEMediaProcess* _inputExternalMediaCallbackPtr;
VoEMediaProcess* _outputExternalMediaCallbackPtr;
uint32_t _timeStamp;

RemoteNtpTimeEstimator ntp_estimator_ GUARDED_BY(ts_stats_lock_);
@@ -504,7 +487,6 @@ class Channel
RmsLevel rms_level_;
int32_t _sendFrameType; // Send data is voice, 1-voice, 0-otherwise
// VoEBase
bool _externalMixing;
bool _mixFileWithMicrophone;
// VoEVolumeControl
bool input_mute_ GUARDED_BY(volume_settings_critsect_);
@@ -1,85 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
#define WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H

#include "webrtc/common_types.h"

namespace webrtc {

class VoiceEngine;
class AudioFrame;

class WEBRTC_DLLEXPORT VoEMediaProcess {
public:
// The VoiceEngine user should override the Process() method in a
// derived class. Process() will be called when audio is ready to
// be processed. The audio can be accessed in several different modes
// given by the |type| parameter. The function should modify the
// original data and ensure that it is copied back to the |audio10ms|
// array. The number of samples in the frame cannot be changed.
// The sampling frequency will depend upon the codec used.
// If |isStereo| is true, audio10ms will contain 16-bit PCM data
// samples in interleaved stereo format (L0,R0,L1,R1,...).
virtual void Process(int channel,
ProcessingTypes type,
int16_t audio10ms[],
size_t length,
int samplingFreq,
bool isStereo) = 0;

protected:
virtual ~VoEMediaProcess() {}
};

class WEBRTC_DLLEXPORT VoEExternalMedia {
public:
// Factory for the VoEExternalMedia sub-API. Increases an internal
// reference counter if successful. Returns NULL if the API is not
// supported or if construction fails.
static VoEExternalMedia* GetInterface(VoiceEngine* voiceEngine);

// Releases the VoEExternalMedia sub-API and decreases an internal
// reference counter. Returns the new reference count. This value should
// be zero for all sub-API:s before the VoiceEngine object can be safely
// deleted.
virtual int Release() = 0;

// Installs a VoEMediaProcess derived instance and activates external
// media for the specified |channel| and |type|.
virtual int RegisterExternalMediaProcessing(
int channel,
ProcessingTypes type,
VoEMediaProcess& processObject) = 0;

// Removes the VoEMediaProcess derived instance and deactivates external
// media for the specified |channel| and |type|.
virtual int DeRegisterExternalMediaProcessing(int channel,
ProcessingTypes type) = 0;

// Pulls an audio frame from the specified |channel| for external mixing.
// If the |desired_sample_rate_hz| is 0, the signal will be returned with
// its native frequency, otherwise it will be resampled. Valid frequencies
// are 16, 22, 32, 44 or 48 kHz.
virtual int GetAudioFrame(int channel,
int desired_sample_rate_hz,
AudioFrame* frame) = 0;

// Sets the state of external mixing. Cannot be changed during playback.
virtual int SetExternalMixing(int channel, bool enable) = 0;

protected:
VoEExternalMedia() {}
virtual ~VoEExternalMedia() {}
};

} // namespace webrtc

#endif // WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
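The comments in the deleted header above also describe the external-mixing path: SetExternalMixing() takes a channel out of VoiceEngine's own output mixer, and GetAudioFrame() then lets the application pull that channel's decoded audio itself, optionally resampled (0 requests the native rate). A hedged sketch of that pull loop, again against a pre-removal tree; |voe| and |channel| are assumed to exist, and PullDecodedAudio is an illustrative name.

// Sketch only (pre-removal API): switch a channel to external mixing and pull
// its decoded audio in 10 ms frames. Error handling is trimmed, and playout
// must be started on the channel (e.g. via VoEBase::StartPlayout) after
// external mixing is enabled and before frames are pulled.
#include "webrtc/modules/include/module_common_types.h"  // webrtc::AudioFrame
#include "webrtc/voice_engine/include/voe_external_media.h"

void PullDecodedAudio(webrtc::VoiceEngine* voe, int channel) {
  webrtc::VoEExternalMedia* xmedia =
      webrtc::VoEExternalMedia::GetInterface(voe);
  // Must be set while the channel is not playing.
  xmedia->SetExternalMixing(channel, true);
  // ... start playout on |channel| here ...

  webrtc::AudioFrame frame;
  for (int i = 0; i < 100; ++i) {  // roughly one second of audio
    // Request 48 kHz; passing 0 instead returns the frame at its native rate.
    if (xmedia->GetAudioFrame(channel, 48000, &frame) == 0) {
      // frame.data_ now holds frame.samples_per_channel_ samples per channel.
    }
  }

  // ... stop playout, then switch back ...
  xmedia->SetExternalMixing(channel, false);
  xmedia->Release();
}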
@@ -15,7 +15,6 @@
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/utility.h"

@@ -94,8 +93,6 @@ OutputMixer::OutputMixer(uint32_t instanceId) :
_mixerModule(*AudioConferenceMixer::Create(instanceId)),
_audioLevel(),
_instanceId(instanceId),
_externalMediaCallbackPtr(NULL),
_externalMedia(false),
_panLeft(1.0f),
_panRight(1.0f),
_mixingFrequencyHz(8000),
@@ -126,10 +123,6 @@ OutputMixer::~OutputMixer()
{
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::~OutputMixer() - dtor");
if (_externalMedia)
{
DeRegisterExternalMediaProcessing();
}
{
rtc::CritScope cs(&_fileCritSect);
if (output_file_recorder_) {
@@ -160,31 +153,6 @@ OutputMixer::SetAudioProcessingModule(AudioProcessing* audioProcessingModule)
return 0;
}

int OutputMixer::RegisterExternalMediaProcessing(
VoEMediaProcess& proccess_object)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::RegisterExternalMediaProcessing()");

rtc::CritScope cs(&_callbackCritSect);
_externalMediaCallbackPtr = &proccess_object;
_externalMedia = true;

return 0;
}

int OutputMixer::DeRegisterExternalMediaProcessing()
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::DeRegisterExternalMediaProcessing()");

rtc::CritScope cs(&_callbackCritSect);
_externalMedia = false;
_externalMediaCallbackPtr = NULL;

return 0;
}

int32_t
OutputMixer::SetMixabilityStatus(MixerParticipant& participant,
bool mixable)
@@ -479,25 +447,6 @@ OutputMixer::DoOperationsOnCombinedSignal(bool feed_data_to_apm)
}
}

// --- External media processing
{
rtc::CritScope cs(&_callbackCritSect);
if (_externalMedia)
{
const bool is_stereo = (_audioFrame.num_channels_ == 2);
if (_externalMediaCallbackPtr)
{
_externalMediaCallbackPtr->Process(
-1,
kPlaybackAllChannelsMixed,
(int16_t*)_audioFrame.data_,
_audioFrame.samples_per_channel_,
_audioFrame.sample_rate_hz_,
is_stereo);
}
}
}

// --- Measure audio level (0-9) for the combined signal
_audioLevel.ComputeLevel(_audioFrame);

@@ -26,7 +26,6 @@ namespace webrtc {

class AudioProcessing;
class FileWrapper;
class VoEMediaProcess;

namespace voe {

@@ -45,12 +44,6 @@ public:
int32_t SetAudioProcessingModule(
AudioProcessing* audioProcessingModule);

// VoEExternalMedia
int RegisterExternalMediaProcessing(
VoEMediaProcess& proccess_object);

int DeRegisterExternalMediaProcessing();

int32_t MixActiveChannels();

int32_t DoOperationsOnCombinedSignal(bool feed_data_to_apm);
@@ -105,7 +98,6 @@ private:
Statistics* _engineStatisticsPtr;
AudioProcessing* _audioProcessingModulePtr;

rtc::CriticalSection _callbackCritSect;
// Protects output_file_recorder_ and _outputFileRecording.
rtc::CriticalSection _fileCritSect;
AudioConferenceMixer& _mixerModule;
@@ -116,8 +108,6 @@ private:
PushResampler<int16_t> audioproc_resampler_;
AudioLevel _audioLevel; // measures audio level for the combined signal
int _instanceId;
VoEMediaProcess* _externalMediaCallbackPtr;
bool _externalMedia;
float _panLeft;
float _panRight;
int _mixingFrequencyHz;
@@ -1,45 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKE_MEDIA_PROCESS_H_
#define VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKE_MEDIA_PROCESS_H_

#include <math.h>

class FakeMediaProcess : public webrtc::VoEMediaProcess {
public:
FakeMediaProcess() : frequency(0) {}
virtual void Process(int channel,
const webrtc::ProcessingTypes type,
int16_t audio_10ms[],
size_t length,
int sampling_freq_hz,
bool stereo) {
for (size_t i = 0; i < length; i++) {
if (!stereo) {
audio_10ms[i] = static_cast<int16_t>(audio_10ms[i] *
sin(2.0 * 3.14 * frequency * 400.0 / sampling_freq_hz));
} else {
// Interleaved stereo.
audio_10ms[2 * i] = static_cast<int16_t> (
audio_10ms[2 * i] * sin(2.0 * 3.14 *
frequency * 400.0 / sampling_freq_hz));
audio_10ms[2 * i + 1] = static_cast<int16_t> (
audio_10ms[2 * i + 1] * sin(2.0 * 3.14 *
frequency * 400.0 / sampling_freq_hz));
}
frequency++;
}
}

private:
int frequency;
};

#endif // VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKE_MEDIA_PROCESS_H_
@@ -25,7 +25,6 @@ BeforeInitializationFixture::BeforeInitializationFixture()
voe_file_ = webrtc::VoEFile::GetInterface(voice_engine_);
voe_vsync_ = webrtc::VoEVideoSync::GetInterface(voice_engine_);
voe_hardware_ = webrtc::VoEHardware::GetInterface(voice_engine_);
voe_xmedia_ = webrtc::VoEExternalMedia::GetInterface(voice_engine_);
voe_neteq_stats_ = webrtc::VoENetEqStats::GetInterface(voice_engine_);
}

@@ -39,7 +38,6 @@ BeforeInitializationFixture::~BeforeInitializationFixture() {
voe_file_->Release();
voe_vsync_->Release();
voe_hardware_->Release();
voe_xmedia_->Release();
voe_neteq_stats_->Release();

EXPECT_TRUE(webrtc::VoiceEngine::Delete(voice_engine_));

@@ -19,7 +19,6 @@
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_file.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#include "webrtc/voice_engine/include/voe_neteq_stats.h"
@@ -59,7 +58,6 @@ class BeforeInitializationFixture : public testing::Test {
webrtc::VoEFile* voe_file_;
webrtc::VoEVideoSync* voe_vsync_;
webrtc::VoEHardware* voe_hardware_;
webrtc::VoEExternalMedia* voe_xmedia_;
webrtc::VoENetEqStats* voe_neteq_stats_;
};
@@ -1,109 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

#include "webrtc/base/arraysize.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/test/auto_test/fakes/fake_media_process.h"
#include "webrtc/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"

class ExternalMediaTest : public AfterStreamingFixture {
protected:
void TestRegisterExternalMedia(int channel, webrtc::ProcessingTypes type) {
FakeMediaProcess fake_media_process;
EXPECT_EQ(0, voe_xmedia_->RegisterExternalMediaProcessing(
channel, type, fake_media_process));
Sleep(2000);

TEST_LOG("Back to normal.\n");
EXPECT_EQ(0, voe_xmedia_->DeRegisterExternalMediaProcessing(
channel, type));
Sleep(2000);
}
};

TEST_F(ExternalMediaTest,
ManualRegisterExternalMediaProcessingOnAllChannelsAffectsPlayout) {
TEST_LOG("Enabling external media processing: audio should be affected.\n");
TestRegisterExternalMedia(-1, webrtc::kPlaybackAllChannelsMixed);
}

TEST_F(ExternalMediaTest,
ManualRegisterExternalMediaOnSingleChannelAffectsPlayout) {
TEST_LOG("Enabling external media processing: audio should be affected.\n");
TestRegisterExternalMedia(channel_, webrtc::kRecordingPerChannel);
}

TEST_F(ExternalMediaTest,
ManualRegisterExternalMediaOnAllChannelsMixedAffectsRecording) {
SwitchToManualMicrophone();
TEST_LOG("Speak and verify your voice is distorted.\n");
TestRegisterExternalMedia(-1, webrtc::kRecordingAllChannelsMixed);
}

TEST_F(ExternalMediaTest,
ExternalMixingCannotBeChangedDuringPlayback) {
EXPECT_EQ(-1, voe_xmedia_->SetExternalMixing(channel_, true));
EXPECT_EQ(-1, voe_xmedia_->SetExternalMixing(channel_, false));
}

TEST_F(ExternalMediaTest,
ExternalMixingIsRequiredForGetAudioFrame) {
webrtc::AudioFrame frame;
EXPECT_EQ(-1, voe_xmedia_->GetAudioFrame(channel_, 0, &frame));
}

TEST_F(ExternalMediaTest,
ExternalMixingPreventsAndRestoresRegularPlayback) {
PausePlaying();
ASSERT_EQ(0, voe_xmedia_->SetExternalMixing(channel_, true));
TEST_LOG("Verify that no sound is played out.\n");
ResumePlaying();
Sleep(1000);
PausePlaying();
ASSERT_EQ(0, voe_xmedia_->SetExternalMixing(channel_, false));
ResumePlaying();
TEST_LOG("Verify that sound is played out.\n");
ResumePlaying();
Sleep(1000);
}

TEST_F(ExternalMediaTest,
ExternalMixingWorks) {
webrtc::AudioFrame frame;
PausePlaying();
EXPECT_EQ(0, voe_xmedia_->SetExternalMixing(channel_, true));
ResumePlaying();
EXPECT_EQ(0, voe_xmedia_->GetAudioFrame(channel_, 0, &frame));
EXPECT_GT(frame.sample_rate_hz_, 0);
EXPECT_GT(frame.samples_per_channel_, 0U);
PausePlaying();
EXPECT_EQ(0, voe_xmedia_->SetExternalMixing(channel_, false));
ResumePlaying();
}

TEST_F(ExternalMediaTest,
ExternalMixingResamplesToDesiredFrequency) {
const int kValidFrequencies[] = {8000, 16000, 22000, 32000, 48000};
webrtc::AudioFrame frame;
PausePlaying();
EXPECT_EQ(0, voe_xmedia_->SetExternalMixing(channel_, true));
ResumePlaying();
for (size_t i = 0; i < arraysize(kValidFrequencies); i++) {
int f = kValidFrequencies[i];
EXPECT_EQ(0, voe_xmedia_->GetAudioFrame(channel_, f, &frame))
<< "Resampling succeeds for freq=" << f;
EXPECT_EQ(f, frame.sample_rate_hz_);
EXPECT_EQ(static_cast<size_t>(f / 100), frame.samples_per_channel_);
}
PausePlaying();
EXPECT_EQ(0, voe_xmedia_->SetExternalMixing(channel_, false));
ResumePlaying();
}
@@ -40,8 +40,6 @@ void SubAPIManager::DisplayStatus() const {
TEST_LOG(" Base\n");
if (_codec)
TEST_LOG(" Codec\n");
if (_externalMedia)
TEST_LOG(" ExternalMedia\n");
if (_file)
TEST_LOG(" File\n");
if (_hardware)
@@ -64,8 +62,6 @@ void SubAPIManager::DisplayStatus() const {
TEST_LOG(" Base\n");
if (!_codec)
TEST_LOG(" Codec\n");
if (!_externalMedia)
TEST_LOG(" ExternamMedia\n");
if (!_file)
TEST_LOG(" File\n");
if (!_hardware)

@@ -31,7 +31,6 @@ class SubAPIManager {
SubAPIManager()
: _base(true),
_codec(false),
_externalMedia(false),
_file(false),
_hardware(false),
_netEqStats(false),
@@ -41,7 +40,6 @@ class SubAPIManager {
_volumeControl(false),
_apm(false) {
_codec = true;
_externalMedia = true;
_file = true;
_hardware = true;
_netEqStats = true;
@@ -56,7 +54,7 @@ class SubAPIManager {

private:
bool _base, _codec;
bool _externalMedia, _file, _hardware;
bool _file, _hardware;
bool _netEqStats, _network, _rtp_rtcp, _videoSync, _volumeControl, _apm;
};

@@ -26,7 +26,6 @@
#define _TEST_NETWORK_
#define _TEST_VIDEO_SYNC_
#define _TEST_NETEQ_STATS_
#define _TEST_XMEDIA_

#define TESTED_AUDIO_LAYER kAudioPlatformDefault
//#define TESTED_AUDIO_LAYER kAudioLinuxPulse
@@ -29,7 +29,6 @@
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_file.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#include "webrtc/voice_engine/include/voe_neteq_stats.h"
@@ -61,7 +60,6 @@ VoENetwork* netw = NULL;
VoEFile* file = NULL;
VoEVideoSync* vsync = NULL;
VoEHardware* hardware = NULL;
VoEExternalMedia* xmedia = NULL;
VoENetEqStats* neteqst = NULL;

void RunTest(std::string out_path);
@@ -133,7 +131,6 @@ int main(int argc, char** argv) {
file = VoEFile::GetInterface(m_voe);
vsync = VoEVideoSync::GetInterface(m_voe);
hardware = VoEHardware::GetInterface(m_voe);
xmedia = VoEExternalMedia::GetInterface(m_voe);
neteqst = VoENetEqStats::GetInterface(m_voe);

MyObserver my_observer;
@@ -204,9 +201,6 @@ int main(int argc, char** argv) {
if (hardware)
hardware->Release();

if (xmedia)
xmedia->Release();

if (neteqst)
neteqst->Release();
@@ -19,7 +19,6 @@
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/channel_manager.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/utility.h"
#include "webrtc/voice_engine/voe_base_impl.h"
@@ -199,8 +198,6 @@ TransmitMixer::TransmitMixer(uint32_t instanceId) :
_instanceId(instanceId),
_mixFileWithMicrophone(false),
_captureLevel(0),
external_postproc_ptr_(NULL),
external_preproc_ptr_(NULL),
_mute(false),
stereo_codec_(false),
swap_stereo_channels_(false)
@@ -218,8 +215,6 @@ TransmitMixer::~TransmitMixer()
{
_processThreadPtr->DeRegisterModule(&_monitorModule);
}
DeRegisterExternalMediaProcessing(kRecordingAllChannelsMixed);
DeRegisterExternalMediaProcessing(kRecordingPreprocessing);
{
rtc::CritScope cs(&_critSect);
if (file_recorder_) {
@@ -323,17 +318,6 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
nChannels,
samplesPerSec);

{
rtc::CritScope cs(&_callbackCritSect);
if (external_preproc_ptr_) {
external_preproc_ptr_->Process(-1, kRecordingPreprocessing,
_audioFrame.data_,
_audioFrame.samples_per_channel_,
_audioFrame.sample_rate_hz_,
_audioFrame.num_channels_ == 2);
}
}

// --- Near-end audio processing.
ProcessAudio(totalDelayMS, clockDrift, currentMicLevel, keyPressed);

@@ -366,17 +350,6 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
RecordAudioToFile(_audioFrame.sample_rate_hz_);
}

{
rtc::CritScope cs(&_callbackCritSect);
if (external_postproc_ptr_) {
external_postproc_ptr_->Process(-1, kRecordingAllChannelsMixed,
_audioFrame.data_,
_audioFrame.samples_per_channel_,
_audioFrame.sample_rate_hz_,
_audioFrame.num_channels_ == 2);
}
}

// --- Measure audio level of speech after all processing.
_audioLevel.ComputeLevel(_audioFrame);
return 0;
@@ -962,43 +935,6 @@ TransmitMixer::SetMixWithMicStatus(bool mix)
_mixFileWithMicrophone = mix;
}

int TransmitMixer::RegisterExternalMediaProcessing(
VoEMediaProcess* object,
ProcessingTypes type) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::RegisterExternalMediaProcessing()");

rtc::CritScope cs(&_callbackCritSect);
if (!object) {
return -1;
}

// Store the callback object according to the processing type.
if (type == kRecordingAllChannelsMixed) {
external_postproc_ptr_ = object;
} else if (type == kRecordingPreprocessing) {
external_preproc_ptr_ = object;
} else {
return -1;
}
return 0;
}

int TransmitMixer::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::DeRegisterExternalMediaProcessing()");

rtc::CritScope cs(&_callbackCritSect);
if (type == kRecordingAllChannelsMixed) {
external_postproc_ptr_ = NULL;
} else if (type == kRecordingPreprocessing) {
external_preproc_ptr_ = NULL;
} else {
return -1;
}
return 0;
}

int
TransmitMixer::SetMute(bool enable)
{
@@ -1192,12 +1128,6 @@ void TransmitMixer::TypingDetection(bool keyPressed)
}
#endif

int TransmitMixer::GetMixingFrequency()
{
assert(_audioFrame.sample_rate_hz_ != 0);
return _audioFrame.sample_rate_hz_;
}

#if WEBRTC_VOICE_ENGINE_TYPING_DETECTION
int TransmitMixer::TimeSinceLastTyping(int &seconds)
{

@@ -35,8 +35,6 @@ namespace webrtc {

class AudioProcessing;
class ProcessThread;
class VoEExternalMedia;
class VoEMediaProcess;

namespace voe {

@@ -83,13 +81,6 @@ public:

int32_t StopSend();

// VoEExternalMedia
int RegisterExternalMediaProcessing(VoEMediaProcess* object,
ProcessingTypes type);
int DeRegisterExternalMediaProcessing(ProcessingTypes type);

int GetMixingFrequency();

// VoEVolumeControl
int SetMute(bool enable);

@@ -228,8 +219,6 @@ private:
int _instanceId;
bool _mixFileWithMicrophone;
uint32_t _captureLevel;
VoEMediaProcess* external_postproc_ptr_;
VoEMediaProcess* external_preproc_ptr_;
bool _mute;
bool stereo_codec_;
bool swap_stereo_channels_;
@@ -1,59 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

#include "webrtc/voice_engine/transmit_mixer.h"

#include "webrtc/test/gtest.h"
#include "webrtc/voice_engine/include/voe_external_media.h"

namespace webrtc {
namespace voe {
namespace {

class MediaCallback : public VoEMediaProcess {
public:
virtual void Process(int channel, ProcessingTypes type,
int16_t audio[], size_t samples_per_channel,
int sample_rate_hz, bool is_stereo) {
}
};

// TODO(andrew): Mock VoEMediaProcess, and verify the behavior when calling
// PrepareDemux().
TEST(TransmitMixerTest, RegisterExternalMediaCallback) {
TransmitMixer* tm = NULL;
ASSERT_EQ(0, TransmitMixer::Create(tm, 0));
ASSERT_TRUE(tm != NULL);
MediaCallback callback;
EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(NULL,
kRecordingPreprocessing));
EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(&callback,
kPlaybackPerChannel));
EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(&callback,
kPlaybackAllChannelsMixed));
EXPECT_EQ(-1, tm->RegisterExternalMediaProcessing(&callback,
kRecordingPerChannel));
EXPECT_EQ(0, tm->RegisterExternalMediaProcessing(&callback,
kRecordingAllChannelsMixed));
EXPECT_EQ(0, tm->RegisterExternalMediaProcessing(&callback,
kRecordingPreprocessing));
EXPECT_EQ(-1, tm->DeRegisterExternalMediaProcessing(kPlaybackPerChannel));
EXPECT_EQ(-1, tm->DeRegisterExternalMediaProcessing(
kPlaybackAllChannelsMixed));
EXPECT_EQ(-1, tm->DeRegisterExternalMediaProcessing(kRecordingPerChannel));
EXPECT_EQ(0, tm->DeRegisterExternalMediaProcessing(
kRecordingAllChannelsMixed));
EXPECT_EQ(0, tm->DeRegisterExternalMediaProcessing(kRecordingPreprocessing));
TransmitMixer::Destroy(tm);
}

} // namespace
} // namespace voe
} // namespace webrtc
@@ -1,176 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

#include "webrtc/voice_engine/voe_external_media_impl.h"

#include "webrtc/audio/utility/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/voice_engine_impl.h"

namespace webrtc {

VoEExternalMedia* VoEExternalMedia::GetInterface(VoiceEngine* voiceEngine) {
if (NULL == voiceEngine) {
return NULL;
}
VoiceEngineImpl* s = static_cast<VoiceEngineImpl*>(voiceEngine);
s->AddRef();
return s;
}

VoEExternalMediaImpl::VoEExternalMediaImpl(voe::SharedData* shared)
:
#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
playout_delay_ms_(0),
#endif
shared_(shared) {
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(shared_->instance_id(), -1),
"VoEExternalMediaImpl() - ctor");
}

VoEExternalMediaImpl::~VoEExternalMediaImpl() {
WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(shared_->instance_id(), -1),
"~VoEExternalMediaImpl() - dtor");
}

int VoEExternalMediaImpl::RegisterExternalMediaProcessing(
int channel,
ProcessingTypes type,
VoEMediaProcess& processObject) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
"RegisterExternalMediaProcessing(channel=%d, type=%d, "
"processObject=0x%x)",
channel, type, &processObject);
if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
switch (type) {
case kPlaybackPerChannel:
case kRecordingPerChannel: {
voe::ChannelOwner ch = shared_->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
shared_->SetLastError(
VE_CHANNEL_NOT_VALID, kTraceError,
"RegisterExternalMediaProcessing() failed to locate "
"channel");
return -1;
}
return channelPtr->RegisterExternalMediaProcessing(type, processObject);
}
case kPlaybackAllChannelsMixed: {
return shared_->output_mixer()->RegisterExternalMediaProcessing(
processObject);
}
case kRecordingAllChannelsMixed:
case kRecordingPreprocessing: {
return shared_->transmit_mixer()->RegisterExternalMediaProcessing(
&processObject, type);
}
}
return -1;
}

int VoEExternalMediaImpl::DeRegisterExternalMediaProcessing(
int channel,
ProcessingTypes type) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
"DeRegisterExternalMediaProcessing(channel=%d)", channel);
if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
switch (type) {
case kPlaybackPerChannel:
case kRecordingPerChannel: {
voe::ChannelOwner ch = shared_->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
shared_->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"RegisterExternalMediaProcessing() "
"failed to locate channel");
return -1;
}
return channelPtr->DeRegisterExternalMediaProcessing(type);
}
case kPlaybackAllChannelsMixed: {
return shared_->output_mixer()->DeRegisterExternalMediaProcessing();
}
case kRecordingAllChannelsMixed:
case kRecordingPreprocessing: {
return shared_->transmit_mixer()->DeRegisterExternalMediaProcessing(type);
}
}
return -1;
}

int VoEExternalMediaImpl::GetAudioFrame(int channel, int desired_sample_rate_hz,
AudioFrame* frame) {
if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = shared_->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
shared_->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetAudioFrame() failed to locate channel");
return -1;
}
if (!channelPtr->ExternalMixing()) {
shared_->SetLastError(VE_INVALID_OPERATION, kTraceError,
"GetAudioFrame() was called on channel that is not"
" externally mixed.");
return -1;
}
if (!channelPtr->Playing()) {
shared_->SetLastError(
VE_INVALID_OPERATION, kTraceError,
"GetAudioFrame() was called on channel that is not playing.");
return -1;
}
if (desired_sample_rate_hz == -1) {
shared_->SetLastError(VE_BAD_ARGUMENT, kTraceError,
"GetAudioFrame() was called with bad sample rate.");
return -1;
}
frame->sample_rate_hz_ =
desired_sample_rate_hz == 0 ? -1 : desired_sample_rate_hz;
auto ret = channelPtr->GetAudioFrameWithMuted(channel, frame);
if (ret == MixerParticipant::AudioFrameInfo::kMuted) {
AudioFrameOperations::Mute(frame);
}
return ret == MixerParticipant::AudioFrameInfo::kError ? -1 : 0;
}

int VoEExternalMediaImpl::SetExternalMixing(int channel, bool enable) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
VoEId(shared_->instance_id(), channel),
"SetExternalMixing(channel=%d, enable=%d)", channel, enable);
if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ChannelOwner ch = shared_->channel_manager().GetChannel(channel);
voe::Channel* channelPtr = ch.channel();
if (channelPtr == NULL) {
shared_->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"SetExternalMixing() failed to locate channel");
return -1;
}
return channelPtr->SetExternalMixing(enable);
}

} // namespace webrtc
@@ -1,45 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
#define WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H

#include "webrtc/voice_engine/include/voe_external_media.h"

#include "webrtc/voice_engine/shared_data.h"

namespace webrtc {

class VoEExternalMediaImpl : public VoEExternalMedia {
public:
int RegisterExternalMediaProcessing(int channel,
ProcessingTypes type,
VoEMediaProcess& processObject) override;

int DeRegisterExternalMediaProcessing(int channel,
ProcessingTypes type) override;

int GetAudioFrame(int channel,
int desired_sample_rate_hz,
AudioFrame* frame) override;

int SetExternalMixing(int channel, bool enable) override;

protected:
VoEExternalMediaImpl(voe::SharedData* shared);
~VoEExternalMediaImpl() override;

private:
voe::SharedData* shared_;
};

} // namespace webrtc

#endif // WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
@@ -18,7 +18,6 @@
#include "webrtc/voice_engine/voe_base_impl.h"
#include "webrtc/voice_engine/voe_audio_processing_impl.h"
#include "webrtc/voice_engine/voe_codec_impl.h"
#include "webrtc/voice_engine/voe_external_media_impl.h"
#include "webrtc/voice_engine/voe_file_impl.h"
#include "webrtc/voice_engine/voe_hardware_impl.h"
#include "webrtc/voice_engine/voe_neteq_stats_impl.h"
@@ -36,7 +35,6 @@ class VoiceEngineImpl : public voe::SharedData, // Must be the first base class
public VoiceEngine,
public VoEAudioProcessingImpl,
public VoECodecImpl,
public VoEExternalMediaImpl,
public VoEFileImpl,
public VoEHardwareImpl,
public VoENetEqStatsImpl,
@@ -50,7 +48,6 @@ class VoiceEngineImpl : public voe::SharedData, // Must be the first base class
: SharedData(),
VoEAudioProcessingImpl(this),
VoECodecImpl(this),
VoEExternalMediaImpl(this),
VoEFileImpl(this),
VoEHardwareImpl(this),
VoENetEqStatsImpl(this),
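The VoiceEngineImpl hunks above also show why deleting a sub-API is mostly a matter of dropping one base class and one constructor initializer: every sub-API is implemented by the same VoiceEngineImpl object, and each GetInterface() call (compare the removed VoEExternalMedia::GetInterface() earlier in this diff) simply casts the VoiceEngine pointer to that object and bumps a reference count. A condensed, self-contained illustration of that pattern follows; VoESubApi and GetSubApi are simplified stand-in names, not real WebRTC types.

// Illustration only: the sub-API aggregation pattern used by VoiceEngineImpl,
// with the real interfaces replaced by minimal stand-ins.
#include <cassert>

struct VoiceEngine {};                         // stand-in for webrtc::VoiceEngine
struct VoESubApi { virtual ~VoESubApi() {} };  // stand-in for one sub-API, e.g. VoECodec

class VoiceEngineImpl : public VoiceEngine, public VoESubApi {
 public:
  int AddRef() { return ++ref_count_; }
  int Release() { return --ref_count_; }

 private:
  int ref_count_ = 0;
};

// Mirrors the shape of the removed VoEExternalMedia::GetInterface().
VoESubApi* GetSubApi(VoiceEngine* voe) {
  VoiceEngineImpl* s = static_cast<VoiceEngineImpl*>(voe);
  s->AddRef();
  return s;
}

int main() {
  VoiceEngineImpl engine;
  VoESubApi* api = GetSubApi(&engine);
  assert(api != nullptr);
  static_cast<VoiceEngineImpl*>(api)->Release();
  return 0;
}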