Updating AsyncAudioProcessing API, part 1.

Add an API to pass AudioFrameProcessor as a unique_ptr.

Bug: webrtc:15111
Change-Id: I4cefa35399c05c6e81c496e0b0387b95809bd8f8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/301984
Reviewed-by: Olga Sharonova <olka@webrtc.org>
Reviewed-by: Markus Handell <handellm@webrtc.org>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Commit-Queue: Peter Hanspers <peterhanspers@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#40187}
Author: Peter Hanspers
Date: 2023-05-30 13:43:41 +02:00
Committed by: WebRTC LUCI CQ
Commit: a9bba047b7 (parent: 56d69e2c67)
10 changed files with 177 additions and 21 deletions
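
For context, a minimal caller-side sketch of the new overload. MyFrameProcessor is a hypothetical AudioFrameProcessor implementation, and the threads and video codec factories are assumed to be created elsewhere; none of this is part of the change itself.

// Sketch only: MyFrameProcessor, the threads and the video factories are
// placeholders assumed to exist in the calling code.
std::unique_ptr<webrtc::AudioFrameProcessor> frame_processor =
    std::make_unique<MyFrameProcessor>();

rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory =
    webrtc::CreatePeerConnectionFactory(
        network_thread.get(), worker_thread.get(), signaling_thread.get(),
        /*default_adm=*/nullptr,
        webrtc::CreateBuiltinAudioEncoderFactory(),
        webrtc::CreateBuiltinAudioDecoderFactory(),
        std::move(video_encoder_factory), std::move(video_decoder_factory),
        /*audio_mixer=*/nullptr,
        webrtc::AudioProcessingBuilder().Create(),
        std::move(frame_processor));  // WebRTC now owns the processor.

Unlike the raw-pointer overload, the caller no longer has to keep the processor alive for the lifetime of the factory: ownership is passed down through MediaEngineDependencies into WebRtcVoiceEngine and, ultimately, AsyncAudioProcessing.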

@@ -39,6 +39,7 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
AudioFrameProcessor* audio_frame_processor,
std::unique_ptr<AudioFrameProcessor> owned_audio_frame_processor,
std::unique_ptr<FieldTrialsView> field_trials) {
if (!field_trials) {
field_trials = std::make_unique<webrtc::FieldTrialBasedConfig>();
@@ -64,7 +65,12 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
media_dependencies.adm = std::move(default_adm);
media_dependencies.audio_encoder_factory = std::move(audio_encoder_factory);
media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory);
media_dependencies.audio_frame_processor = audio_frame_processor;
if (audio_frame_processor) {
media_dependencies.audio_frame_processor = audio_frame_processor;
} else if (owned_audio_frame_processor) {
media_dependencies.owned_audio_frame_processor =
std::move(owned_audio_frame_processor);
}
if (audio_processing) {
media_dependencies.audio_processing = std::move(audio_processing);
} else {
@@ -80,4 +86,44 @@ rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
return CreateModularPeerConnectionFactory(std::move(dependencies));
}
rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
rtc::scoped_refptr<AudioDeviceModule> default_adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
std::unique_ptr<VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
AudioFrameProcessor* audio_frame_processor) {
return CreatePeerConnectionFactory(
network_thread, worker_thread, signaling_thread, default_adm,
audio_encoder_factory, audio_decoder_factory,
std::move(video_encoder_factory), std::move(video_decoder_factory),
audio_mixer, audio_processing, audio_frame_processor, nullptr, nullptr);
}
rtc::scoped_refptr<PeerConnectionFactoryInterface> CreatePeerConnectionFactory(
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
rtc::scoped_refptr<AudioDeviceModule> default_adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
std::unique_ptr<VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioFrameProcessor> owned_audio_frame_processor,
std::unique_ptr<FieldTrialsView> field_trials) {
return CreatePeerConnectionFactory(
network_thread, worker_thread, signaling_thread, default_adm,
audio_encoder_factory, audio_decoder_factory,
std::move(video_encoder_factory), std::move(video_decoder_factory),
audio_mixer, audio_processing, nullptr,
std::move(owned_audio_frame_processor), std::move(field_trials));
}
} // namespace webrtc

@@ -37,6 +37,9 @@ class AudioProcessing;
// Create a new instance of PeerConnectionFactoryInterface with optional video
// codec factories. These video factories represent all video codecs, i.e. no
// extra internal video codecs will be added.
// TODO(bugs.webrtc.org/15111):
// Remove the method with the raw AudioFrameProcessor pointer in the
// follow-up.
RTC_EXPORT rtc::scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory(
rtc::Thread* network_thread,
@@ -49,7 +52,21 @@ CreatePeerConnectionFactory(
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
AudioFrameProcessor* audio_frame_processor = nullptr,
AudioFrameProcessor* audio_frame_processor = nullptr);
RTC_EXPORT rtc::scoped_refptr<PeerConnectionFactoryInterface>
CreatePeerConnectionFactory(
rtc::Thread* network_thread,
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
rtc::scoped_refptr<AudioDeviceModule> default_adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
std::unique_ptr<VideoEncoderFactory> video_encoder_factory,
std::unique_ptr<VideoDecoderFactory> video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer,
rtc::scoped_refptr<AudioProcessing> audio_processing,
std::unique_ptr<AudioFrameProcessor> owned_audio_frame_processor,
std::unique_ptr<FieldTrialsView> field_trials = nullptr);
} // namespace webrtc

@@ -37,7 +37,7 @@ TEST(NullWebRtcVideoEngineTest, CheckInterface) {
task_queue_factory.get(), adm.get(),
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr,
webrtc::AudioProcessingBuilder().Create(), nullptr, trials);
webrtc::AudioProcessingBuilder().Create(), nullptr, nullptr, trials);
CompositeMediaEngine engine(std::move(audio_engine),
std::make_unique<NullWebRtcVideoEngine>());

@@ -46,7 +46,8 @@ std::unique_ptr<MediaEngineInterface> CreateMediaEngine(
std::move(dependencies.audio_decoder_factory),
std::move(dependencies.audio_mixer),
std::move(dependencies.audio_processing),
dependencies.audio_frame_processor, trials);
dependencies.audio_frame_processor,
std::move(dependencies.owned_audio_frame_processor), trials);
#ifdef HAVE_WEBRTC_VIDEO
auto video_engine = std::make_unique<WebRtcVideoEngine>(
std::move(dependencies.video_encoder_factory),

@@ -49,7 +49,10 @@ struct MediaEngineDependencies {
rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory;
rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer;
rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing;
// TODO(bugs.webrtc.org/15111):
// Remove the raw AudioFrameProcessor pointer in the follow-up.
webrtc::AudioFrameProcessor* audio_frame_processor = nullptr;
std::unique_ptr<webrtc::AudioFrameProcessor> owned_audio_frame_processor;
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory;
std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory;

@@ -313,7 +313,10 @@ WebRtcVoiceEngine::WebRtcVoiceEngine(
const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing,
// TODO(bugs.webrtc.org/15111):
// Remove the raw AudioFrameProcessor pointer in the follow-up.
webrtc::AudioFrameProcessor* audio_frame_processor,
std::unique_ptr<webrtc::AudioFrameProcessor> owned_audio_frame_processor,
const webrtc::FieldTrialsView& trials)
: task_queue_factory_(task_queue_factory),
adm_(adm),
@@ -322,6 +325,7 @@ WebRtcVoiceEngine::WebRtcVoiceEngine(
audio_mixer_(audio_mixer),
apm_(audio_processing),
audio_frame_processor_(audio_frame_processor),
owned_audio_frame_processor_(std::move(owned_audio_frame_processor)),
minimized_resampling_on_mobile_trial_enabled_(
IsEnabled(trials, "WebRTC-Audio-MinimizeResamplingOnMobile")) {
RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::WebRtcVoiceEngine";
@@ -387,10 +391,15 @@ void WebRtcVoiceEngine::Init() {
}
config.audio_processing = apm_;
config.audio_device_module = adm_;
if (audio_frame_processor_)
if (audio_frame_processor_) {
config.async_audio_processing_factory =
rtc::make_ref_counted<webrtc::AsyncAudioProcessing::Factory>(
*audio_frame_processor_, *task_queue_factory_);
} else if (owned_audio_frame_processor_) {
config.async_audio_processing_factory =
rtc::make_ref_counted<webrtc::AsyncAudioProcessing::Factory>(
std::move(owned_audio_frame_processor_), *task_queue_factory_);
}
audio_state_ = webrtc::AudioState::Create(config);
}

@@ -89,7 +89,10 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface {
const rtc::scoped_refptr<webrtc::AudioDecoderFactory>& decoder_factory,
rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer,
rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing,
// TODO(bugs.webrtc.org/15111):
// Remove the raw AudioFrameProcessor pointer in the follow-up.
webrtc::AudioFrameProcessor* audio_frame_processor,
std::unique_ptr<webrtc::AudioFrameProcessor> owned_audio_frame_processor,
const webrtc::FieldTrialsView& trials);
WebRtcVoiceEngine() = delete;
@@ -156,7 +159,10 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface {
// The audio processing module.
rtc::scoped_refptr<webrtc::AudioProcessing> apm_;
// Asynchronous audio processing.
// TODO(bugs.webrtc.org/15111):
// Remove the raw AudioFrameProcessor pointer in the follow-up.
webrtc::AudioFrameProcessor* const audio_frame_processor_;
std::unique_ptr<webrtc::AudioFrameProcessor> owned_audio_frame_processor_;
// The primary instance of WebRtc VoiceEngine.
rtc::scoped_refptr<webrtc::AudioState> audio_state_;
std::vector<AudioCodec> send_codecs_;

@@ -167,7 +167,7 @@ TEST(WebRtcVoiceEngineTestStubLibrary, StartupShutdown) {
task_queue_factory.get(), adm.get(),
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
nullptr, trials);
nullptr, nullptr, trials);
engine.Init();
}
}
@@ -213,7 +213,7 @@ class WebRtcVoiceEngineTestFake : public ::testing::TestWithParam<bool> {
auto decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
engine_.reset(new cricket::WebRtcVoiceEngine(
task_queue_factory_.get(), adm_.get(), encoder_factory, decoder_factory,
nullptr, apm_, nullptr, field_trials_));
nullptr, apm_, nullptr, nullptr, field_trials_));
engine_->Init();
send_parameters_.codecs.push_back(kPcmuCodec);
recv_parameters_.codecs.push_back(kPcmuCodec);
@@ -3641,7 +3641,7 @@ TEST(WebRtcVoiceEngineTest, StartupShutdown) {
task_queue_factory.get(), adm.get(),
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
nullptr, field_trials);
nullptr, nullptr, field_trials);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
@@ -3673,7 +3673,7 @@ TEST(WebRtcVoiceEngineTest, StartupShutdownWithExternalADM) {
task_queue_factory.get(), adm.get(),
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
nullptr, field_trials);
nullptr, nullptr, field_trials);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
@@ -3709,7 +3709,7 @@ TEST(WebRtcVoiceEngineTest, HasCorrectPayloadTypeMapping) {
task_queue_factory.get(), adm.get(),
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
nullptr, field_trials);
nullptr, nullptr, field_trials);
engine.Init();
for (const cricket::AudioCodec& codec : engine.send_codecs()) {
auto is_codec = [&codec](const char* name, int clockrate = 0) {
@@ -3759,7 +3759,7 @@ TEST(WebRtcVoiceEngineTest, Has32Channels) {
task_queue_factory.get(), adm.get(),
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::MockAudioDecoderFactory::CreateUnusedFactory(), nullptr, apm,
nullptr, field_trials);
nullptr, nullptr, field_trials);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
@@ -3810,7 +3810,7 @@ TEST(WebRtcVoiceEngineTest, SetRecvCodecs) {
task_queue_factory.get(), adm.get(),
webrtc::MockAudioEncoderFactory::CreateUnusedFactory(),
webrtc::CreateBuiltinAudioDecoderFactory(), nullptr, apm, nullptr,
field_trials);
nullptr, field_trials);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
@@ -3838,7 +3838,8 @@ TEST(WebRtcVoiceEngineTest, SetRtpSendParametersMaxBitrate) {
cricket::WebRtcVoiceEngine engine(task_queue_factory.get(), adm.get(),
webrtc::CreateBuiltinAudioEncoderFactory(),
webrtc::CreateBuiltinAudioDecoderFactory(),
nullptr, nullptr, nullptr, field_trials);
nullptr, nullptr, nullptr, nullptr,
field_trials);
engine.Init();
webrtc::RtcEventLogNull event_log;
webrtc::Call::Config call_config(&event_log);
@@ -3914,7 +3915,7 @@ TEST(WebRtcVoiceEngineTest, CollectRecvCodecs) {
webrtc::FieldTrialBasedConfig field_trials;
cricket::WebRtcVoiceEngine engine(
task_queue_factory.get(), adm.get(), unused_encoder_factory,
mock_decoder_factory, nullptr, apm, nullptr, field_trials);
mock_decoder_factory, nullptr, apm, nullptr, nullptr, field_trials);
engine.Init();
auto codecs = engine.recv_codecs();
EXPECT_EQ(11u, codecs.size());

@@ -24,16 +24,33 @@ AsyncAudioProcessing::Factory::Factory(AudioFrameProcessor& frame_processor,
: frame_processor_(frame_processor),
task_queue_factory_(task_queue_factory) {}
AsyncAudioProcessing::Factory::Factory(
std::unique_ptr<AudioFrameProcessor> frame_processor,
TaskQueueFactory& task_queue_factory)
: frame_processor_(*frame_processor),
owned_frame_processor_(std::move(frame_processor)),
task_queue_factory_(task_queue_factory) {}
std::unique_ptr<AsyncAudioProcessing>
AsyncAudioProcessing::Factory::CreateAsyncAudioProcessing(
AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback) {
return std::make_unique<AsyncAudioProcessing>(
frame_processor_, task_queue_factory_,
std::move(on_frame_processed_callback));
if (owned_frame_processor_) {
return std::make_unique<AsyncAudioProcessing>(
std::move(owned_frame_processor_), task_queue_factory_,
std::move(on_frame_processed_callback));
} else {
return std::make_unique<AsyncAudioProcessing>(
frame_processor_, task_queue_factory_,
std::move(on_frame_processed_callback));
}
}
AsyncAudioProcessing::~AsyncAudioProcessing() {
frame_processor_.SetSink(nullptr);
if (owned_frame_processor_) {
owned_frame_processor_->SetSink(nullptr);
} else {
frame_processor_.SetSink(nullptr);
}
}
AsyncAudioProcessing::AsyncAudioProcessing(
@@ -52,10 +69,33 @@ AsyncAudioProcessing::AsyncAudioProcessing(
});
}
void AsyncAudioProcessing::Process(std::unique_ptr<AudioFrame> frame) {
task_queue_.PostTask([this, frame = std::move(frame)]() mutable {
frame_processor_.Process(std::move(frame));
AsyncAudioProcessing::AsyncAudioProcessing(
std::unique_ptr<AudioFrameProcessor> frame_processor,
TaskQueueFactory& task_queue_factory,
AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback)
: on_frame_processed_callback_(std::move(on_frame_processed_callback)),
frame_processor_(*frame_processor),
owned_frame_processor_(std::move(frame_processor)),
task_queue_(task_queue_factory.CreateTaskQueue(
"AsyncAudioProcessing",
TaskQueueFactory::Priority::NORMAL)) {
owned_frame_processor_->SetSink([this](std::unique_ptr<AudioFrame> frame) {
task_queue_.PostTask([this, frame = std::move(frame)]() mutable {
on_frame_processed_callback_(std::move(frame));
});
});
}
void AsyncAudioProcessing::Process(std::unique_ptr<AudioFrame> frame) {
if (owned_frame_processor_) {
task_queue_.PostTask([this, frame = std::move(frame)]() mutable {
owned_frame_processor_->Process(std::move(frame));
});
} else {
task_queue_.PostTask([this, frame = std::move(frame)]() mutable {
frame_processor_.Process(std::move(frame));
});
}
}
} // namespace webrtc
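
For reference, the processor-side contract exercised above is just the Process()/SetSink() pair on AudioFrameProcessor. Below is a minimal pass-through sketch; the class name, member layout and include paths are illustrative assumptions and not part of this change.

// Illustrative only: a pass-through AudioFrameProcessor that can now be
// handed to WebRTC by unique_ptr. Include paths are assumptions.
#include <memory>
#include <utility>

#include "api/audio/audio_frame.h"
#include "api/audio/audio_frame_processor.h"

class PassThroughFrameProcessor : public webrtc::AudioFrameProcessor {
 public:
  // Called by AsyncAudioProcessing on its task queue; forwards the (possibly
  // modified) frame to whatever sink was registered via SetSink().
  void Process(std::unique_ptr<webrtc::AudioFrame> frame) override {
    if (sink_)
      sink_(std::move(frame));
  }

  // AsyncAudioProcessing sets the sink on construction and clears it with
  // nullptr on destruction, as seen in the destructor above.
  void SetSink(OnAudioFrameCallback sink) override { sink_ = std::move(sink); }

 private:
  OnAudioFrameCallback sink_;
};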

@@ -38,12 +38,23 @@ class AsyncAudioProcessing final {
~Factory();
Factory(AudioFrameProcessor& frame_processor,
TaskQueueFactory& task_queue_factory);
Factory(std::unique_ptr<AudioFrameProcessor> frame_processor,
TaskQueueFactory& task_queue_factory);
std::unique_ptr<AsyncAudioProcessing> CreateAsyncAudioProcessing(
AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback);
private:
// TODO(bugs.webrtc.org/15111):
// Remove 'AudioFrameProcessor& frame_processor_' in favour of
// std::unique_ptr in the follow-up.
// While transitioning this API from using AudioFrameProcessor& to using
// std::unique_ptr<AudioFrameProcessor>, we have two member variables, both
// referencing the same object. Throughout the lifetime of the Factory
// only one of the variables is used, depending on which constructor was
// called.
AudioFrameProcessor& frame_processor_;
std::unique_ptr<AudioFrameProcessor> owned_frame_processor_;
TaskQueueFactory& task_queue_factory_;
};
@@ -57,17 +68,39 @@ class AsyncAudioProcessing final {
// into `on_frame_processed_callback`, which is posted back onto
// `task_queue_`. `task_queue_` is created using the provided
// `task_queue_factory`.
// TODO(bugs.webrtc.org/15111):
// Remove this method in favour of the method taking the
// unique_ptr<AudioFrameProcessor> in the follow-up.
AsyncAudioProcessing(
AudioFrameProcessor& frame_processor,
TaskQueueFactory& task_queue_factory,
AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback);
// Creates AsyncAudioProcessing which will pass audio frames to
// `frame_processor` on `task_queue_` and reply with processed frames passed
// into `on_frame_processed_callback`, which is posted back onto
// `task_queue_`. `task_queue_` is created using the provided
// `task_queue_factory`.
AsyncAudioProcessing(
std::unique_ptr<AudioFrameProcessor> frame_processor,
TaskQueueFactory& task_queue_factory,
AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback);
// Accepts `frame` for asynchronous processing. Thread-safe.
void Process(std::unique_ptr<AudioFrame> frame);
private:
AudioFrameProcessor::OnAudioFrameCallback on_frame_processed_callback_;
// TODO(bugs.webrtc.org/15111):
// Remove 'AudioFrameProcessor& frame_processor_' in favour of
// std::unique_ptr in the follow-up.
// While transitioning this API from using AudioFrameProcessor& to using
// std::unique_ptr<AudioFrameProcessor>, we have two member variables, both
// referencing the same object. Throughout the lifetime of the AsyncAudioProcessing
// only one of the variables is used, depending on which constructor was
// called.
AudioFrameProcessor& frame_processor_;
std::unique_ptr<AudioFrameProcessor> owned_frame_processor_;
rtc::TaskQueue task_queue_;
};
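
Putting it together: the new Factory constructor takes ownership of the processor, and CreateAsyncAudioProcessing() then moves that ownership into the AsyncAudioProcessing instance it creates. A hedged sketch of the ownership flow, reusing the illustrative PassThroughFrameProcessor from above and assuming a TaskQueueFactory is available:

// Ownership flow: caller -> Factory::owned_frame_processor_ ->
// AsyncAudioProcessing::owned_frame_processor_.
auto factory = rtc::make_ref_counted<webrtc::AsyncAudioProcessing::Factory>(
    std::make_unique<PassThroughFrameProcessor>(),  // illustrative type
    *task_queue_factory);

std::unique_ptr<webrtc::AsyncAudioProcessing> processing =
    factory->CreateAsyncAudioProcessing(
        [](std::unique_ptr<webrtc::AudioFrame> frame) {
          // Processed frames are delivered here, posted from the
          // processor's task queue.
        });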