Start using propagated field trials in AudioProcessingImpl

Bug: webrtc:369904700
Change-Id: Ibc9a2e5349f0d1ba7a7a7ebdd57dfddaf092a1af
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/368520
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#43564}
Authored by Danil Chapovalov on 2024-12-13 14:25:40 +01:00; committed by WebRTC LUCI CQ
parent 3e98919a6a
commit 4c73d1a326
5 changed files with 30 additions and 33 deletions


@@ -22,11 +22,9 @@
namespace webrtc {
absl::Nullable<scoped_refptr<AudioProcessing>>
BuiltinAudioProcessingBuilder::Build(const Environment& /*env*/) {
// TODO: bugs.webrtc.org/369904700 - Pass `env` when AudioProcessingImpl gets
// constructor that accepts it.
BuiltinAudioProcessingBuilder::Build(const Environment& env) {
return make_ref_counted<AudioProcessingImpl>(
config_, std::move(capture_post_processing_),
env, config_, std::move(capture_post_processing_),
std::move(render_pre_processing_), std::move(echo_control_factory_),
std::move(echo_detector_), std::move(capture_analyzer_));
}

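With Build() now forwarding `env`, a caller scopes field trials to a single APM instance by constructing the Environment itself instead of setting the process-global trial string. A minimal sketch of such a call site, assuming FieldTrials::CreateNoGlobal() and CreateEnvironment() from api/field_trials.h and api/environment/environment_factory.h, and a default-constructed BuiltinAudioProcessingBuilder; the header path and the function below are illustrative, not part of this change:

#include <memory>
#include <utility>

#include "api/audio/builtin_audio_processing_builder.h"  // assumed header path
#include "api/environment/environment_factory.h"
#include "api/field_trials.h"

void BuildApmWithScopedTrials() {
  // Trials live only in this Environment; the global field_trial string is
  // left untouched.
  std::unique_ptr<webrtc::FieldTrials> trials =
      webrtc::FieldTrials::CreateNoGlobal(
          "WebRTC-FullBandHpfKillSwitch/Enabled/");
  webrtc::Environment env = webrtc::CreateEnvironment(std::move(trials));
  // Build() hands `env` to AudioProcessingImpl, so the trial above drives its
  // kill-switch checks.
  auto apm = webrtc::BuiltinAudioProcessingBuilder().Build(env);
}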

@@ -152,6 +152,7 @@ rtc_library("audio_processing") {
":high_pass_filter",
":rms_level",
"../../api:array_view",
"../../api:field_trials_view",
"../../api:function_view",
"../../api:make_ref_counted",
"../../api/audio:aec3_config",
@@ -159,9 +160,13 @@ rtc_library("audio_processing") {
"../../api/audio:audio_processing",
"../../api/audio:audio_processing_statistics",
"../../api/audio:echo_control",
"../../api/environment",
"../../api/task_queue",
"../../audio/utility:audio_frame_operations",
"../../common_audio",
"../../common_audio:common_audio_c",
"../../common_audio:fir_filter",
"../../common_audio:fir_filter_factory",
"../../common_audio/third_party/ooura:fft_size_256",
"../../rtc_base:checks",
"../../rtc_base:event_tracer",
@@ -172,7 +177,6 @@ rtc_library("audio_processing") {
"../../rtc_base:sanitizer",
"../../rtc_base:swap_queue",
"../../rtc_base:timeutils",
"../../rtc_base/experiments:field_trial_parser",
"../../rtc_base/synchronization:mutex",
"../../rtc_base/system:rtc_export",
"../../system_wrappers",
@@ -180,7 +184,7 @@ rtc_library("audio_processing") {
"../../system_wrappers:field_trial",
"../../system_wrappers:metrics",
"aec3",
"aec_dump:aec_dump",
"aec_dump",
"aecm:aecm_core",
"agc",
"agc:gain_control_interface",
@@ -190,17 +194,9 @@ rtc_library("audio_processing") {
"ns",
"vad",
"//third_party/abseil-cpp/absl/base:nullability",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/strings:string_view",
]
deps += [
"../../common_audio",
"../../common_audio:fir_filter",
"../../common_audio:fir_filter_factory",
"../../system_wrappers",
]
if (rtc_enable_protobuf) {
deps += [ "aec_dump:aec_dump_impl" ]
} else {


@@ -20,10 +20,11 @@
#include <utility>
#include "absl/base/nullability.h"
#include "absl/strings/match.h"
#include "absl/strings/string_view.h"
#include "api/array_view.h"
#include "api/audio/audio_frame.h"
#include "api/environment/environment.h"
#include "api/field_trials_view.h"
#include "api/task_queue/task_queue_base.h"
#include "common_audio/audio_converter.h"
#include "common_audio/include/audio_util.h"
@@ -32,12 +33,10 @@
#include "modules/audio_processing/include/audio_frame_view.h"
#include "modules/audio_processing/logging/apm_data_dumper.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/denormal_disabler.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
#define RETURN_ON_ERR(expr) \
@@ -58,15 +57,15 @@ bool SampleRateSupportsMultiBand(int sample_rate_hz) {
}
// Checks whether the high-pass filter should be done in the full-band.
bool EnforceSplitBandHpf() {
return field_trial::IsEnabled("WebRTC-FullBandHpfKillSwitch");
bool EnforceSplitBandHpf(const FieldTrialsView& field_trials) {
return field_trials.IsEnabled("WebRTC-FullBandHpfKillSwitch");
}
// Checks whether AEC3 should be allowed to decide what the default
// configuration should be based on the render and capture channel configuration
// at hand.
bool UseSetupSpecificDefaultAec3Congfig() {
return !field_trial::IsEnabled(
bool UseSetupSpecificDefaultAec3Congfig(const FieldTrialsView& field_trials) {
return !field_trials.IsEnabled(
"WebRTC-Aec3SetupSpecificDefaultConfigDefaultsKillSwitch");
}
@@ -102,8 +101,8 @@ GainControl::Mode Agc1ConfigModeToInterfaceMode(
RTC_CHECK_NOTREACHED();
}
bool MinimizeProcessingForUnusedOutput() {
return !field_trial::IsEnabled("WebRTC-MutedStateKillSwitch");
bool MinimizeProcessingForUnusedOutput(const FieldTrialsView& field_trials) {
return !field_trials.IsEnabled("WebRTC-MutedStateKillSwitch");
}
// Maximum lengths that frame of samples being passed from the render side to
@@ -413,8 +412,9 @@ bool AudioProcessingImpl::SubmoduleStates::HighPassFilteringRequired() const {
noise_suppressor_enabled_;
}
AudioProcessingImpl::AudioProcessingImpl()
: AudioProcessingImpl(/*config=*/{},
AudioProcessingImpl::AudioProcessingImpl(const Environment& env)
: AudioProcessingImpl(env,
/*config=*/{},
/*capture_post_processor=*/nullptr,
/*render_pre_processor=*/nullptr,
/*echo_control_factory=*/nullptr,
@@ -424,6 +424,7 @@ AudioProcessingImpl::AudioProcessingImpl()
std::atomic<int> AudioProcessingImpl::instance_count_(0);
AudioProcessingImpl::AudioProcessingImpl(
const Environment& env,
const AudioProcessing::Config& config,
std::unique_ptr<CustomProcessing> capture_post_processor,
std::unique_ptr<CustomProcessing> render_pre_processor,
@@ -432,7 +433,7 @@ AudioProcessingImpl::AudioProcessingImpl(
std::unique_ptr<CustomAudioAnalyzer> capture_analyzer)
: data_dumper_(new ApmDataDumper(instance_count_.fetch_add(1) + 1)),
use_setup_specific_default_aec3_config_(
UseSetupSpecificDefaultAec3Congfig()),
UseSetupSpecificDefaultAec3Congfig(env.field_trials())),
capture_runtime_settings_(RuntimeSettingQueueSize()),
render_runtime_settings_(RuntimeSettingQueueSize()),
capture_runtime_settings_enqueuer_(&capture_runtime_settings_),
@@ -446,12 +447,12 @@ AudioProcessingImpl::AudioProcessingImpl(
std::move(render_pre_processor),
std::move(echo_detector),
std::move(capture_analyzer)),
constants_(!field_trial::IsEnabled(
constants_(!env.field_trials().IsEnabled(
"WebRTC-ApmExperimentalMultiChannelRenderKillSwitch"),
!field_trial::IsEnabled(
!env.field_trials().IsEnabled(
"WebRTC-ApmExperimentalMultiChannelCaptureKillSwitch"),
EnforceSplitBandHpf(),
MinimizeProcessingForUnusedOutput()),
EnforceSplitBandHpf(env.field_trials()),
MinimizeProcessingForUnusedOutput(env.field_trials())),
capture_(),
capture_nonlocked_(),
applied_input_volume_stats_reporter_(

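Any future trial-gated behavior in this file follows the same pattern as the helpers above: take a const FieldTrialsView& (ultimately env.field_trials()) instead of calling the global field_trial::IsEnabled(). A sketch with a purely hypothetical trial name:

#include "api/field_trials_view.h"

namespace webrtc {
namespace {

// Hypothetical kill switch, named only to illustrate the calling convention.
bool UseExampleFeature(const FieldTrialsView& field_trials) {
  return !field_trials.IsEnabled("WebRTC-ExampleFeatureKillSwitch");
}

}  // namespace
}  // namespace webrtc

In the constructor such a flag would be evaluated once from env.field_trials(), mirroring how constants_ is initialized in this change.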

@@ -25,6 +25,7 @@
#include "api/array_view.h"
#include "api/audio/audio_processing.h"
#include "api/audio/audio_processing_statistics.h"
#include "api/environment/environment.h"
#include "api/function_view.h"
#include "api/task_queue/task_queue_base.h"
#include "modules/audio_processing/aec3/echo_canceller3.h"
@@ -60,8 +61,9 @@ class AudioProcessingImpl : public AudioProcessing {
public:
// Methods forcing APM to run in a single-threaded manner.
// Acquires both the render and capture locks.
AudioProcessingImpl();
AudioProcessingImpl(const AudioProcessing::Config& config,
explicit AudioProcessingImpl(const Environment& env);
AudioProcessingImpl(const Environment& env,
const AudioProcessing::Config& config,
std::unique_ptr<CustomProcessing> capture_post_processor,
std::unique_ptr<CustomProcessing> render_pre_processor,
std::unique_ptr<EchoControlFactory> echo_control_factory,


@@ -40,7 +40,7 @@ using ::testing::NotNull;
class MockInitialize : public AudioProcessingImpl {
public:
MockInitialize() : AudioProcessingImpl() {}
MockInitialize() : AudioProcessingImpl(CreateEnvironment()) {}
MOCK_METHOD(void, InitializeLocked, (), (override));
void RealInitializeLocked() {
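
Because the Environment is injected, a test can pin a trial to one AudioProcessingImpl instance without touching global state. A sketch under the same assumptions as above (FieldTrials::CreateNoGlobal(), CreateEnvironment()); the test name and trial usage are illustrative only:

#include <memory>
#include <utility>

#include "api/environment/environment_factory.h"
#include "api/field_trials.h"
#include "api/make_ref_counted.h"
#include "modules/audio_processing/audio_processing_impl.h"
#include "test/gtest.h"

// Illustrative test: one instance with a locally scoped trial.
TEST(AudioProcessingImplFieldTrials, TrialIsScopedToOneInstance) {
  std::unique_ptr<webrtc::FieldTrials> trials =
      webrtc::FieldTrials::CreateNoGlobal(
          "WebRTC-MutedStateKillSwitch/Enabled/");
  auto apm = webrtc::make_ref_counted<webrtc::AudioProcessingImpl>(
      webrtc::CreateEnvironment(std::move(trials)));
  ASSERT_TRUE(apm);
  // Other AudioProcessingImpl instances in the same process keep the default
  // behavior, because the trial exists only inside this Environment.
}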