diff --git a/modules/audio_device/BUILD.gn b/modules/audio_device/BUILD.gn
index e5958047b0..64a9ddcb70 100644
--- a/modules/audio_device/BUILD.gn
+++ b/modules/audio_device/BUILD.gn
@@ -178,6 +178,7 @@ rtc_source_set("audio_device_impl") {
     "../../rtc_base/system:file_wrapper",
     "../../rtc_base/task_utils:repeating_task",
     "../../system_wrappers",
+    "../../system_wrappers:field_trial",
     "../../system_wrappers:metrics",
     "../utility",
     "//third_party/abseil-cpp/absl/memory",
diff --git a/modules/audio_device/android/audio_track_jni.cc b/modules/audio_device/android/audio_track_jni.cc
index ef419f980c..776f0cfd70 100644
--- a/modules/audio_device/android/audio_track_jni.cc
+++ b/modules/audio_device/android/audio_track_jni.cc
@@ -18,6 +18,7 @@
 #include "rtc_base/format_macros.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/platform_thread.h"
+#include "system_wrappers/include/field_trial.h"
 
 namespace webrtc {
 
@@ -26,7 +27,7 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
     NativeRegistration* native_reg,
     std::unique_ptr<GlobalRef> audio_track)
     : audio_track_(std::move(audio_track)),
-      init_playout_(native_reg->GetMethodId("initPlayout", "(II)Z")),
+      init_playout_(native_reg->GetMethodId("initPlayout", "(IID)Z")),
       start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
       stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
       set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
@@ -37,7 +38,15 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
 
 AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
 
 bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
-  return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels);
+  double buffer_size_factor =
+      strtod(webrtc::field_trial::FindFullName(
+                 "WebRTC-AudioDevicePlayoutBufferSizeFactor")
+                 .c_str(),
+             nullptr);
+  if (buffer_size_factor == 0)
+    buffer_size_factor = 1.0;
+  return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels,
+                                         buffer_size_factor);
 }
 
 bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 925d6bc0de..3023c99fa2 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -215,9 +215,11 @@ public class WebRtcAudioTrack {
     }
   }
 
-  private boolean initPlayout(int sampleRate, int channels) {
+  private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
     threadChecker.checkIsOnValidThread();
-    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    Logging.d(TAG,
+        "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
+            + ", bufferSizeFactor=" + bufferSizeFactor + ")");
     final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
     byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
     Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
@@ -230,11 +232,11 @@ public class WebRtcAudioTrack {
     // Get the minimum buffer size required for the successful creation of an
     // AudioTrack object to be created in the MODE_STREAM mode.
     // Note that this size doesn't guarantee a smooth playback under load.
-    // TODO(henrika): should we extend the buffer size to avoid glitches?
     final int channelConfig = channelCountToConfiguration(channels);
-    final int minBufferSizeInBytes =
-        AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
-    Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
+    final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig,
+                                                AudioFormat.ENCODING_PCM_16BIT)
+        * bufferSizeFactor);
+    Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes);
     // For the streaming mode, data must be written to the audio sink in
     // chunks of size (given by byteBuffer.capacity()) less than or equal
     // to the total buffer size |minBufferSizeInBytes|. But, we have seen
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index 862f3a8cbf..5b0a7fe8ee 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -1108,6 +1108,7 @@ if (current_os == "linux" || is_android) {
       "../../modules/audio_device:audio_device_buffer",
       "../../rtc_base:checks",
       "../../rtc_base:rtc_base_approved",
+      "../../system_wrappers:field_trial",
      "../../system_wrappers:metrics",
       "//third_party/abseil-cpp/absl/types:optional",
     ]
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index a00aec01ed..5956f3c0fc 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -182,9 +182,11 @@ class WebRtcAudioTrack {
   }
 
   @CalledByNative
-  private boolean initPlayout(int sampleRate, int channels) {
+  private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
     threadChecker.checkIsOnValidThread();
-    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
+    Logging.d(TAG,
+        "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
+            + ", bufferSizeFactor=" + bufferSizeFactor + ")");
     final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
     byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
     Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
@@ -197,11 +199,11 @@ class WebRtcAudioTrack {
     // Get the minimum buffer size required for the successful creation of an
     // AudioTrack object to be created in the MODE_STREAM mode.
     // Note that this size doesn't guarantee a smooth playback under load.
-    // TODO(henrika): should we extend the buffer size to avoid glitches?
     final int channelConfig = channelCountToConfiguration(channels);
-    final int minBufferSizeInBytes =
-        AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
-    Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
+    final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig,
+                                                AudioFormat.ENCODING_PCM_16BIT)
+        * bufferSizeFactor);
+    Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes);
     // For the streaming mode, data must be written to the audio sink in
     // chunks of size (given by byteBuffer.capacity()) less than or equal
     // to the total buffer size |minBufferSizeInBytes|. But, we have seen
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc
index 12e9fbf834..8f0a041711 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.cc
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -19,6 +19,7 @@
 #include "rtc_base/platform_thread.h"
 #include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
 #include "sdk/android/src/jni/jni_helpers.h"
+#include "system_wrappers/include/field_trial.h"
 
 namespace webrtc {
 
@@ -81,9 +82,16 @@ int32_t AudioTrackJni::InitPlayout() {
     return 0;
   }
   RTC_DCHECK(!playing_);
+  double buffer_size_factor =
+      strtod(webrtc::field_trial::FindFullName(
+                 "WebRTC-AudioDevicePlayoutBufferSizeFactor")
+                 .c_str(),
+             nullptr);
+  if (buffer_size_factor == 0)
+    buffer_size_factor = 1.0;
   if (!Java_WebRtcAudioTrack_initPlayout(
           env_, j_audio_track_, audio_parameters_.sample_rate(),
-          static_cast<int>(audio_parameters_.channels()))) {
+          static_cast<int>(audio_parameters_.channels()), buffer_size_factor)) {
     RTC_LOG(LS_ERROR) << "InitPlayout failed";
     return -1;
   }
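
Note for reviewers: the sketch below is illustrative only, not part of the patch. It shows how an embedder could enable the new trial, assuming the standard system_wrappers field-trial API. The group string doubles as the factor value, since InitPlayout() parses it with strtod() and falls back to 1.0 when the trial is unset or the group is not numeric.

  #include "system_wrappers/include/field_trial.h"

  // Hypothetical example: ask for a 2x larger AudioTrack playout buffer.
  // Field-trial groups are plain strings, so the factor is encoded as the
  // group name ("2.0"). The string must outlive this call, because
  // InitFieldTrialsFromString() stores the pointer rather than copying it.
  static const char kFieldTrials[] =
      "WebRTC-AudioDevicePlayoutBufferSizeFactor/2.0/";

  void EnablePlayoutBufferSizeTrial() {
    // Must run before the audio device module is created, since the factor
    // is only read when playout is initialized.
    webrtc::field_trial::InitFieldTrialsFromString(kFieldTrials);
  }

On Android, the same trials string can alternatively be supplied from Java through the SDK's PeerConnectionFactory initialization options.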