diff --git a/modules/audio_device/android/audio_track_jni.cc b/modules/audio_device/android/audio_track_jni.cc
index 6dd75ddd67..daaeeca1ea 100644
--- a/modules/audio_device/android/audio_track_jni.cc
+++ b/modules/audio_device/android/audio_track_jni.cc
@@ -34,7 +34,9 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
       set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
       get_stream_max_volume_(
           native_reg->GetMethodId("getStreamMaxVolume", "()I")),
-      get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {}
+      get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")),
+      get_buffer_size_in_frames_(
+          native_reg->GetMethodId("getBufferSizeInFrames", "()I")) {}
 
 AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}
 
@@ -46,15 +48,26 @@ bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
       nullptr);
   if (buffer_size_factor == 0)
     buffer_size_factor = 1.0;
-  int buffer_size_bytes = audio_track_->CallIntMethod(
+  int requested_buffer_size_bytes = audio_track_->CallIntMethod(
       init_playout_, sample_rate, channels, buffer_size_factor);
-  if (buffer_size_bytes != -1) {
+  // Update UMA histograms for both the requested and actual buffer size.
+  if (requested_buffer_size_bytes >= 0) {
     // To avoid division by zero, we assume the sample rate is 48k if an invalid
     // value is found.
     sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
-    const int buffer_size_ms = (buffer_size_bytes * 1000) / (2 * sample_rate);
+    // This calculation assumes that audio is mono.
+    const int requested_buffer_size_ms =
+        (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
     RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
-                         buffer_size_ms, 0, 1000, 100);
+                         requested_buffer_size_ms, 0, 1000, 100);
+    int actual_buffer_size_frames =
+        audio_track_->CallIntMethod(get_buffer_size_in_frames_);
+    if (actual_buffer_size_frames >= 0) {
+      const int actual_buffer_size_ms =
+          actual_buffer_size_frames * 1000 / sample_rate;
+      RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+                           actual_buffer_size_ms, 0, 1000, 100);
+    }
     return true;
   }
   return false;
diff --git a/modules/audio_device/android/audio_track_jni.h b/modules/audio_device/android/audio_track_jni.h
index 6303d754c8..529a9013e8 100644
--- a/modules/audio_device/android/audio_track_jni.h
+++ b/modules/audio_device/android/audio_track_jni.h
@@ -62,6 +62,7 @@ class AudioTrackJni {
     jmethodID set_stream_volume_;
     jmethodID get_stream_max_volume_;
     jmethodID get_stream_volume_;
+    jmethodID get_buffer_size_in_frames_;
   };
 
   explicit AudioTrackJni(AudioManager* audio_manager);
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 1973657450..7e6ad5acf4 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -433,6 +433,13 @@ public class WebRtcAudioTrack {
     }
   }
 
+  private int getBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
+      return audioTrack.getBufferSizeInFrames();
+    }
+    return -1;
+  }
+
   private void logBufferCapacityInFrames() {
     if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG,
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index 07debc3aae..94eb2a4357 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -423,6 +423,14 @@ class WebRtcAudioTrack {
     }
   }
 
+  @CalledByNative
+  private int getBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+      return audioTrack.getBufferSizeInFrames();
+    }
+    return -1;
+  }
+
   private void logBufferCapacityInFrames() {
     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
       Logging.d(TAG,
diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc
index 8f0a041711..d5b880b1b0 100644
--- a/sdk/android/src/jni/audio_device/audio_track_jni.cc
+++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc
@@ -20,6 +20,7 @@
 #include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 #include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
 
 namespace webrtc {
 
@@ -89,12 +90,33 @@ int32_t AudioTrackJni::InitPlayout() {
       nullptr);
   if (buffer_size_factor == 0)
     buffer_size_factor = 1.0;
-  if (!Java_WebRtcAudioTrack_initPlayout(
-          env_, j_audio_track_, audio_parameters_.sample_rate(),
-          static_cast<int>(audio_parameters_.channels()), buffer_size_factor)) {
+  int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
+      env_, j_audio_track_, audio_parameters_.sample_rate(),
+      static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
+  if (requested_buffer_size_bytes < 0) {
     RTC_LOG(LS_ERROR) << "InitPlayout failed";
     return -1;
   }
+  // Update UMA histograms for both the requested and actual buffer size.
+  // To avoid division by zero, we assume the sample rate is 48k if an invalid
+  // value is found.
+  const int sample_rate = audio_parameters_.sample_rate() <= 0
+                              ? 48000
+                              : audio_parameters_.sample_rate();
+  // This calculation assumes that audio is mono.
+  const int requested_buffer_size_ms =
+      (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+  RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+                       requested_buffer_size_ms, 0, 1000, 100);
+  int actual_buffer_size_frames =
+      Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+  if (actual_buffer_size_frames >= 0) {
+    const int actual_buffer_size_ms =
+        actual_buffer_size_frames * 1000 / sample_rate;
+    RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+                         actual_buffer_size_ms, 0, 1000, 100);
+  }
+
  initialized_ = true;
   return 0;
 }
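For reference, below is a minimal standalone sketch (not part of the patch) of the two buffer-size conversions the new histogram code performs. The helper names BytesToMs and FramesToMs and the sample values are illustrative only; the sketch assumes 16-bit PCM mono audio, as the in-diff comments state.

// Illustrative only: mirrors the arithmetic used for the UMA histograms above.
#include <cstdio>

// Bytes reported by initPlayout() -> milliseconds, assuming 16-bit mono PCM
// (2 bytes per frame). Falls back to 48 kHz to avoid division by zero.
int BytesToMs(int buffer_size_bytes, int sample_rate) {
  if (sample_rate <= 0)
    sample_rate = 48000;
  return (buffer_size_bytes * 1000) / (2 * sample_rate);
}

// Frames reported by AudioTrack.getBufferSizeInFrames() -> milliseconds.
// A frame is one sample per channel, so no byte-width factor is needed.
int FramesToMs(int buffer_size_frames, int sample_rate) {
  if (sample_rate <= 0)
    sample_rate = 48000;
  return buffer_size_frames * 1000 / sample_rate;
}

int main() {
  // 3840 bytes of 16-bit mono audio at 48 kHz is 1920 frames, i.e. 40 ms.
  std::printf("requested: %d ms\n", BytesToMs(3840, 48000));   // prints 40
  std::printf("actual:    %d ms\n", FramesToMs(1920, 48000));  // prints 40
  return 0;
}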