diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index 558e776216..3cb94381cf 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -15,6 +15,7 @@ import android.media.AudioDeviceInfo; import android.media.AudioManager; import android.os.Build; import android.support.annotation.RequiresApi; +import java.util.concurrent.ScheduledExecutorService; import org.webrtc.JniCommon; import org.webrtc.Logging; @@ -31,6 +32,7 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { public static class Builder { private final Context context; + private ScheduledExecutorService scheduler; private final AudioManager audioManager; private int inputSampleRate; private int outputSampleRate; @@ -53,6 +55,11 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager); } + public Builder setScheduler(ScheduledExecutorService scheduler) { + this.scheduler = scheduler; + return this; + } + /** * Call this method if the default handling of querying the native sample rate shall be * overridden. 
Can be useful on some devices where the available Android APIs are known to @@ -208,9 +215,13 @@ public class JavaAudioDeviceModule implements AudioDeviceModule { } Logging.d(TAG, "HW AEC will not be used."); } - final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource, - audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback, - useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); + ScheduledExecutorService executor = this.scheduler; + if (executor == null) { + executor = WebRtcAudioRecord.newDefaultScheduler(); + } + final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, + audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, + samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack( context, audioManager, audioTrackErrorCallback, audioTrackStateCallback); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java index 018196b784..734695937a 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -31,7 +31,10 @@ import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; @@ -90,12 +93,12 @@ class WebRtcAudioRecord { private @Nullable AudioRecordThread audioThread; private @Nullable AudioDeviceInfo preferredDevice; 
- private @Nullable ScheduledExecutorService executor; + private final ScheduledExecutorService executor; private @Nullable ScheduledFuture<String> future; private volatile boolean microphoneMute; - private boolean audioSourceMatchesRecordingSession; - private boolean isAudioConfigVerified; + private final AtomicReference<Boolean> audioSourceMatchesRecordingSessionRef = + new AtomicReference<>(); private byte[] emptyBytes; private final @Nullable AudioRecordErrorCallback errorCallback; @@ -179,14 +182,15 @@ class WebRtcAudioRecord { @CalledByNative WebRtcAudioRecord(Context context, AudioManager audioManager) { - this(context, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT, - null /* errorCallback */, null /* stateCallback */, null /* audioSamplesReadyCallback */, - WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, + DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, + null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), WebRtcAudioEffects.isNoiseSuppressorSupported()); } - public WebRtcAudioRecord(Context context, AudioManager audioManager, int audioSource, - int audioFormat, @Nullable AudioRecordErrorCallback errorCallback, + public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, + AudioManager audioManager, int audioSource, int audioFormat, + @Nullable AudioRecordErrorCallback errorCallback, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { @@ -197,6 +201,7 @@ class WebRtcAudioRecord { throw new IllegalArgumentException("HW NS not supported"); } this.context = context; + this.executor = scheduler; this.audioManager = audioManager; this.audioSource = audioSource; this.audioFormat = audioFormat; @@ -227,7 +232,7 @@ class WebRtcAudioRecord { // checked before
using the returned value of isAudioSourceMatchingRecordingSession(). @CalledByNative boolean isAudioConfigVerified() { - return isAudioConfigVerified; + return audioSourceMatchesRecordingSessionRef.get() != null; } // Returns true if verifyAudioConfig() succeeds. This value is set after a specific delay when @@ -236,7 +241,8 @@ class WebRtcAudioRecord { // enabled in WebRtcAudioRecord to ensure that the returned value is valid. @CalledByNative boolean isAudioSourceMatchingRecordingSession() { - if (!isAudioConfigVerified) { + Boolean audioSourceMatchesRecordingSession = audioSourceMatchesRecordingSessionRef.get(); + if (audioSourceMatchesRecordingSession == null) { Logging.w(TAG, "Audio configuration has not yet been verified"); return false; } @@ -298,6 +304,7 @@ class WebRtcAudioRecord { // Throws IllegalArgumentException. audioRecord = createAudioRecordOnMOrHigher( audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); if (preferredDevice != null) { setPreferredDevice(preferredDevice); } @@ -306,6 +313,7 @@ class WebRtcAudioRecord { // Throws UnsupportedOperationException. audioRecord = createAudioRecordOnLowerThanM( audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); } } catch (IllegalArgumentException | UnsupportedOperationException e) { // Report of exception message is sufficient. Example: "Cannot create AudioRecord". @@ -324,7 +332,7 @@ class WebRtcAudioRecord { // Check number of active recording sessions. Should be zero but we have seen conflict cases // and adding a log for it can help us figure out details about conflicting sessions. 
final int numActiveRecordingSessions = - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (numActiveRecordingSessions != 0) { // Log the conflict as a warning since initialization did in fact succeed. Most likely, the // upcoming call to startRecording() will fail under these conditions. @@ -371,7 +379,7 @@ class WebRtcAudioRecord { } audioThread = new AudioRecordThread("AudioRecordJavaThread"); audioThread.start(); - scheduleLogRecordingConfigurationsTask(); + scheduleLogRecordingConfigurationsTask(audioRecord); return true; } @@ -386,10 +394,6 @@ class WebRtcAudioRecord { } future = null; } - if (executor != null) { - executor.shutdownNow(); - executor = null; - } audioThread.stopThread(); if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); @@ -442,8 +446,8 @@ class WebRtcAudioRecord { @TargetApi(Build.VERSION_CODES.N) // Checks the number of active recording sessions and logs the states of all active sessions. - // Returns number of active sessions. - private int logRecordingConfigurations(boolean verifyAudioConfig) { + // Returns number of active sessions. Note that this could occur on an arbitrary thread. + private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAudioConfig) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { Logging.w(TAG, "AudioManager#getActiveRecordingConfigurations() requires N or higher"); return 0; @@ -451,6 +455,7 @@ class WebRtcAudioRecord { if (audioRecord == null) { return 0; } + // Get a list of the currently active audio recording configurations of the device (can be more // than one). An empty list indicates there is no recording active when queried.
List<AudioRecordingConfiguration> configs = audioManager.getActiveRecordingConfigurations(); @@ -463,10 +468,9 @@ class WebRtcAudioRecord { // to the AudioRecord instance) is matching what the audio recording configuration lists // as its client parameters. If these do not match, recording might work but under invalid // conditions. - audioSourceMatchesRecordingSession = + audioSourceMatchesRecordingSessionRef.set( verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(), - audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs); - isAudioConfigVerified = true; + audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs)); } } return numActiveRecordingSessions; @@ -501,12 +505,13 @@ class WebRtcAudioRecord { audioRecord.release(); audioRecord = null; } + audioSourceMatchesRecordingSessionRef.set(null); } private void reportWebRtcAudioRecordInitError(String errorMessage) { Logging.e(TAG, "Init recording error: " + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (errorCallback != null) { errorCallback.onWebRtcAudioRecordInitError(errorMessage); } @@ -516,7 +521,7 @@ class WebRtcAudioRecord { AudioRecordStartErrorCode errorCode, String errorMessage) { Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); - logRecordingConfigurations(false /* verifyAudioConfig */); + logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); if (errorCallback != null) { errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage); } @@ -564,18 +569,18 @@ class WebRtcAudioRecord { // Use an ExecutorService to schedule a task after a given delay where the task consists of // checking (by logging) the current status of active recording sessions.
- private void scheduleLogRecordingConfigurationsTask() { + private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) { Logging.d(TAG, "scheduleLogRecordingConfigurationsTask"); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) { return; } - if (executor != null) { - executor.shutdownNow(); - } - executor = Executors.newSingleThreadScheduledExecutor(); Callable<String> callable = () -> { - logRecordingConfigurations(true /* verifyAudioConfig */); + if (this.audioRecord == audioRecord) { + logRecordingConfigurations(audioRecord, true /* verifyAudioConfig */); + } else { + Logging.d(TAG, "audio record has changed"); + } return "Scheduled task is done"; }; @@ -704,4 +709,22 @@ class WebRtcAudioRecord { return "INVALID"; } } + + private static final AtomicInteger nextSchedulerId = new AtomicInteger(0); + + static ScheduledExecutorService newDefaultScheduler() { + AtomicInteger nextThreadId = new AtomicInteger(0); + return Executors.newScheduledThreadPool(0, new ThreadFactory() { + /** + * Constructs a new {@code Thread} + */ + @Override + public Thread newThread(Runnable r) { + Thread thread = Executors.defaultThreadFactory().newThread(r); + thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s", + nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement())); + return thread; + } + }); + } }