Adds WebRtcAudioTrack.setSpeakerMute() API

BUG=NONE

Review-Url: https://codereview.webrtc.org/2025423003
Cr-Commit-Position: refs/heads/master@{#13029}
This commit is contained in:
henrika 2016-06-03 02:56:20 -07:00 committed by Commit bot
parent fc715f584f
commit b50e84509f
2 changed files with 20 additions and 2 deletions

View File

@@ -284,9 +284,10 @@ public class WebRtcAudioRecord {
private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
// TODO(glaznev): remove this API once SW mic mute can use AudioTrack.setEnabled().
// Forces every recorded sample to zero while |mute| is true, i.e. acts as a
// software microphone mute. The flag is consumed by the recording thread.
public static void setMicrophoneMute(boolean mute) {
  // Emit the deprecation notice first, then record the requested state.
  Logging.w(TAG, "setMicrophoneMute API will be deprecated soon.");
  final String stateMessage = "setMicrophoneMute(" + mute + ")";
  Logging.w(TAG, stateMessage);
  microphoneMute = mute;
}
}

View File

@@ -46,6 +46,11 @@ class WebRtcAudioTrack {
private AudioTrack audioTrack = null;
private AudioTrackThread audioThread = null;
// Samples to be played are replaced by zeros if |speakerMute| is set to true.
// Can be used to ensure that the speaker is fully muted.
private static volatile boolean speakerMute = false;
private byte[] emptyBytes;
/**
* Audio thread which keeps calling AudioTrack.write() to stream audio.
* Data is periodically acquired from the native WebRTC layer using the
@@ -89,6 +94,10 @@ class WebRtcAudioTrack {
// Upon return, the buffer position will have been advanced to reflect
// the amount of data that was successfully written to the AudioTrack.
assertTrue(sizeInBytes <= byteBuffer.remaining());
if (speakerMute) {
byteBuffer.clear();
byteBuffer.put(emptyBytes);
}
int bytesWritten = 0;
if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
@@ -159,6 +168,7 @@ class WebRtcAudioTrack {
byteBuffer = byteBuffer.allocateDirect(
bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
emptyBytes = new byte[byteBuffer.capacity()];
// Rather than passing the ByteBuffer with every callback (requiring
// the potentially expensive GetDirectBufferAddress) we simply have the
// the native class cache the address to the memory once.
@@ -273,4 +283,11 @@ class WebRtcAudioTrack {
ByteBuffer byteBuffer, long nativeAudioRecord);
private native void nativeGetPlayoutData(int bytes, long nativeAudioRecord);
// Sets all samples to be played out to zero if |mute| is true, i.e.,
// ensures that the speaker is muted. The flag is read by the playout thread,
// which substitutes silence for the audio data before writing to AudioTrack.
public static void setSpeakerMute(boolean mute) {
  // Log the requested state before updating the volatile flag.
  final String stateMessage = "setSpeakerMute(" + mute + ")";
  Logging.w(TAG, stateMessage);
  speakerMute = mute;
}
}