diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
index 3cb94381cf..43fce4f89d 100644
--- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
+++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java
@@ -11,6 +11,7 @@
 package org.webrtc.audio;
 
 import android.content.Context;
+import android.media.AudioAttributes;
 import android.media.AudioDeviceInfo;
 import android.media.AudioManager;
 import android.os.Build;
@@ -47,6 +48,7 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
     private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
     private boolean useStereoInput;
     private boolean useStereoOutput;
+    private AudioAttributes audioAttributes;
 
     private Builder(Context context) {
       this.context = context;
@@ -193,6 +195,14 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
       return this;
     }
 
+    /**
+     * Set custom {@link AudioAttributes} to use.
+     */
+    public Builder setAudioAttributes(AudioAttributes audioAttributes) {
+      this.audioAttributes = audioAttributes;
+      return this;
+    }
+
     /**
      * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
      * and is responsible for calling release().
@@ -223,7 +233,7 @@ public class JavaAudioDeviceModule implements AudioDeviceModule {
           audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
           samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
       final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(
-          context, audioManager, audioTrackErrorCallback, audioTrackStateCallback);
+          context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback);
       return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
           inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
     }
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index 94eb2a4357..3e01b958de 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -71,6 +71,7 @@ class WebRtcAudioTrack {
 
   private ByteBuffer byteBuffer;
 
+  private @Nullable final AudioAttributes audioAttributes;
   private @Nullable AudioTrack audioTrack;
   private @Nullable AudioTrackThread audioThread;
@@ -162,15 +163,17 @@ class WebRtcAudioTrack {
 
   @CalledByNative
   WebRtcAudioTrack(Context context, AudioManager audioManager) {
-    this(context, audioManager, null /* errorCallback */, null /* stateCallback */);
+    this(context, audioManager, null /* audioAttributes */, null /* errorCallback */,
+        null /* stateCallback */);
   }
 
   WebRtcAudioTrack(Context context, AudioManager audioManager,
-      @Nullable AudioTrackErrorCallback errorCallback,
+      @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback,
       @Nullable AudioTrackStateCallback stateCallback) {
     threadChecker.detachThread();
     this.context = context;
     this.audioManager = audioManager;
+    this.audioAttributes = audioAttributes;
     this.errorCallback = errorCallback;
     this.stateCallback = stateCallback;
     this.volumeLogger = new VolumeLogger(audioManager);
@@ -231,8 +234,8 @@ class WebRtcAudioTrack {
       // supersede the notion of stream types for defining the behavior of audio playback,
       // and to allow certain platforms or routing policies to use this information for more
       // refined volume or routing decisions.
-      audioTrack =
-          createAudioTrackOnLollipopOrHigher(sampleRate, channelConfig, minBufferSizeInBytes);
+      audioTrack = createAudioTrackOnLollipopOrHigher(
+          sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
     } else {
       // Use default constructor for API levels below 21.
       audioTrack =
@@ -383,8 +386,8 @@ class WebRtcAudioTrack {
   // It allows certain platforms or routing policies to use this information for more
   // refined volume or routing decisions.
   @TargetApi(Build.VERSION_CODES.LOLLIPOP)
-  private static AudioTrack createAudioTrackOnLollipopOrHigher(
-      int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
+  private static AudioTrack createAudioTrackOnLollipopOrHigher(int sampleRateInHz,
+      int channelConfig, int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
     Logging.d(TAG, "createAudioTrackOnLollipopOrHigher");
     // TODO(henrika): use setPerformanceMode(int) with PERFORMANCE_MODE_LOW_LATENCY to control
     // performance when Android O is supported. Add some logging in the mean time.
@@ -394,11 +397,30 @@ class WebRtcAudioTrack {
     if (sampleRateInHz != nativeOutputSampleRate) {
       Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
     }
+
+    AudioAttributes.Builder attributesBuilder =
+        new AudioAttributes.Builder()
+            .setUsage(DEFAULT_USAGE)
+            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
+
+    if (overrideAttributes != null) {
+      if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
+        attributesBuilder.setUsage(overrideAttributes.getUsage());
+      }
+      if (overrideAttributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) {
+        attributesBuilder.setContentType(overrideAttributes.getContentType());
+      }
+
+      // getAllowedCapturePolicy()/setAllowedCapturePolicy() were added in API 29 (Android Q);
+      // calling them on earlier releases would throw NoSuchMethodError at runtime.
+      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+        attributesBuilder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
+      }
+      attributesBuilder.setFlags(overrideAttributes.getFlags());
+    }
+
     // Create an audio track where the audio usage is for VoIP and the content type is speech.
-    return new AudioTrack(new AudioAttributes.Builder()
-                              .setUsage(DEFAULT_USAGE)
-                              .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
-                              .build(),
+    return new AudioTrack(attributesBuilder.build(),
         new AudioFormat.Builder()
             .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
             .setSampleRate(sampleRateInHz)