/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.audio;

import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
import androidx.annotation.RequiresApi;
import java.util.concurrent.ScheduledExecutorService;
import org.webrtc.JniCommon;
import org.webrtc.Logging;

/**
 * AudioDeviceModule implemented using android.media.AudioRecord as input and
 * android.media.AudioTrack as output.
 */
public class JavaAudioDeviceModule implements AudioDeviceModule {
  private static final String TAG = "JavaAudioDeviceModule";

  public static Builder builder(Context context) {
    return new Builder(context);
  }
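
  // Illustrative usage sketch (not part of the upstream file): a typical client builds the
  // module, hands it to a PeerConnectionFactory and releases it when it is no longer needed.
  // PeerConnectionFactory.builder()/setAudioDeviceModule() are assumed from the public
  // org.webrtc Android API, and factory initialization is omitted for brevity.
  //
  //   JavaAudioDeviceModule adm = JavaAudioDeviceModule.builder(applicationContext)
  //       .setUseHardwareAcousticEchoCanceler(true)
  //       .setUseHardwareNoiseSuppressor(true)
  //       .createAudioDeviceModule();
  //   PeerConnectionFactory factory = PeerConnectionFactory.builder()
  //       .setAudioDeviceModule(adm)
  //       .createPeerConnectionFactory();
  //   // ... use the factory ...
  //   adm.release(); // the caller owns the module and must call release()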

  public static class Builder {
    private final Context context;
    private ScheduledExecutorService scheduler;
    private final AudioManager audioManager;
    private int inputSampleRate;
    private int outputSampleRate;
    private int audioSource = WebRtcAudioRecord.DEFAULT_AUDIO_SOURCE;
    private int audioFormat = WebRtcAudioRecord.DEFAULT_AUDIO_FORMAT;
    private AudioTrackErrorCallback audioTrackErrorCallback;
    private AudioRecordErrorCallback audioRecordErrorCallback;
    private SamplesReadyCallback samplesReadyCallback;
    private AudioTrackStateCallback audioTrackStateCallback;
    private AudioRecordStateCallback audioRecordStateCallback;
    private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported();
    private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
    private boolean useStereoInput;
    private boolean useStereoOutput;
    private AudioAttributes audioAttributes;
    private boolean useLowLatency;
    private boolean enableVolumeLogger;

    private Builder(Context context) {
      this.context = context;
      this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
      this.inputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
      this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
      this.useLowLatency = false;
      this.enableVolumeLogger = true;
    }

    public Builder setScheduler(ScheduledExecutorService scheduler) {
      this.scheduler = scheduler;
      return this;
    }

    /**
     * Call this method if the default handling of querying the native sample rate shall be
     * overridden. Can be useful on some devices where the available Android APIs are known to
     * return invalid results.
     */
    public Builder setSampleRate(int sampleRate) {
      Logging.d(TAG, "Input/Output sample rate overridden to: " + sampleRate);
      this.inputSampleRate = sampleRate;
      this.outputSampleRate = sampleRate;
      return this;
    }

    /**
     * Call this method to specifically override the input sample rate.
     */
    public Builder setInputSampleRate(int inputSampleRate) {
      Logging.d(TAG, "Input sample rate overridden to: " + inputSampleRate);
      this.inputSampleRate = inputSampleRate;
      return this;
    }

    /**
     * Call this method to specifically override the output sample rate.
     */
    public Builder setOutputSampleRate(int outputSampleRate) {
      Logging.d(TAG, "Output sample rate overridden to: " + outputSampleRate);
      this.outputSampleRate = outputSampleRate;
      return this;
    }

    /**
     * Call this to change the audio source. The argument should be one of the values from
     * android.media.MediaRecorder.AudioSource. The default is AudioSource.VOICE_COMMUNICATION.
     */
    public Builder setAudioSource(int audioSource) {
      this.audioSource = audioSource;
      return this;
    }

    /**
     * Call this to change the audio format. The argument should be one of the values from
     * android.media.AudioFormat: ENCODING_PCM_8BIT, ENCODING_PCM_16BIT or ENCODING_PCM_FLOAT.
     * The default audio data format is PCM 16 bit per sample, which is guaranteed to be
     * supported by all devices.
     */
    public Builder setAudioFormat(int audioFormat) {
      this.audioFormat = audioFormat;
      return this;
    }
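
    // Illustrative sketch (an assumption, not upstream code): overriding the source and format
    // via the two setters above. MediaRecorder.AudioSource.UNPROCESSED (API 24+, device
    // dependent) and AudioFormat.ENCODING_PCM_FLOAT are standard android.media constants and
    // would need the corresponding imports in client code.
    //
    //   JavaAudioDeviceModule.builder(context)
    //       .setAudioSource(MediaRecorder.AudioSource.UNPROCESSED)
    //       .setAudioFormat(AudioFormat.ENCODING_PCM_FLOAT)
    //       .createAudioDeviceModule();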

    /**
     * Set a callback to retrieve errors from the AudioTrack.
     */
    public Builder setAudioTrackErrorCallback(AudioTrackErrorCallback audioTrackErrorCallback) {
      this.audioTrackErrorCallback = audioTrackErrorCallback;
      return this;
    }

    /**
     * Set a callback to retrieve errors from the AudioRecord.
     */
    public Builder setAudioRecordErrorCallback(AudioRecordErrorCallback audioRecordErrorCallback) {
      this.audioRecordErrorCallback = audioRecordErrorCallback;
      return this;
    }

    /**
     * Set a callback to listen to the raw audio input from the AudioRecord.
     */
    public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback) {
      this.samplesReadyCallback = samplesReadyCallback;
      return this;
    }

    /**
     * Set a callback to retrieve information from the AudioTrack on when audio starts and stops.
     */
    public Builder setAudioTrackStateCallback(AudioTrackStateCallback audioTrackStateCallback) {
      this.audioTrackStateCallback = audioTrackStateCallback;
      return this;
    }

    /**
     * Set a callback to retrieve information from the AudioRecord on when audio starts and stops.
     */
    public Builder setAudioRecordStateCallback(AudioRecordStateCallback audioRecordStateCallback) {
      this.audioRecordStateCallback = audioRecordStateCallback;
      return this;
    }

    /**
     * Control if the built-in HW noise suppressor should be used or not. The default is on if it
     * is supported. It is possible to query support by calling
     * isBuiltInNoiseSuppressorSupported().
     */
    public Builder setUseHardwareNoiseSuppressor(boolean useHardwareNoiseSuppressor) {
      if (useHardwareNoiseSuppressor && !isBuiltInNoiseSuppressorSupported()) {
        Logging.e(TAG, "HW NS not supported");
        useHardwareNoiseSuppressor = false;
      }
      this.useHardwareNoiseSuppressor = useHardwareNoiseSuppressor;
      return this;
    }

    /**
     * Control if the built-in HW acoustic echo canceler should be used or not. The default is on
     * if it is supported. It is possible to query support by calling
     * isBuiltInAcousticEchoCancelerSupported().
     */
    public Builder setUseHardwareAcousticEchoCanceler(boolean useHardwareAcousticEchoCanceler) {
      if (useHardwareAcousticEchoCanceler && !isBuiltInAcousticEchoCancelerSupported()) {
        Logging.e(TAG, "HW AEC not supported");
        useHardwareAcousticEchoCanceler = false;
      }
      this.useHardwareAcousticEchoCanceler = useHardwareAcousticEchoCanceler;
      return this;
    }

    /**
     * Control if stereo input should be used or not. The default is mono.
     */
    public Builder setUseStereoInput(boolean useStereoInput) {
      this.useStereoInput = useStereoInput;
      return this;
    }

    /**
     * Control if stereo output should be used or not. The default is mono.
     */
    public Builder setUseStereoOutput(boolean useStereoOutput) {
      this.useStereoOutput = useStereoOutput;
      return this;
    }

    /**
     * Control if the low-latency mode should be used. The default is disabled.
     */
    public Builder setUseLowLatency(boolean useLowLatency) {
      this.useLowLatency = useLowLatency;
      return this;
    }

    /**
     * Set custom {@link AudioAttributes} to use.
     */
    public Builder setAudioAttributes(AudioAttributes audioAttributes) {
      this.audioAttributes = audioAttributes;
      return this;
    }

    /**
     * Control if the volume logger on the audio output track should be enabled. The default is
     * enabled.
     */
    public Builder setEnableVolumeLogger(boolean enableVolumeLogger) {
      this.enableVolumeLogger = enableVolumeLogger;
      return this;
    }
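
    // Illustrative sketch (an assumption, not upstream code): a client that prefers WebRTC's
    // software echo cancellation and noise suppression over the device's built-in effects can
    // turn the hardware effects off explicitly; this corresponds to the "Overriding default
    // behavior" log lines emitted by createAudioDeviceModule() below.
    //
    //   JavaAudioDeviceModule.builder(context)
    //       .setUseHardwareAcousticEchoCanceler(false)
    //       .setUseHardwareNoiseSuppressor(false)
    //       .createAudioDeviceModule();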

    /**
     * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
     * and is responsible for calling release().
     */
    public JavaAudioDeviceModule createAudioDeviceModule() {
      Logging.d(TAG, "createAudioDeviceModule");
      if (useHardwareNoiseSuppressor) {
        Logging.d(TAG, "HW NS will be used.");
      } else {
        if (isBuiltInNoiseSuppressorSupported()) {
          Logging.d(TAG, "Overriding default behavior; now using WebRTC NS!");
        }
        Logging.d(TAG, "HW NS will not be used.");
      }
      if (useHardwareAcousticEchoCanceler) {
        Logging.d(TAG, "HW AEC will be used.");
      } else {
        if (isBuiltInAcousticEchoCancelerSupported()) {
          Logging.d(TAG, "Overriding default behavior; now using WebRTC AEC!");
        }
        Logging.d(TAG, "HW AEC will not be used.");
      }
      // Low-latency mode was introduced in API version 26, see
      // https://developer.android.com/reference/android/media/AudioTrack#PERFORMANCE_MODE_LOW_LATENCY
      final int MIN_LOW_LATENCY_SDK_VERSION = 26;
      if (useLowLatency && Build.VERSION.SDK_INT >= MIN_LOW_LATENCY_SDK_VERSION) {
        Logging.d(TAG, "Low latency mode will be used.");
      }
      ScheduledExecutorService executor = this.scheduler;
      if (executor == null) {
        executor = WebRtcAudioRecord.newDefaultScheduler();
      }
      final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
          audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
          samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
      final WebRtcAudioTrack audioOutput =
          new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
              audioTrackStateCallback, useLowLatency, enableVolumeLogger);
      return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
          inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
    }
  }

  /* AudioRecord */
  // Audio recording error handler functions.
  public enum AudioRecordStartErrorCode {
    AUDIO_RECORD_START_EXCEPTION,
    AUDIO_RECORD_START_STATE_MISMATCH,
  }

  public static interface AudioRecordErrorCallback {
    void onWebRtcAudioRecordInitError(String errorMessage);
    void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage);
    void onWebRtcAudioRecordError(String errorMessage);
  }

  /** Called when audio recording starts and stops. */
  public static interface AudioRecordStateCallback {
    void onWebRtcAudioRecordStart();
    void onWebRtcAudioRecordStop();
  }
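
  // Illustrative sketch (an assumption, not upstream code): a minimal AudioRecordErrorCallback,
  // suitable for passing to Builder#setAudioRecordErrorCallback, that only logs failures via
  // android.util.Log. Real clients typically also tear down or restart the call here.
  //
  //   AudioRecordErrorCallback recordErrorCallback = new AudioRecordErrorCallback() {
  //     @Override
  //     public void onWebRtcAudioRecordInitError(String errorMessage) {
  //       Log.e("AppAudio", "AudioRecord init error: " + errorMessage);
  //     }
  //     @Override
  //     public void onWebRtcAudioRecordStartError(
  //         AudioRecordStartErrorCode errorCode, String errorMessage) {
  //       Log.e("AppAudio", "AudioRecord start error: " + errorCode + ", " + errorMessage);
  //     }
  //     @Override
  //     public void onWebRtcAudioRecordError(String errorMessage) {
  //       Log.e("AppAudio", "AudioRecord error: " + errorMessage);
  //     }
  //   };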

  /**
   * Contains audio sample information.
   */
  public static class AudioSamples {
    /** See {@link AudioRecord#getAudioFormat()} */
    private final int audioFormat;
    /** See {@link AudioRecord#getChannelCount()} */
    private final int channelCount;
    /** See {@link AudioRecord#getSampleRate()} */
    private final int sampleRate;

    private final byte[] data;

    public AudioSamples(int audioFormat, int channelCount, int sampleRate, byte[] data) {
      this.audioFormat = audioFormat;
      this.channelCount = channelCount;
      this.sampleRate = sampleRate;
      this.data = data;
    }

    public int getAudioFormat() {
      return audioFormat;
    }

    public int getChannelCount() {
      return channelCount;
    }

    public int getSampleRate() {
      return sampleRate;
    }

    public byte[] getData() {
      return data;
    }
  }

  /** Called when new audio samples are ready. This should only be set for debug purposes. */
  public static interface SamplesReadyCallback {
    void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
  }
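
  // Illustrative sketch (an assumption, not upstream code): a debug-only SamplesReadyCallback
  // that computes the peak amplitude of each buffer, assuming the default ENCODING_PCM_16BIT
  // format (little-endian 16-bit samples, as delivered by AudioRecord). AudioFormat and Log are
  // the standard android.media / android.util classes and would need imports in client code.
  //
  //   SamplesReadyCallback levelMeter = samples -> {
  //     if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) {
  //       return; // this sketch only handles 16-bit PCM
  //     }
  //     byte[] data = samples.getData();
  //     int peak = 0;
  //     for (int i = 0; i + 1 < data.length; i += 2) {
  //       int sample = (short) ((data[i] & 0xff) | (data[i + 1] << 8));
  //       peak = Math.max(peak, Math.abs(sample));
  //     }
  //     Log.d("AppAudio", "Peak amplitude: " + peak);
  //   };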

  /* AudioTrack */
  // Audio playout/track error handler functions.
  public enum AudioTrackStartErrorCode {
    AUDIO_TRACK_START_EXCEPTION,
    AUDIO_TRACK_START_STATE_MISMATCH,
  }

  public static interface AudioTrackErrorCallback {
    void onWebRtcAudioTrackInitError(String errorMessage);
    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
    void onWebRtcAudioTrackError(String errorMessage);
  }

  /** Called when audio playout starts and stops. */
  public static interface AudioTrackStateCallback {
    void onWebRtcAudioTrackStart();
    void onWebRtcAudioTrackStop();
  }

  /**
   * Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can
   * be excluded).
   */
  public static boolean isBuiltInAcousticEchoCancelerSupported() {
    return WebRtcAudioEffects.isAcousticEchoCancelerSupported();
  }

  /**
   * Returns true if the device supports built-in HW NS, and the UUID is approved (some UUIDs can
   * be excluded).
   */
  public static boolean isBuiltInNoiseSuppressorSupported() {
    return WebRtcAudioEffects.isNoiseSuppressorSupported();
  }

  private final Context context;
  private final AudioManager audioManager;
  private final WebRtcAudioRecord audioInput;
  private final WebRtcAudioTrack audioOutput;
  private final int inputSampleRate;
  private final int outputSampleRate;
  private final boolean useStereoInput;
  private final boolean useStereoOutput;

  private final Object nativeLock = new Object();
  private long nativeAudioDeviceModule;

  private JavaAudioDeviceModule(Context context, AudioManager audioManager,
      WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate,
      int outputSampleRate, boolean useStereoInput, boolean useStereoOutput) {
    this.context = context;
    this.audioManager = audioManager;
    this.audioInput = audioInput;
    this.audioOutput = audioOutput;
    this.inputSampleRate = inputSampleRate;
    this.outputSampleRate = outputSampleRate;
    this.useStereoInput = useStereoInput;
    this.useStereoOutput = useStereoOutput;
  }

  @Override
  public long getNativeAudioDeviceModulePointer() {
    synchronized (nativeLock) {
      if (nativeAudioDeviceModule == 0) {
        nativeAudioDeviceModule = nativeCreateAudioDeviceModule(context, audioManager, audioInput,
            audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
      }
      return nativeAudioDeviceModule;
    }
  }

  @Override
  public void release() {
    synchronized (nativeLock) {
      if (nativeAudioDeviceModule != 0) {
        JniCommon.nativeReleaseRef(nativeAudioDeviceModule);
        nativeAudioDeviceModule = 0;
      }
    }
  }

  @Override
  public void setSpeakerMute(boolean mute) {
    Logging.d(TAG, "setSpeakerMute: " + mute);
    audioOutput.setSpeakerMute(mute);
  }

  @Override
  public void setMicrophoneMute(boolean mute) {
    Logging.d(TAG, "setMicrophoneMute: " + mute);
    audioInput.setMicrophoneMute(mute);
  }

  /**
   * Start to prefer a specific {@link AudioDeviceInfo} device for recording. Typically this should
   * only be used if a client gives an explicit option for choosing a physical device to record
   * from. Otherwise the best-matching device for other parameters will be used. Calling after
   * recording is started may cause a temporary interruption if the audio routing changes.
   */
  @RequiresApi(Build.VERSION_CODES.M)
  public void setPreferredInputDevice(AudioDeviceInfo preferredInputDevice) {
    Logging.d(TAG, "setPreferredInputDevice: " + preferredInputDevice);
    audioInput.setPreferredDevice(preferredInputDevice);
  }
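
  // Illustrative sketch (an assumption, not upstream code): selecting a wired headset microphone,
  // if one is present, using the standard AudioManager/AudioDeviceInfo APIs (API 23+). The
  // audioManager and javaAudioDeviceModule variables are placeholders for the client's instances.
  //
  //   for (AudioDeviceInfo device : audioManager.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
  //     if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
  //       javaAudioDeviceModule.setPreferredInputDevice(device);
  //       break;
  //     }
  //   }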

  private static native long nativeCreateAudioDeviceModule(Context context,
      AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput,
      int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput);
}