1 /* 2 * Copyright (C) 2008 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.media; 18 19 import static android.companion.virtual.VirtualDeviceParams.DEVICE_POLICY_DEFAULT; 20 import static android.companion.virtual.VirtualDeviceParams.POLICY_TYPE_AUDIO; 21 import static android.content.Context.DEVICE_ID_DEFAULT; 22 import static android.media.AudioManager.AUDIO_SESSION_ID_GENERATE; 23 import static android.media.audio.Flags.FLAG_ROUTED_DEVICE_IDS; 24 25 import android.annotation.CallbackExecutor; 26 import android.annotation.FlaggedApi; 27 import android.annotation.FloatRange; 28 import android.annotation.IntDef; 29 import android.annotation.IntRange; 30 import android.annotation.NonNull; 31 import android.annotation.Nullable; 32 import android.annotation.RequiresPermission; 33 import android.annotation.SystemApi; 34 import android.annotation.TestApi; 35 import android.app.ActivityThread; 36 import android.companion.virtual.VirtualDeviceManager; 37 import android.compat.annotation.UnsupportedAppUsage; 38 import android.content.AttributionSource; 39 import android.content.AttributionSource.ScopedParcelState; 40 import android.content.Context; 41 import android.media.MediaRecorder.Source; 42 import android.media.audio.common.AudioInputFlags; 43 import android.media.audiopolicy.AudioMix; 44 import android.media.audiopolicy.AudioMixingRule; 45 import android.media.audiopolicy.AudioPolicy; 46 import android.media.metrics.LogSessionId; 47 import android.media.projection.MediaProjection; 48 import android.os.Binder; 49 import android.os.Build; 50 import android.os.Handler; 51 import android.os.IBinder; 52 import android.os.Looper; 53 import android.os.Message; 54 import android.os.Parcel; 55 import android.os.PersistableBundle; 56 import android.os.RemoteException; 57 import android.os.ServiceManager; 58 import android.util.ArrayMap; 59 import android.util.Log; 60 import android.util.Pair; 61 62 import com.android.internal.annotations.GuardedBy; 63 import com.android.internal.util.Preconditions; 64 65 import java.io.IOException; 66 import java.lang.annotation.Retention; 67 import java.lang.annotation.RetentionPolicy; 68 import java.lang.ref.WeakReference; 69 import java.nio.ByteBuffer; 70 import java.util.ArrayList; 71 import java.util.HashSet; 72 import java.util.Iterator; 73 import java.util.List; 74 import java.util.Objects; 75 import java.util.concurrent.Executor; 76 77 /** 78 * The AudioRecord class manages the audio resources for Java applications 79 * to record audio from the audio input hardware of the platform. This is 80 * achieved by "pulling" (reading) the data from the AudioRecord object. The 81 * application is responsible for polling the AudioRecord object in time using one of 82 * the following three methods: {@link #read(byte[],int, int)}, {@link #read(short[], int, int)} 83 * or {@link #read(ByteBuffer, int)}. 
The choice of which method to use will be based 84 * on the audio data storage format that is the most convenient for the user of AudioRecord. 85 * <p>Upon creation, an AudioRecord object initializes its associated audio buffer that it will 86 * fill with the new audio data. The size of this buffer, specified during the construction, 87 * determines how long an AudioRecord can record before "over-running" data that has not 88 * been read yet. Data should be read from the audio hardware in chunks of sizes inferior to 89 * the total recording buffer size.</p> 90 * <p> 91 * Applications creating an AudioRecord instance need 92 * {@link android.Manifest.permission#RECORD_AUDIO} or the Builder will throw 93 * {@link java.lang.UnsupportedOperationException} on 94 * {@link android.media.AudioRecord.Builder#build build()}, 95 * and the constructor will return an instance in state 96 * {@link #STATE_UNINITIALIZED}.</p> 97 */ 98 public class AudioRecord implements AudioRouting, MicrophoneDirection, 99 AudioRecordingMonitor, AudioRecordingMonitorClient 100 { 101 //--------------------------------------------------------- 102 // Constants 103 //-------------------- 104 105 106 /** 107 * indicates AudioRecord state is not successfully initialized. 108 */ 109 public static final int STATE_UNINITIALIZED = 0; 110 /** 111 * indicates AudioRecord state is ready to be used 112 */ 113 public static final int STATE_INITIALIZED = 1; 114 115 /** 116 * indicates AudioRecord recording state is not recording 117 */ 118 public static final int RECORDSTATE_STOPPED = 1; // matches SL_RECORDSTATE_STOPPED 119 /** 120 * indicates AudioRecord recording state is recording 121 */ 122 public static final int RECORDSTATE_RECORDING = 3;// matches SL_RECORDSTATE_RECORDING 123 124 /** 125 * Denotes a successful operation. 126 */ 127 public static final int SUCCESS = AudioSystem.SUCCESS; 128 /** 129 * Denotes a generic operation failure. 130 */ 131 public static final int ERROR = AudioSystem.ERROR; 132 /** 133 * Denotes a failure due to the use of an invalid value. 134 */ 135 public static final int ERROR_BAD_VALUE = AudioSystem.BAD_VALUE; 136 /** 137 * Denotes a failure due to the improper use of a method. 138 */ 139 public static final int ERROR_INVALID_OPERATION = AudioSystem.INVALID_OPERATION; 140 /** 141 * An error code indicating that the object reporting it is no longer valid and needs to 142 * be recreated. 143 */ 144 public static final int ERROR_DEAD_OBJECT = AudioSystem.DEAD_OBJECT; 145 146 // Error codes: 147 // to keep in sync with frameworks/base/core/jni/android_media_AudioRecord.cpp 148 private static final int AUDIORECORD_ERROR_SETUP_ZEROFRAMECOUNT = -16; 149 private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELMASK = -17; 150 private static final int AUDIORECORD_ERROR_SETUP_INVALIDFORMAT = -18; 151 private static final int AUDIORECORD_ERROR_SETUP_INVALIDSOURCE = -19; 152 private static final int AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED = -20; 153 154 // Events: 155 // to keep in sync with frameworks/av/include/media/AudioRecord.h 156 /** 157 * Event id denotes when record head has reached a previously set marker. 158 */ 159 private static final int NATIVE_EVENT_MARKER = 2; 160 /** 161 * Event id denotes when previously set update period has elapsed during recording. 
162 */ 163 private static final int NATIVE_EVENT_NEW_POS = 3; 164 165 private final static String TAG = "android.media.AudioRecord"; 166 167 /** @hide */ 168 public final static String SUBMIX_FIXED_VOLUME = "fixedVolume"; 169 170 /** @hide */ 171 @IntDef({ 172 READ_BLOCKING, 173 READ_NON_BLOCKING 174 }) 175 @Retention(RetentionPolicy.SOURCE) 176 public @interface ReadMode {} 177 178 /** 179 * The read mode indicating the read operation will block until all data 180 * requested has been read. 181 */ 182 public final static int READ_BLOCKING = 0; 183 184 /** 185 * The read mode indicating the read operation will return immediately after 186 * reading as much audio data as possible without blocking. 187 */ 188 public final static int READ_NON_BLOCKING = 1; 189 190 //--------------------------------------------------------- 191 // Used exclusively by native code 192 //-------------------- 193 /** 194 * Accessed by native methods: provides access to C++ AudioRecord object 195 * Is 0 after release() 196 */ 197 @SuppressWarnings("unused") 198 @UnsupportedAppUsage 199 private long mNativeAudioRecordHandle; 200 201 /** 202 * Accessed by native methods: provides access to the callback data. 203 */ 204 @SuppressWarnings("unused") 205 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 206 private long mNativeJNIDataHandle; 207 208 //--------------------------------------------------------- 209 // Member variables 210 //-------------------- 211 private AudioPolicy mAudioCapturePolicy; 212 213 /** 214 * The audio data sampling rate in Hz. 215 * Never {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}. 216 */ 217 private int mSampleRate; // initialized by all constructors via audioParamCheck() 218 /** 219 * The number of input audio channels (1 is mono, 2 is stereo) 220 */ 221 private int mChannelCount; 222 /** 223 * The audio channel position mask 224 */ 225 private int mChannelMask; 226 /** 227 * The audio channel index mask 228 */ 229 private int mChannelIndexMask; 230 /** 231 * The encoding of the audio samples. 232 * @see AudioFormat#ENCODING_PCM_8BIT 233 * @see AudioFormat#ENCODING_PCM_16BIT 234 * @see AudioFormat#ENCODING_PCM_FLOAT 235 */ 236 private int mAudioFormat; 237 /** 238 * Where the audio data is recorded from. 239 */ 240 private int mRecordSource; 241 /** 242 * Indicates the state of the AudioRecord instance. 243 */ 244 private int mState = STATE_UNINITIALIZED; 245 /** 246 * Indicates the recording state of the AudioRecord instance. 247 */ 248 private int mRecordingState = RECORDSTATE_STOPPED; 249 /** 250 * Lock to make sure mRecordingState updates are reflecting the actual state of the object. 251 */ 252 private final Object mRecordingStateLock = new Object(); 253 /** 254 * The listener the AudioRecord notifies when the record position reaches a marker 255 * or for periodic updates during the progression of the record head. 
256 * @see #setRecordPositionUpdateListener(OnRecordPositionUpdateListener) 257 * @see #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler) 258 */ 259 private OnRecordPositionUpdateListener mPositionListener = null; 260 /** 261 * Lock to protect position listener updates against event notifications 262 */ 263 private final Object mPositionListenerLock = new Object(); 264 /** 265 * Handler for marker events coming from the native code 266 */ 267 private NativeEventHandler mEventHandler = null; 268 /** 269 * Looper associated with the thread that creates the AudioRecord instance 270 */ 271 @UnsupportedAppUsage 272 private Looper mInitializationLooper = null; 273 /** 274 * Size of the native audio buffer. 275 */ 276 private int mNativeBufferSizeInBytes = 0; 277 /** 278 * Audio session ID 279 */ 280 private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE; 281 /** 282 * Audio HAL Input Flags as bitfield. 283 */ 284 private int mHalInputFlags = 0; 285 286 /** 287 * AudioAttributes 288 */ 289 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 290 private AudioAttributes mAudioAttributes; 291 private boolean mIsSubmixFullVolume = false; 292 293 /** 294 * The log session id used for metrics. 295 * {@link LogSessionId#LOG_SESSION_ID_NONE} here means it is not set. 296 */ 297 @NonNull private LogSessionId mLogSessionId = LogSessionId.LOG_SESSION_ID_NONE; 298 299 //--------------------------------------------------------- 300 // Constructor, Finalize 301 //-------------------- 302 /** 303 * Class constructor. 304 * Though some invalid parameters will result in an {@link IllegalArgumentException} exception, 305 * other errors do not. Thus you should call {@link #getState()} immediately after construction 306 * to confirm that the object is usable. 307 * @param audioSource the recording source. 308 * See {@link MediaRecorder.AudioSource} for the recording source definitions. 309 * @param sampleRateInHz the sample rate expressed in Hertz. 44100Hz is currently the only 310 * rate that is guaranteed to work on all devices, but other rates such as 22050, 311 * 16000, and 11025 may work on some devices. 312 * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} means to use a route-dependent value 313 * which is usually the sample rate of the source. 314 * {@link #getSampleRate()} can be used to retrieve the actual sample rate chosen. 315 * @param channelConfig describes the configuration of the audio channels. 316 * See {@link AudioFormat#CHANNEL_IN_MONO} and 317 * {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is guaranteed 318 * to work on all devices. 319 * @param audioFormat the format in which the audio data is to be returned. 320 * See {@link AudioFormat#ENCODING_PCM_8BIT}, {@link AudioFormat#ENCODING_PCM_16BIT}, 321 * and {@link AudioFormat#ENCODING_PCM_FLOAT}. 322 * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written 323 * to during the recording. New audio data can be read from this buffer in smaller chunks 324 * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum 325 * required buffer size for the successful creation of an AudioRecord instance. Using values 326 * smaller than getMinBufferSize() will result in an initialization failure. 
327 * @throws java.lang.IllegalArgumentException 328 */ 329 @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes)330 public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat, 331 int bufferSizeInBytes) 332 throws IllegalArgumentException { 333 this((new AudioAttributes.Builder()) 334 .setInternalCapturePreset(audioSource) 335 .build(), 336 (new AudioFormat.Builder()) 337 .setChannelMask(getChannelMaskFromLegacyConfig(channelConfig, 338 true/*allow legacy configurations*/)) 339 .setEncoding(audioFormat) 340 .setSampleRate(sampleRateInHz) 341 .build(), 342 bufferSizeInBytes, 343 AudioManager.AUDIO_SESSION_ID_GENERATE); 344 } 345 346 /** 347 * @hide 348 * Class constructor with {@link AudioAttributes} and {@link AudioFormat}. 349 * @param attributes a non-null {@link AudioAttributes} instance. Use 350 * {@link AudioAttributes.Builder#setCapturePreset(int)} for configuring the audio 351 * source for this instance. 352 * @param format a non-null {@link AudioFormat} instance describing the format of the data 353 * that will be recorded through this AudioRecord. See {@link AudioFormat.Builder} for 354 * configuring the audio format parameters such as encoding, channel mask and sample rate. 355 * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written 356 * to during the recording. New audio data can be read from this buffer in smaller chunks 357 * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum 358 * required buffer size for the successful creation of an AudioRecord instance. Using values 359 * smaller than getMinBufferSize() will result in an initialization failure. 360 * @param sessionId ID of audio session the AudioRecord must be attached to, or 361 * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at construction 362 * time. See also {@link AudioManager#generateAudioSessionId()} to obtain a session ID before 363 * construction. 364 * @throws IllegalArgumentException 365 */ 366 @SystemApi 367 @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) AudioRecord(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes, int sessionId)368 public AudioRecord(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes, 369 int sessionId) throws IllegalArgumentException { 370 this(attributes, format, bufferSizeInBytes, sessionId, 371 ActivityThread.currentApplication(), 372 0 /*maxSharedAudioHistoryMs*/, 0 /* halInputFlags */); 373 } 374 375 /** 376 * @hide 377 * Class constructor with {@link AudioAttributes} and {@link AudioFormat}. 378 * @param attributes a non-null {@link AudioAttributes} instance. Use 379 * {@link AudioAttributes.Builder#setCapturePreset(int)} for configuring the audio 380 * source for this instance. 381 * @param format a non-null {@link AudioFormat} instance describing the format of the data 382 * that will be recorded through this AudioRecord. See {@link AudioFormat.Builder} for 383 * configuring the audio format parameters such as encoding, channel mask and sample rate. 384 * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written 385 * to during the recording. New audio data can be read from this buffer in smaller chunks 386 * than this size. 
See {@link #getMinBufferSize(int, int, int)} to determine the minimum
     * required buffer size for the successful creation of an AudioRecord instance. Using values
     * smaller than getMinBufferSize() will result in an initialization failure.
     * @param sessionId ID of audio session the AudioRecord must be attached to, or
     * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at construction
     * time. See also {@link AudioManager#generateAudioSessionId()} to obtain a session ID before
     * construction.
     * @param context An optional context on whose behalf the recording is performed.
     * @param maxSharedAudioHistoryMs the maximum duration, in milliseconds, of this AudioRecord's
     * audio history that can be shared with another app, or 0 for none.
     * @param halInputFlags Bitfield indexed by {@link AudioInputFlags} which is passed to the HAL.
     * @throws IllegalArgumentException
     */
    private AudioRecord(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes,
            int sessionId, @Nullable Context context,
            int maxSharedAudioHistoryMs, int halInputFlags) throws IllegalArgumentException {
        mRecordingState = RECORDSTATE_STOPPED;
        mHalInputFlags = halInputFlags;
        if (attributes == null) {
            throw new IllegalArgumentException("Illegal null AudioAttributes");
        }
        if (format == null) {
            throw new IllegalArgumentException("Illegal null AudioFormat");
        }

        // remember which looper is associated with the AudioRecord instantiation
        if ((mInitializationLooper = Looper.myLooper()) == null) {
            mInitializationLooper = Looper.getMainLooper();
        }

        // is this AudioRecord using REMOTE_SUBMIX at full volume?
        if (attributes.getCapturePreset() == MediaRecorder.AudioSource.REMOTE_SUBMIX) {
            final AudioAttributes.Builder ab =
                    new AudioAttributes.Builder(attributes);
            HashSet<String> filteredTags = new HashSet<String>();
            final Iterator<String> tagsIter = attributes.getTags().iterator();
            while (tagsIter.hasNext()) {
                final String tag = tagsIter.next();
                if (tag.equalsIgnoreCase(SUBMIX_FIXED_VOLUME)) {
                    mIsSubmixFullVolume = true;
                    Log.v(TAG, "Will record from REMOTE_SUBMIX at full fixed volume");
                } else { // SUBMIX_FIXED_VOLUME: is not to be propagated to the native layers
                    filteredTags.add(tag);
                }
            }
            ab.replaceTags(filteredTags);
            attributes = ab.build();
        }

        mAudioAttributes = attributes;

        int rate = format.getSampleRate();
        if (rate == AudioFormat.SAMPLE_RATE_UNSPECIFIED) {
            rate = 0;
        }

        int encoding = AudioFormat.ENCODING_DEFAULT;
        if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0)
        {
            encoding = format.getEncoding();
        }

        audioParamCheck(mAudioAttributes.getCapturePreset(), rate, encoding);

        if ((format.getPropertySetMask()
                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
            mChannelIndexMask = format.getChannelIndexMask();
            mChannelCount = format.getChannelCount();
        }
        if ((format.getPropertySetMask()
                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
            mChannelMask = getChannelMaskFromLegacyConfig(format.getChannelMask(), false);
            mChannelCount = format.getChannelCount();
        } else if (mChannelIndexMask == 0) {
            mChannelMask = getChannelMaskFromLegacyConfig(AudioFormat.CHANNEL_IN_DEFAULT, false);
            mChannelCount =
AudioFormat.channelCountFromInChannelMask(mChannelMask); 461 } 462 463 audioBuffSizeCheck(bufferSizeInBytes); 464 465 AttributionSource attributionSource = (context != null) 466 ? context.getAttributionSource() : AttributionSource.myAttributionSource(); 467 if (attributionSource.getPackageName() == null) { 468 // Command line utility 469 attributionSource = attributionSource.withPackageName("uid:" + Binder.getCallingUid()); 470 } 471 472 int[] sampleRate = new int[] {mSampleRate}; 473 int[] session = new int[1]; 474 session[0] = resolveSessionId(context, sessionId); 475 476 //TODO: update native initialization when information about hardware init failure 477 // due to capture device already open is available. 478 try (ScopedParcelState attributionSourceState = attributionSource.asScopedParcelState()) { 479 int initResult = native_setup(new WeakReference<AudioRecord>(this), mAudioAttributes, 480 sampleRate, mChannelMask, mChannelIndexMask, mAudioFormat, 481 mNativeBufferSizeInBytes, session, attributionSourceState.getParcel(), 482 0 /*nativeRecordInJavaObj*/, maxSharedAudioHistoryMs, mHalInputFlags); 483 if (initResult != SUCCESS) { 484 loge("Error code " + initResult + " when initializing native AudioRecord object."); 485 return; // with mState == STATE_UNINITIALIZED 486 } 487 } 488 489 mSampleRate = sampleRate[0]; 490 mSessionId = session[0]; 491 492 mState = STATE_INITIALIZED; 493 } 494 495 /** 496 * A constructor which explicitly connects a Native (C++) AudioRecord. For use by 497 * the AudioRecordRoutingProxy subclass. 498 * @param nativeRecordInJavaObj A C/C++ pointer to a native AudioRecord 499 * (associated with an OpenSL ES recorder). Note: the caller must ensure a correct 500 * value here as no error checking is or can be done. 501 */ AudioRecord(long nativeRecordInJavaObj)502 /*package*/ AudioRecord(long nativeRecordInJavaObj) { 503 mNativeAudioRecordHandle = 0; 504 mNativeJNIDataHandle = 0; 505 506 // other initialization... 507 if (nativeRecordInJavaObj != 0) { 508 deferred_connect(nativeRecordInJavaObj); 509 } else { 510 mState = STATE_UNINITIALIZED; 511 } 512 } 513 514 /** 515 * Sets an {@link AudioPolicy} to automatically unregister when the record is released. 516 * 517 * <p>This is to prevent users of the audio capture API from having to manually unregister the 518 * policy that was used to create the record. 519 */ unregisterAudioPolicyOnRelease(AudioPolicy audioPolicy)520 private void unregisterAudioPolicyOnRelease(AudioPolicy audioPolicy) { 521 mAudioCapturePolicy = audioPolicy; 522 } 523 524 /** 525 * @hide 526 */ deferred_connect(long nativeRecordInJavaObj)527 /* package */ void deferred_connect(long nativeRecordInJavaObj) { 528 if (mState != STATE_INITIALIZED) { 529 int[] session = {0}; 530 int[] rates = {0}; 531 //TODO: update native initialization when information about hardware init failure 532 // due to capture device already open is available. 533 // Note that for this native_setup, we are providing an already created/initialized 534 // *Native* AudioRecord, so the attributes parameters to native_setup() are ignored. 
535 final int initResult; 536 try (ScopedParcelState attributionSourceState = AttributionSource.myAttributionSource() 537 .asScopedParcelState()) { 538 initResult = native_setup(new WeakReference<>(this), 539 null /*mAudioAttributes*/, 540 rates /*mSampleRates*/, 541 0 /*mChannelMask*/, 542 0 /*mChannelIndexMask*/, 543 0 /*mAudioFormat*/, 544 0 /*mNativeBufferSizeInBytes*/, 545 session, 546 attributionSourceState.getParcel(), 547 nativeRecordInJavaObj, 548 0 /*maxSharedAudioHistoryMs*/, 549 0 /*halInputFlags*/); 550 } 551 if (initResult != SUCCESS) { 552 loge("Error code "+initResult+" when initializing native AudioRecord object."); 553 return; // with mState == STATE_UNINITIALIZED 554 } 555 556 mSessionId = session[0]; 557 558 mState = STATE_INITIALIZED; 559 } 560 } 561 562 /** @hide */ getAudioAttributes()563 public AudioAttributes getAudioAttributes() { 564 return mAudioAttributes; 565 } 566 567 /** 568 * Builder class for {@link AudioRecord} objects. 569 * Use this class to configure and create an <code>AudioRecord</code> instance. By setting the 570 * recording source and audio format parameters, you indicate which of 571 * those vary from the default behavior on the device. 572 * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat} 573 * parameters, to be used by a new <code>AudioRecord</code> instance: 574 * 575 * <pre class="prettyprint"> 576 * AudioRecord recorder = new AudioRecord.Builder() 577 * .setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION) 578 * .setAudioFormat(new AudioFormat.Builder() 579 * .setEncoding(AudioFormat.ENCODING_PCM_16BIT) 580 * .setSampleRate(32000) 581 * .setChannelMask(AudioFormat.CHANNEL_IN_MONO) 582 * .build()) 583 * .setBufferSizeInBytes(2*minBuffSize) 584 * .build(); 585 * </pre> 586 * <p> 587 * If the audio source is not set with {@link #setAudioSource(int)}, 588 * {@link MediaRecorder.AudioSource#DEFAULT} is used. 589 * <br>If the audio format is not specified or is incomplete, its channel configuration will be 590 * {@link AudioFormat#CHANNEL_IN_MONO}, and the encoding will be 591 * {@link AudioFormat#ENCODING_PCM_16BIT}. 592 * The sample rate will depend on the device actually selected for capture and can be queried 593 * with {@link #getSampleRate()} method. 594 * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)}, 595 * the minimum buffer size for the source is used. 596 */ 597 public static class Builder { 598 599 private static final String ERROR_MESSAGE_SOURCE_MISMATCH = 600 "Cannot both set audio source and set playback capture config"; 601 602 private AudioPlaybackCaptureConfiguration mAudioPlaybackCaptureConfiguration; 603 private AudioAttributes mAttributes; 604 private AudioFormat mFormat; 605 private Context mContext; 606 private int mBufferSizeInBytes; 607 private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE; 608 private int mPrivacySensitive = PRIVACY_SENSITIVE_DEFAULT; 609 private int mMaxSharedAudioHistoryMs = 0; 610 private int mCallRedirectionMode = AudioManager.CALL_REDIRECT_NONE; 611 private boolean mIsHotwordStream = false; 612 private boolean mIsHotwordLookback = false; 613 614 private static final int PRIVACY_SENSITIVE_DEFAULT = -1; 615 private static final int PRIVACY_SENSITIVE_DISABLED = 0; 616 private static final int PRIVACY_SENSITIVE_ENABLED = 1; 617 618 /** 619 * Constructs a new Builder with the default values as described above. 620 */ Builder()621 public Builder() { 622 } 623 624 /** 625 * @param source the audio source. 
626 * See {@link MediaRecorder.AudioSource} for the supported audio source definitions. 627 * @return the same Builder instance. 628 * @throws IllegalArgumentException 629 */ setAudioSource(@ource int source)630 public Builder setAudioSource(@Source int source) throws IllegalArgumentException { 631 Preconditions.checkState( 632 mAudioPlaybackCaptureConfiguration == null, 633 ERROR_MESSAGE_SOURCE_MISMATCH); 634 if ( (source < MediaRecorder.AudioSource.DEFAULT) || 635 (source > MediaRecorder.getAudioSourceMax()) ) { 636 throw new IllegalArgumentException("Invalid audio source " + source); 637 } 638 mAttributes = new AudioAttributes.Builder() 639 .setInternalCapturePreset(source) 640 .build(); 641 return this; 642 } 643 644 /** 645 * Sets the context the record belongs to. This context will be used to pull information, 646 * such as {@link android.content.AttributionSource} and device specific session ids, 647 * which will be associated with the {@link AudioRecord} the AudioRecord. 648 * However, the context itself will not be retained by the AudioRecord. 649 * @param context a non-null {@link Context} instance 650 * @return the same Builder instance. 651 */ setContext(@onNull Context context)652 public @NonNull Builder setContext(@NonNull Context context) { 653 // keep reference, we only copy the data when building 654 mContext = Objects.requireNonNull(context); 655 return this; 656 } 657 658 /** 659 * @hide 660 * To be only used by system components. Allows specifying non-public capture presets 661 * @param attributes a non-null {@link AudioAttributes} instance that contains the capture 662 * preset to be used. 663 * @return the same Builder instance. 664 * @throws IllegalArgumentException 665 */ 666 @SystemApi setAudioAttributes(@onNull AudioAttributes attributes)667 public Builder setAudioAttributes(@NonNull AudioAttributes attributes) 668 throws IllegalArgumentException { 669 if (attributes == null) { 670 throw new IllegalArgumentException("Illegal null AudioAttributes argument"); 671 } 672 if (attributes.getCapturePreset() == MediaRecorder.AudioSource.AUDIO_SOURCE_INVALID) { 673 throw new IllegalArgumentException( 674 "No valid capture preset in AudioAttributes argument"); 675 } 676 // keep reference, we only copy the data when building 677 mAttributes = attributes; 678 return this; 679 } 680 681 /** 682 * Sets the format of the audio data to be captured. 683 * @param format a non-null {@link AudioFormat} instance 684 * @return the same Builder instance. 685 * @throws IllegalArgumentException 686 */ setAudioFormat(@onNull AudioFormat format)687 public Builder setAudioFormat(@NonNull AudioFormat format) throws IllegalArgumentException { 688 if (format == null) { 689 throw new IllegalArgumentException("Illegal null AudioFormat argument"); 690 } 691 // keep reference, we only copy the data when building 692 mFormat = format; 693 return this; 694 } 695 696 /** 697 * Sets the total size (in bytes) of the buffer where audio data is written 698 * during the recording. New audio data can be read from this buffer in smaller chunks 699 * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum 700 * required buffer size for the successful creation of an AudioRecord instance. 701 * Since bufferSizeInBytes may be internally increased to accommodate the source 702 * requirements, use {@link #getBufferSizeInFrames()} to determine the actual buffer size 703 * in frames. 704 * @param bufferSizeInBytes a value strictly greater than 0 705 * @return the same Builder instance. 
         * @throws IllegalArgumentException
         */
        public Builder setBufferSizeInBytes(int bufferSizeInBytes) throws IllegalArgumentException {
            if (bufferSizeInBytes <= 0) {
                throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
            }
            mBufferSizeInBytes = bufferSizeInBytes;
            return this;
        }

        /**
         * Sets the {@link AudioRecord} to record audio played by other apps.
         *
         * @param config Defines what apps to record audio from (i.e., via either their uid or
         *               the type of audio).
         * @throws IllegalStateException if called in conjunction with {@link #setAudioSource(int)}.
         * @throws NullPointerException if {@code config} is null.
         */
        public @NonNull Builder setAudioPlaybackCaptureConfig(
                @NonNull AudioPlaybackCaptureConfiguration config) {
            Preconditions.checkNotNull(
                    config, "Illegal null AudioPlaybackCaptureConfiguration argument");
            Preconditions.checkState(
                    mAttributes == null,
                    ERROR_MESSAGE_SOURCE_MISMATCH);
            mAudioPlaybackCaptureConfiguration = config;
            return this;
        }

        /**
         * Indicates that this capture request is privacy sensitive and that
         * any concurrent capture is not permitted.
         * <p>
         * The default is not privacy sensitive except when the audio source set with
         * {@link #setAudioSource(int)} is {@link MediaRecorder.AudioSource#VOICE_COMMUNICATION} or
         * {@link MediaRecorder.AudioSource#CAMCORDER}.
         * <p>
         * Always takes precedence over default from audio source when set explicitly.
         * <p>
         * Using this API is only permitted when the audio source is one of:
         * <ul>
         * <li>{@link MediaRecorder.AudioSource#MIC}</li>
         * <li>{@link MediaRecorder.AudioSource#CAMCORDER}</li>
         * <li>{@link MediaRecorder.AudioSource#VOICE_RECOGNITION}</li>
         * <li>{@link MediaRecorder.AudioSource#VOICE_COMMUNICATION}</li>
         * <li>{@link MediaRecorder.AudioSource#UNPROCESSED}</li>
         * <li>{@link MediaRecorder.AudioSource#VOICE_PERFORMANCE}</li>
         * </ul>
         * Invoking {@link #build()} will throw an UnsupportedOperationException if this
         * condition is not met.
         * @param privacySensitive True if capture from this AudioRecord must be marked as privacy
         * sensitive, false otherwise.
         */
        public @NonNull Builder setPrivacySensitive(boolean privacySensitive) {
            mPrivacySensitive =
                    privacySensitive ? PRIVACY_SENSITIVE_ENABLED : PRIVACY_SENSITIVE_DISABLED;
            return this;
        }

        /**
         * @hide
         * To be only used by system components.
         *
         * Note that if there is a device-specific session id associated with the context,
         * explicitly setting a session id using this method will override it.
         * @param sessionId ID of audio session the AudioRecord must be attached to, or
         * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at
         * construction time.
         * @return the same Builder instance.
         * @throws IllegalArgumentException
         */
        @SystemApi
        public Builder setSessionId(int sessionId) throws IllegalArgumentException {
            if (sessionId < 0) {
                throw new IllegalArgumentException("Invalid session ID " + sessionId);
            }
            // Do not override a session ID previously set with setSharedAudioEvent()
            if (mSessionId == AudioManager.AUDIO_SESSION_ID_GENERATE) {
                mSessionId = sessionId;
            } else {
                Log.e(TAG, "setSessionId() called twice or after setSharedAudioEvent()");
            }
            return this;
        }

        private @NonNull AudioRecord buildAudioPlaybackCaptureRecord() {
            AudioMix audioMix = mAudioPlaybackCaptureConfiguration.createAudioMix(mFormat);
            MediaProjection projection = mAudioPlaybackCaptureConfiguration.getMediaProjection();
            AudioPolicy audioPolicy = new AudioPolicy.Builder(/*context=*/ mContext)
                    .setMediaProjection(projection)
                    .addMix(audioMix).build();

            int error = AudioManager.registerAudioPolicyStatic(audioPolicy);
            if (error != 0) {
                throw new UnsupportedOperationException("Error: could not register audio policy");
            }

            AudioRecord record = audioPolicy.createAudioRecordSink(audioMix);
            if (record == null) {
                throw new UnsupportedOperationException("Cannot create AudioRecord");
            }
            record.unregisterAudioPolicyOnRelease(audioPolicy);
            return record;
        }

        /**
         * @hide
         * Sets the {@link AudioRecord} call redirection mode.
         * Used when creating an AudioRecord to extract audio from the call downlink path. The mode
         * indicates if the call is a PSTN call or a VoIP call, in which case a dynamic audio
         * policy is created to forward all playback with voice communication usage to this record.
         *
         * @param callRedirectionMode one of
         * {@link AudioManager#CALL_REDIRECT_NONE},
         * {@link AudioManager#CALL_REDIRECT_PSTN},
         * or {@link AudioManager#CALL_REDIRECT_VOIP}.
         * @return the same Builder instance.
         * @throws IllegalArgumentException if {@code callRedirectionMode} is not valid.
         */
        public @NonNull Builder setCallRedirectionMode(
                @AudioManager.CallRedirectionMode int callRedirectionMode) {
            switch (callRedirectionMode) {
                case AudioManager.CALL_REDIRECT_NONE:
                case AudioManager.CALL_REDIRECT_PSTN:
                case AudioManager.CALL_REDIRECT_VOIP:
                    mCallRedirectionMode = callRedirectionMode;
                    break;
                default:
                    throw new IllegalArgumentException(
                            "Invalid call redirection mode " + callRedirectionMode);
            }
            return this;
        }

        private @NonNull AudioRecord buildCallExtractionRecord() {
            AudioMixingRule audioMixingRule = new AudioMixingRule.Builder()
                    .addMixRule(AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE,
                            new AudioAttributes.Builder()
                                    .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
                                    .setForCallRedirection()
                                    .build())
                    .addMixRule(AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE,
                            new AudioAttributes.Builder()
                                    .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING)
                                    .setForCallRedirection()
                                    .build())
                    .setTargetMixRole(AudioMixingRule.MIX_ROLE_PLAYERS)
                    .build();
            AudioMix audioMix = new AudioMix.Builder(audioMixingRule)
                    .setFormat(mFormat)
                    .setRouteFlags(AudioMix.ROUTE_FLAG_LOOP_BACK)
                    .build();
            AudioPolicy audioPolicy = new AudioPolicy.Builder(mContext).addMix(audioMix).build();
            if (AudioManager.registerAudioPolicyStatic(audioPolicy) != 0) {
                throw new UnsupportedOperationException("Error: could not register audio policy");
            }
            AudioRecord record = audioPolicy.createAudioRecordSink(audioMix);
            if (record == null) {
                throw new UnsupportedOperationException("Cannot create extraction AudioRecord");
            }
            record.unregisterAudioPolicyOnRelease(audioPolicy);
            return record;
        }

        /**
         * @hide
         * Specifies the maximum duration in the past of this AudioRecord's capture buffer
         * that can be shared with another app by calling
         * {@link AudioRecord#shareAudioHistory(String, long)}.
         * @param maxSharedAudioHistoryMillis the maximum duration that will be available
         *                                    in milliseconds.
         * @return the same Builder instance.
         * @throws IllegalArgumentException
         *
         */
        @SystemApi
        @RequiresPermission(android.Manifest.permission.CAPTURE_AUDIO_HOTWORD)
        public @NonNull Builder setMaxSharedAudioHistoryMillis(long maxSharedAudioHistoryMillis)
                throws IllegalArgumentException {
            if (maxSharedAudioHistoryMillis <= 0
                    || maxSharedAudioHistoryMillis > MAX_SHARED_AUDIO_HISTORY_MS) {
                throw new IllegalArgumentException("Illegal maxSharedAudioHistoryMillis argument");
            }
            mMaxSharedAudioHistoryMs = (int) maxSharedAudioHistoryMillis;
            return this;
        }

        /**
         * @hide
         * Indicates that this AudioRecord will use the audio history shared by another app's
         * AudioRecord. See {@link AudioRecord#shareAudioHistory(String, long)}.
         * The audio session ID set with {@link AudioRecord.Builder#setSessionId(int)} will be
         * ignored if this method is used.
         * @param event The {@link MediaSyncEvent} provided by the app sharing its audio history
         *              with this AudioRecord.
         * @return the same Builder instance.
         * @throws IllegalArgumentException
         */
        @SystemApi
        public @NonNull Builder setSharedAudioEvent(@NonNull MediaSyncEvent event)
                throws IllegalArgumentException {
            Objects.requireNonNull(event);
            if (event.getType() != MediaSyncEvent.SYNC_EVENT_SHARE_AUDIO_HISTORY) {
                throw new IllegalArgumentException(
                        "Invalid event type " + event.getType());
            }
            if (event.getAudioSessionId() == AudioSystem.AUDIO_SESSION_ALLOCATE) {
                throw new IllegalArgumentException(
                        "Invalid session ID " + event.getAudioSessionId());
            }
            // This prevails over a session ID set with setSessionId()
            mSessionId = event.getAudioSessionId();
            return this;
        }

        /**
         * @hide
         * Set to indicate that the requested AudioRecord object should produce the same type
         * of audio content that the hotword recognition model consumes. SoundTrigger hotword
         * recognition will not be disrupted. The source in the set AudioAttributes and the set
         * audio source will be overridden if this API is used.
         * <br> Use {@link AudioManager#isHotwordStreamSupported(boolean)} to query support.
         * @param hotwordContent true if AudioRecord should produce content captured from the
         *        hotword pipeline. false if AudioRecord should produce content captured outside
         *        the hotword pipeline.
         * @return the same Builder instance.
         **/
        @SystemApi
        @RequiresPermission(android.Manifest.permission.CAPTURE_AUDIO_HOTWORD)
        public @NonNull Builder setRequestHotwordStream(boolean hotwordContent) {
            mIsHotwordStream = hotwordContent;
            return this;
        }

        /**
         * @hide
         * Set to indicate that the requested AudioRecord object should produce the same type
         * of audio content that the hotword recognition model consumes and that the stream will
         * be able to provide buffered audio content from an unspecified duration prior to stream
         * open. The source in the set AudioAttributes and the set audio source will be overridden
         * if this API is used.
         * <br> Use {@link AudioManager#isHotwordStreamSupported(boolean)} to query support.
         * <br> If this is set, {@link AudioRecord.Builder#setRequestHotwordStream(boolean)}
         * must not be set, or {@link AudioRecord.Builder#build()} will throw.
         * @param hotwordLookbackContent true if AudioRecord should produce content captured from
         *        the hotword pipeline with capture content from prior to open. false if AudioRecord
         *        should not capture such content.
         * @return the same Builder instance.
         **/
        @SystemApi
        @RequiresPermission(android.Manifest.permission.CAPTURE_AUDIO_HOTWORD)
        public @NonNull Builder setRequestHotwordLookbackStream(boolean hotwordLookbackContent) {
            mIsHotwordLookback = hotwordLookbackContent;
            return this;
        }

        /**
         * @return a new {@link AudioRecord} instance successfully initialized with all
         *     the parameters set on this <code>Builder</code>.
         * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
         *     were incompatible, if the parameters are not supported by the device, if the caller
         *     does not hold the appropriate permissions, or if the device was not available.
970 */ 971 @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) build()972 public AudioRecord build() throws UnsupportedOperationException { 973 if (mAudioPlaybackCaptureConfiguration != null) { 974 return buildAudioPlaybackCaptureRecord(); 975 } 976 int halInputFlags = 0; 977 if (mIsHotwordStream) { 978 if (mIsHotwordLookback) { 979 throw new UnsupportedOperationException( 980 "setRequestHotwordLookbackStream and " + 981 "setRequestHotwordStream used concurrently"); 982 } else { 983 halInputFlags = (1 << AudioInputFlags.HOTWORD_TAP); 984 } 985 } else if (mIsHotwordLookback) { 986 halInputFlags = (1 << AudioInputFlags.HOTWORD_TAP) | 987 (1 << AudioInputFlags.HW_LOOKBACK); 988 } 989 990 if (mFormat == null) { 991 mFormat = new AudioFormat.Builder() 992 .setEncoding(AudioFormat.ENCODING_PCM_16BIT) 993 .setChannelMask(AudioFormat.CHANNEL_IN_MONO) 994 .build(); 995 } else { 996 if (mFormat.getEncoding() == AudioFormat.ENCODING_INVALID) { 997 mFormat = new AudioFormat.Builder(mFormat) 998 .setEncoding(AudioFormat.ENCODING_PCM_16BIT) 999 .build(); 1000 } 1001 if (mFormat.getChannelMask() == AudioFormat.CHANNEL_INVALID 1002 && mFormat.getChannelIndexMask() == AudioFormat.CHANNEL_INVALID) { 1003 mFormat = new AudioFormat.Builder(mFormat) 1004 .setChannelMask(AudioFormat.CHANNEL_IN_MONO) 1005 .build(); 1006 } 1007 } 1008 if (mAttributes == null) { 1009 mAttributes = new AudioAttributes.Builder() 1010 .setInternalCapturePreset(MediaRecorder.AudioSource.DEFAULT) 1011 .build(); 1012 } 1013 1014 if (mIsHotwordStream || mIsHotwordLookback) { 1015 mAttributes = new AudioAttributes.Builder(mAttributes) 1016 .setInternalCapturePreset(MediaRecorder.AudioSource.VOICE_RECOGNITION) 1017 .build(); 1018 } 1019 1020 // If mPrivacySensitive is default, the privacy flag is already set 1021 // according to audio source in audio attributes. 1022 if (mPrivacySensitive != PRIVACY_SENSITIVE_DEFAULT) { 1023 int source = mAttributes.getCapturePreset(); 1024 if (source == MediaRecorder.AudioSource.REMOTE_SUBMIX 1025 || source == MediaRecorder.AudioSource.RADIO_TUNER 1026 || source == MediaRecorder.AudioSource.VOICE_DOWNLINK 1027 || source == MediaRecorder.AudioSource.VOICE_UPLINK 1028 || source == MediaRecorder.AudioSource.VOICE_CALL 1029 || source == MediaRecorder.AudioSource.ECHO_REFERENCE) { 1030 throw new UnsupportedOperationException( 1031 "Cannot request private capture with source: " + source); 1032 } 1033 1034 mAttributes = new AudioAttributes.Builder(mAttributes) 1035 .setInternalCapturePreset(source) 1036 .setPrivacySensitive(mPrivacySensitive == PRIVACY_SENSITIVE_ENABLED) 1037 .build(); 1038 } 1039 1040 if (mCallRedirectionMode == AudioManager.CALL_REDIRECT_VOIP) { 1041 return buildCallExtractionRecord(); 1042 } else if (mCallRedirectionMode == AudioManager.CALL_REDIRECT_PSTN) { 1043 mAttributes = new AudioAttributes.Builder(mAttributes) 1044 .setForCallRedirection() 1045 .build(); 1046 } 1047 1048 try { 1049 // If the buffer size is not specified, 1050 // use a single frame for the buffer size and let the 1051 // native code figure out the minimum buffer size. 
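                // For example, mono ENCODING_PCM_16BIT yields 1 channel * 2 bytes = 2 bytes
                // (one frame) below; the native layer then enlarges the buffer to its own
                // minimum for the selected route.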
                if (mBufferSizeInBytes == 0) {
                    mBufferSizeInBytes = mFormat.getChannelCount()
                            * mFormat.getBytesPerSample(mFormat.getEncoding());
                }
                final AudioRecord record = new AudioRecord(
                        mAttributes, mFormat, mBufferSizeInBytes, mSessionId, mContext,
                        mMaxSharedAudioHistoryMs, halInputFlags);
                if (record.getState() == STATE_UNINITIALIZED) {
                    // release is not necessary
                    throw new UnsupportedOperationException("Cannot create AudioRecord");
                }
                return record;
            } catch (IllegalArgumentException e) {
                throw new UnsupportedOperationException(e.getMessage());
            }
        }
    }

    /**
     * Helper method to resolve the session id to be used for AudioRecord initialization.
     *
     * This method assigns the session id in the following way:
     * 1. An explicitly requested session id has the highest priority; if there is one,
     *    it will be used.
     * 2. If there is a device-specific session id associated with the provided context,
     *    it will be used.
     * 3. Otherwise {@link AUDIO_SESSION_ID_GENERATE} is returned.
     *
     * @param context {@link Context} to use for extraction of device specific session id.
     * @param requestedSessionId explicitly requested session id or AUDIO_SESSION_ID_GENERATE.
     * @return session id to be passed to AudioService for the {@link AudioRecord} instance given
     *         provided {@link Context} instance and explicitly requested session id.
     */
    private static int resolveSessionId(@Nullable Context context, int requestedSessionId) {
        if (requestedSessionId != AUDIO_SESSION_ID_GENERATE) {
            // Use explicitly requested session id.
            return requestedSessionId;
        }

        if (context == null) {
            return AUDIO_SESSION_ID_GENERATE;
        }

        int deviceId = context.getDeviceId();
        if (deviceId == DEVICE_ID_DEFAULT) {
            return AUDIO_SESSION_ID_GENERATE;
        }

        VirtualDeviceManager vdm = context.getSystemService(VirtualDeviceManager.class);
        if (vdm == null || vdm.getDevicePolicy(deviceId, POLICY_TYPE_AUDIO)
                == DEVICE_POLICY_DEFAULT) {
            return AUDIO_SESSION_ID_GENERATE;
        }

        return vdm.getAudioRecordingSessionId(deviceId);
    }

    // Convenience method for the constructor's parameter checks.
1110 // This, getChannelMaskFromLegacyConfig and audioBuffSizeCheck are where constructor 1111 // IllegalArgumentException-s are thrown getChannelMaskFromLegacyConfig(int inChannelConfig, boolean allowLegacyConfig)1112 private static int getChannelMaskFromLegacyConfig(int inChannelConfig, 1113 boolean allowLegacyConfig) { 1114 int mask; 1115 switch (inChannelConfig) { 1116 case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT 1117 case AudioFormat.CHANNEL_IN_MONO: 1118 case AudioFormat.CHANNEL_CONFIGURATION_MONO: 1119 mask = AudioFormat.CHANNEL_IN_MONO; 1120 break; 1121 case AudioFormat.CHANNEL_IN_STEREO: 1122 case AudioFormat.CHANNEL_CONFIGURATION_STEREO: 1123 mask = AudioFormat.CHANNEL_IN_STEREO; 1124 break; 1125 case (AudioFormat.CHANNEL_IN_FRONT | AudioFormat.CHANNEL_IN_BACK): 1126 mask = inChannelConfig; 1127 break; 1128 default: 1129 throw new IllegalArgumentException("Unsupported channel configuration."); 1130 } 1131 1132 if (!allowLegacyConfig && ((inChannelConfig == AudioFormat.CHANNEL_CONFIGURATION_MONO) 1133 || (inChannelConfig == AudioFormat.CHANNEL_CONFIGURATION_STEREO))) { 1134 // only happens with the constructor that uses AudioAttributes and AudioFormat 1135 throw new IllegalArgumentException("Unsupported deprecated configuration."); 1136 } 1137 1138 return mask; 1139 } 1140 1141 // postconditions: 1142 // mRecordSource is valid 1143 // mAudioFormat is valid 1144 // mSampleRate is valid audioParamCheck(int audioSource, int sampleRateInHz, int audioFormat)1145 private void audioParamCheck(int audioSource, int sampleRateInHz, int audioFormat) 1146 throws IllegalArgumentException { 1147 1148 //-------------- 1149 // audio source 1150 if ((audioSource < MediaRecorder.AudioSource.DEFAULT) 1151 || ((audioSource > MediaRecorder.getAudioSourceMax()) 1152 && (audioSource != MediaRecorder.AudioSource.RADIO_TUNER) 1153 && (audioSource != MediaRecorder.AudioSource.ECHO_REFERENCE) 1154 && (audioSource != MediaRecorder.AudioSource.HOTWORD) 1155 && (audioSource != MediaRecorder.AudioSource.ULTRASOUND))) { 1156 throw new IllegalArgumentException("Invalid audio source " + audioSource); 1157 } 1158 mRecordSource = audioSource; 1159 1160 //-------------- 1161 // sample rate 1162 if ((sampleRateInHz < AudioFormat.SAMPLE_RATE_HZ_MIN || 1163 sampleRateInHz > AudioFormat.SAMPLE_RATE_HZ_MAX) && 1164 sampleRateInHz != AudioFormat.SAMPLE_RATE_UNSPECIFIED) { 1165 throw new IllegalArgumentException(sampleRateInHz 1166 + "Hz is not a supported sample rate."); 1167 } 1168 mSampleRate = sampleRateInHz; 1169 1170 //-------------- 1171 // audio format 1172 switch (audioFormat) { 1173 case AudioFormat.ENCODING_DEFAULT: 1174 mAudioFormat = AudioFormat.ENCODING_PCM_16BIT; 1175 break; 1176 case AudioFormat.ENCODING_PCM_24BIT_PACKED: 1177 case AudioFormat.ENCODING_PCM_32BIT: 1178 case AudioFormat.ENCODING_PCM_FLOAT: 1179 case AudioFormat.ENCODING_PCM_16BIT: 1180 case AudioFormat.ENCODING_PCM_8BIT: 1181 case AudioFormat.ENCODING_E_AC3_JOC: 1182 mAudioFormat = audioFormat; 1183 break; 1184 default: 1185 throw new IllegalArgumentException("Unsupported sample encoding " + audioFormat 1186 + ". Should be ENCODING_PCM_8BIT, ENCODING_PCM_16BIT," 1187 + " ENCODING_PCM_24BIT_PACKED, ENCODING_PCM_32BIT," 1188 + " or ENCODING_PCM_FLOAT."); 1189 } 1190 } 1191 1192 1193 // Convenience method for the contructor's audio buffer size check. 
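    // For example, with stereo ENCODING_PCM_16BIT the frame size is 2 channels * 2 bytes
    // = 4 bytes, so the requested audioBufferSize must be a positive multiple of 4.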
1194 // postcondition: 1195 // mNativeBufferSizeInBytes is valid (multiple of frame size, positive) audioBuffSizeCheck(int audioBufferSize)1196 private void audioBuffSizeCheck(int audioBufferSize) throws IllegalArgumentException { 1197 if ((audioBufferSize % getFormat().getFrameSizeInBytes() != 0) || (audioBufferSize < 1)) { 1198 throw new IllegalArgumentException("Invalid audio buffer size " + audioBufferSize 1199 + " (frame size " + getFormat().getFrameSizeInBytes() + ")"); 1200 } 1201 1202 mNativeBufferSizeInBytes = audioBufferSize; 1203 } 1204 1205 1206 1207 /** 1208 * Releases the native AudioRecord resources. 1209 * The object can no longer be used and the reference should be set to null 1210 * after a call to release() 1211 */ release()1212 public void release() { 1213 try { 1214 stop(); 1215 } catch(IllegalStateException ise) { 1216 // don't raise an exception, we're releasing the resources. 1217 } 1218 if (mAudioCapturePolicy != null) { 1219 AudioManager.unregisterAudioPolicyAsyncStatic(mAudioCapturePolicy); 1220 mAudioCapturePolicy = null; 1221 } 1222 native_release(); 1223 mState = STATE_UNINITIALIZED; 1224 } 1225 1226 1227 @Override finalize()1228 protected void finalize() { 1229 // will cause stop() to be called, and if appropriate, will handle fixed volume recording 1230 release(); 1231 } 1232 1233 1234 //-------------------------------------------------------------------------- 1235 // Getters 1236 //-------------------- 1237 /** 1238 * Returns the configured audio sink sample rate in Hz. 1239 * The sink sample rate never changes after construction. 1240 * If the constructor had a specific sample rate, then the sink sample rate is that value. 1241 * If the constructor had {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}, 1242 * then the sink sample rate is a route-dependent default value based on the source [sic]. 1243 */ getSampleRate()1244 public int getSampleRate() { 1245 return mSampleRate; 1246 } 1247 1248 /** 1249 * Returns the audio recording source. 1250 * @see MediaRecorder.AudioSource 1251 */ getAudioSource()1252 public int getAudioSource() { 1253 return mRecordSource; 1254 } 1255 1256 /** 1257 * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT}, 1258 * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}. 1259 */ getAudioFormat()1260 public int getAudioFormat() { 1261 return mAudioFormat; 1262 } 1263 1264 /** 1265 * Returns the configured channel position mask. 1266 * <p> See {@link AudioFormat#CHANNEL_IN_MONO} 1267 * and {@link AudioFormat#CHANNEL_IN_STEREO}. 1268 * This method may return {@link AudioFormat#CHANNEL_INVALID} if 1269 * a channel index mask is used. 1270 * Consider {@link #getFormat()} instead, to obtain an {@link AudioFormat}, 1271 * which contains both the channel position mask and the channel index mask. 1272 */ getChannelConfiguration()1273 public int getChannelConfiguration() { 1274 return mChannelMask; 1275 } 1276 1277 /** 1278 * Returns the configured <code>AudioRecord</code> format. 1279 * @return an {@link AudioFormat} containing the 1280 * <code>AudioRecord</code> parameters at the time of configuration. 
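     * <p>For instance, the returned format can be used to compute the frame size when
     * sizing read buffers (a brief sketch, assuming an initialized instance named
     * <code>record</code>):
     * <pre class="prettyprint">
     * int frameSizeInBytes = record.getFormat().getFrameSizeInBytes();
     * </pre>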
1281 */ getFormat()1282 public @NonNull AudioFormat getFormat() { 1283 AudioFormat.Builder builder = new AudioFormat.Builder() 1284 .setSampleRate(mSampleRate) 1285 .setEncoding(mAudioFormat); 1286 if (mChannelMask != AudioFormat.CHANNEL_INVALID) { 1287 builder.setChannelMask(mChannelMask); 1288 } 1289 if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) { 1290 builder.setChannelIndexMask(mChannelIndexMask); 1291 } 1292 return builder.build(); 1293 } 1294 1295 /** 1296 * Returns the configured number of channels. 1297 */ getChannelCount()1298 public int getChannelCount() { 1299 return mChannelCount; 1300 } 1301 1302 /** 1303 * Returns the state of the AudioRecord instance. This is useful after the 1304 * AudioRecord instance has been created to check if it was initialized 1305 * properly. This ensures that the appropriate hardware resources have been 1306 * acquired. 1307 * @see AudioRecord#STATE_INITIALIZED 1308 * @see AudioRecord#STATE_UNINITIALIZED 1309 */ getState()1310 public int getState() { 1311 return mState; 1312 } 1313 1314 /** 1315 * Returns the recording state of the AudioRecord instance. 1316 * @see AudioRecord#RECORDSTATE_STOPPED 1317 * @see AudioRecord#RECORDSTATE_RECORDING 1318 */ getRecordingState()1319 public int getRecordingState() { 1320 synchronized (mRecordingStateLock) { 1321 return mRecordingState; 1322 } 1323 } 1324 1325 /** 1326 * Returns the frame count of the native <code>AudioRecord</code> buffer. 1327 * This is greater than or equal to the bufferSizeInBytes converted to frame units 1328 * specified in the <code>AudioRecord</code> constructor or Builder. 1329 * The native frame count may be enlarged to accommodate the requirements of the 1330 * source on creation or if the <code>AudioRecord</code> 1331 * is subsequently rerouted. 1332 * @return current size in frames of the <code>AudioRecord</code> buffer. 1333 * @throws IllegalStateException 1334 */ getBufferSizeInFrames()1335 public int getBufferSizeInFrames() { 1336 return native_get_buffer_size_in_frames(); 1337 } 1338 1339 /** 1340 * Returns the notification marker position expressed in frames. 1341 */ getNotificationMarkerPosition()1342 public int getNotificationMarkerPosition() { 1343 return native_get_marker_pos(); 1344 } 1345 1346 /** 1347 * Returns the notification update period expressed in frames. 1348 */ getPositionNotificationPeriod()1349 public int getPositionNotificationPeriod() { 1350 return native_get_pos_update_period(); 1351 } 1352 1353 /** 1354 * Poll for an {@link AudioTimestamp} on demand. 1355 * <p> 1356 * The AudioTimestamp reflects the frame delivery information at 1357 * the earliest point available in the capture pipeline. 1358 * <p> 1359 * Calling {@link #startRecording()} following a {@link #stop()} will reset 1360 * the frame count to 0. 1361 * 1362 * @param outTimestamp a caller provided non-null AudioTimestamp instance, 1363 * which is updated with the AudioRecord frame delivery information upon success. 1364 * @param timebase one of 1365 * {@link AudioTimestamp#TIMEBASE_BOOTTIME AudioTimestamp.TIMEBASE_BOOTTIME} or 1366 * {@link AudioTimestamp#TIMEBASE_MONOTONIC AudioTimestamp.TIMEBASE_MONOTONIC}, 1367 * used to select the clock for the AudioTimestamp time. 1368 * @return {@link #SUCCESS} if a timestamp is available, 1369 * or {@link #ERROR_INVALID_OPERATION} if a timestamp not available. 
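     *         <p>A minimal polling sketch, assuming an initialized and recording instance
     *         named <code>record</code>:
     * <pre class="prettyprint">
     * AudioTimestamp ts = new AudioTimestamp();
     * if (record.getTimestamp(ts, AudioTimestamp.TIMEBASE_MONOTONIC) == AudioRecord.SUCCESS) {
     *     // ts.framePosition and ts.nanoTime describe the frame delivery position and time
     * }
     * </pre>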
1370 */ getTimestamp(@onNull AudioTimestamp outTimestamp, @AudioTimestamp.Timebase int timebase)1371 public int getTimestamp(@NonNull AudioTimestamp outTimestamp, 1372 @AudioTimestamp.Timebase int timebase) 1373 { 1374 if (outTimestamp == null || 1375 (timebase != AudioTimestamp.TIMEBASE_BOOTTIME 1376 && timebase != AudioTimestamp.TIMEBASE_MONOTONIC)) { 1377 throw new IllegalArgumentException(); 1378 } 1379 return native_get_timestamp(outTimestamp, timebase); 1380 } 1381 1382 /** 1383 * Returns the minimum buffer size required for the successful creation of an AudioRecord 1384 * object, in byte units. 1385 * Note that this size doesn't guarantee a smooth recording under load, and higher values 1386 * should be chosen according to the expected frequency at which the AudioRecord instance 1387 * will be polled for new data. 1388 * See {@link #AudioRecord(int, int, int, int, int)} for more information on valid 1389 * configuration values. 1390 * @param sampleRateInHz the sample rate expressed in Hertz. 1391 * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} is not permitted. 1392 * @param channelConfig describes the configuration of the audio channels. 1393 * See {@link AudioFormat#CHANNEL_IN_MONO} and 1394 * {@link AudioFormat#CHANNEL_IN_STEREO} 1395 * @param audioFormat the format in which the audio data is represented. 1396 * See {@link AudioFormat#ENCODING_PCM_16BIT}. 1397 * @return {@link #ERROR_BAD_VALUE} if the recording parameters are not supported by the 1398 * hardware, or an invalid parameter was passed, 1399 * or {@link #ERROR} if the implementation was unable to query the hardware for its 1400 * input properties, 1401 * or the minimum buffer size expressed in bytes. 1402 * @see #AudioRecord(int, int, int, int, int) 1403 */ getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat)1404 static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) { 1405 int channelCount = 0; 1406 switch (channelConfig) { 1407 case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT 1408 case AudioFormat.CHANNEL_IN_MONO: 1409 case AudioFormat.CHANNEL_CONFIGURATION_MONO: 1410 channelCount = 1; 1411 break; 1412 case AudioFormat.CHANNEL_IN_STEREO: 1413 case AudioFormat.CHANNEL_CONFIGURATION_STEREO: 1414 case (AudioFormat.CHANNEL_IN_FRONT | AudioFormat.CHANNEL_IN_BACK): 1415 channelCount = 2; 1416 break; 1417 case AudioFormat.CHANNEL_INVALID: 1418 default: 1419 loge("getMinBufferSize(): Invalid channel configuration."); 1420 return ERROR_BAD_VALUE; 1421 } 1422 1423 int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat); 1424 if (size == 0) { 1425 return ERROR_BAD_VALUE; 1426 } 1427 else if (size == -1) { 1428 return ERROR; 1429 } 1430 else { 1431 return size; 1432 } 1433 } 1434 1435 /** 1436 * Returns the audio session ID. 1437 * 1438 * @return the ID of the audio session this AudioRecord belongs to. 1439 */ getAudioSessionId()1440 public int getAudioSessionId() { 1441 return mSessionId; 1442 } 1443 1444 /** 1445 * Returns whether this AudioRecord is marked as privacy sensitive or not. 
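 * <p>A minimal sketch, assuming the app already holds RECORD_AUDIO (the variable names
 * below are illustrative only):
 * <pre>{@code
 * AudioRecord sensitiveRecord = new AudioRecord.Builder()
 *         .setPrivacySensitive(true)
 *         .build();
 * boolean sensitive = sensitiveRecord.isPrivacySensitive(); // true
 * }</pre>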
1446 * <p> 1447 * See {@link Builder#setPrivacySensitive(boolean)} 1448 * <p> 1449 * @return true if privacy sensitive, false otherwise 1450 */ isPrivacySensitive()1451 public boolean isPrivacySensitive() { 1452 return (mAudioAttributes.getAllFlags() & AudioAttributes.FLAG_CAPTURE_PRIVATE) != 0; 1453 } 1454 1455 /** 1456 * @hide 1457 * Returns whether the AudioRecord object produces the same type of audio content that 1458 * the hotword recognition model consumes. 1459 * <br> If {@link isHotwordLookbackStream(boolean)} is true, this will return false 1460 * <br> See {@link Builder#setRequestHotwordStream(boolean)} 1461 * @return true if AudioRecord produces hotword content, false otherwise 1462 **/ 1463 @SystemApi isHotwordStream()1464 public boolean isHotwordStream() { 1465 return ((mHalInputFlags & (1 << AudioInputFlags.HOTWORD_TAP)) != 0 && 1466 (mHalInputFlags & (1 << AudioInputFlags.HW_LOOKBACK)) == 0); 1467 } 1468 1469 /** 1470 * @hide 1471 * Returns whether the AudioRecord object produces the same type of audio content that 1472 * the hotword recognition model consumes, and includes capture content from prior to 1473 * stream open. 1474 * <br> See {@link Builder#setRequestHotwordLookbackStream(boolean)} 1475 * @return true if AudioRecord produces hotword capture content from 1476 * prior to stream open, false otherwise 1477 **/ 1478 @SystemApi isHotwordLookbackStream()1479 public boolean isHotwordLookbackStream() { 1480 return ((mHalInputFlags & (1 << AudioInputFlags.HW_LOOKBACK)) != 0); 1481 } 1482 1483 1484 //--------------------------------------------------------- 1485 // Transport control methods 1486 //-------------------- 1487 /** 1488 * Starts recording from the AudioRecord instance. 1489 * @throws IllegalStateException 1490 */ startRecording()1491 public void startRecording() 1492 throws IllegalStateException { 1493 if (mState != STATE_INITIALIZED) { 1494 throw new IllegalStateException("startRecording() called on an " 1495 + "uninitialized AudioRecord."); 1496 } 1497 1498 // start recording 1499 synchronized(mRecordingStateLock) { 1500 if (native_start(MediaSyncEvent.SYNC_EVENT_NONE, 0) == SUCCESS) { 1501 handleFullVolumeRec(true); 1502 mRecordingState = RECORDSTATE_RECORDING; 1503 } 1504 } 1505 } 1506 1507 /** 1508 * Starts recording from the AudioRecord instance when the specified synchronization event 1509 * occurs on the specified audio session. 1510 * @throws IllegalStateException 1511 * @param syncEvent event that triggers the capture. 1512 * @see MediaSyncEvent 1513 */ startRecording(MediaSyncEvent syncEvent)1514 public void startRecording(MediaSyncEvent syncEvent) 1515 throws IllegalStateException { 1516 if (mState != STATE_INITIALIZED) { 1517 throw new IllegalStateException("startRecording() called on an " 1518 + "uninitialized AudioRecord."); 1519 } 1520 1521 // start recording 1522 synchronized(mRecordingStateLock) { 1523 if (native_start(syncEvent.getType(), syncEvent.getAudioSessionId()) == SUCCESS) { 1524 handleFullVolumeRec(true); 1525 mRecordingState = RECORDSTATE_RECORDING; 1526 } 1527 } 1528 } 1529 1530 /** 1531 * Stops recording. 
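 * <p>A minimal capture sketch tying start, read and stop together ({@code record} and
 * {@code minBufferSizeInBytes} are assumed to come from the constructor or Builder and
 * {@link #getMinBufferSize(int, int, int)} respectively):
 * <pre>{@code
 * short[] buffer = new short[minBufferSizeInBytes / 2];
 * record.startRecording();
 * int count = record.read(buffer, 0, buffer.length); // blocking; count may be an error code
 * record.stop();
 * }</pre>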
1532 * @throws IllegalStateException 1533 */ stop()1534 public void stop() 1535 throws IllegalStateException { 1536 if (mState != STATE_INITIALIZED) { 1537 throw new IllegalStateException("stop() called on an uninitialized AudioRecord."); 1538 } 1539 1540 // stop recording 1541 synchronized(mRecordingStateLock) { 1542 handleFullVolumeRec(false); 1543 native_stop(); 1544 mRecordingState = RECORDSTATE_STOPPED; 1545 } 1546 } 1547 1548 private final IBinder mICallBack = new Binder(); handleFullVolumeRec(boolean starting)1549 private void handleFullVolumeRec(boolean starting) { 1550 if (!mIsSubmixFullVolume) { 1551 return; 1552 } 1553 final IBinder b = ServiceManager.getService(android.content.Context.AUDIO_SERVICE); 1554 final IAudioService ias = IAudioService.Stub.asInterface(b); 1555 try { 1556 ias.forceRemoteSubmixFullVolume(starting, mICallBack); 1557 } catch (RemoteException e) { 1558 Log.e(TAG, "Error talking to AudioService when handling full submix volume", e); 1559 } 1560 } 1561 1562 //--------------------------------------------------------- 1563 // Audio data supply 1564 //-------------------- 1565 /** 1566 * Reads audio data from the audio hardware for recording into a byte array. 1567 * The format specified in the AudioRecord constructor should be 1568 * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array. 1569 * @param audioData the array to which the recorded audio data is written. 1570 * @param offsetInBytes index in audioData from which the data is written expressed in bytes. 1571 * @param sizeInBytes the number of requested bytes. 1572 * @return zero or the positive number of bytes that were read, or one of the following 1573 * error codes. The number of bytes will not exceed sizeInBytes. 1574 * <ul> 1575 * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li> 1576 * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li> 1577 * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and 1578 * needs to be recreated. The dead object error code is not returned if some data was 1579 * successfully transferred. In this case, the error is returned at the next read()</li> 1580 * <li>{@link #ERROR} in case of other error</li> 1581 * </ul> 1582 */ read(@onNull byte[] audioData, int offsetInBytes, int sizeInBytes)1583 public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) { 1584 return read(audioData, offsetInBytes, sizeInBytes, READ_BLOCKING); 1585 } 1586 1587 /** 1588 * Reads audio data from the audio hardware for recording into a byte array. 1589 * The format specified in the AudioRecord constructor should be 1590 * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array. 1591 * The format can be {@link AudioFormat#ENCODING_PCM_16BIT}, but this is deprecated. 1592 * @param audioData the array to which the recorded audio data is written. 1593 * @param offsetInBytes index in audioData to which the data is written expressed in bytes. 1594 * Must not be negative, or cause the data access to go out of bounds of the array. 1595 * @param sizeInBytes the number of requested bytes. 1596 * Must not be negative, or cause the data access to go out of bounds of the array. 1597 * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}. 1598 * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data 1599 * is read. 
1600 * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after 1601 * reading as much audio data as possible without blocking. 1602 * @return zero or the positive number of bytes that were read, or one of the following 1603 * error codes. The number of bytes will be a multiple of the frame size in bytes 1604 * not to exceed sizeInBytes. 1605 * <ul> 1606 * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li> 1607 * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li> 1608 * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and 1609 * needs to be recreated. The dead object error code is not returned if some data was 1610 * successfully transferred. In this case, the error is returned at the next read()</li> 1611 * <li>{@link #ERROR} in case of other error</li> 1612 * </ul> 1613 */ read(@onNull byte[] audioData, int offsetInBytes, int sizeInBytes, @ReadMode int readMode)1614 public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes, 1615 @ReadMode int readMode) { 1616 // Note: we allow reads of extended integers into a byte array. 1617 if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) { 1618 return ERROR_INVALID_OPERATION; 1619 } 1620 1621 if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) { 1622 Log.e(TAG, "AudioRecord.read() called with invalid blocking mode"); 1623 return ERROR_BAD_VALUE; 1624 } 1625 1626 if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0) 1627 || (offsetInBytes + sizeInBytes < 0) // detect integer overflow 1628 || (offsetInBytes + sizeInBytes > audioData.length)) { 1629 return ERROR_BAD_VALUE; 1630 } 1631 1632 return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes, 1633 readMode == READ_BLOCKING); 1634 } 1635 1636 /** 1637 * Reads audio data from the audio hardware for recording into a short array. 1638 * The format specified in the AudioRecord constructor should be 1639 * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array. 1640 * @param audioData the array to which the recorded audio data is written. 1641 * @param offsetInShorts index in audioData to which the data is written expressed in shorts. 1642 * Must not be negative, or cause the data access to go out of bounds of the array. 1643 * @param sizeInShorts the number of requested shorts. 1644 * Must not be negative, or cause the data access to go out of bounds of the array. 1645 * @return zero or the positive number of shorts that were read, or one of the following 1646 * error codes. The number of shorts will be a multiple of the channel count not to exceed 1647 * sizeInShorts. 1648 * <ul> 1649 * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li> 1650 * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li> 1651 * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and 1652 * needs to be recreated. The dead object error code is not returned if some data was 1653 * successfully transferred. 
In this case, the error is returned at the next read()</li> 1654 * <li>{@link #ERROR} in case of other error</li> 1655 * </ul> 1656 */ read(@onNull short[] audioData, int offsetInShorts, int sizeInShorts)1657 public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) { 1658 return read(audioData, offsetInShorts, sizeInShorts, READ_BLOCKING); 1659 } 1660 1661 /** 1662 * Reads audio data from the audio hardware for recording into a short array. 1663 * The format specified in the AudioRecord constructor should be 1664 * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array. 1665 * @param audioData the array to which the recorded audio data is written. 1666 * @param offsetInShorts index in audioData from which the data is written expressed in shorts. 1667 * Must not be negative, or cause the data access to go out of bounds of the array. 1668 * @param sizeInShorts the number of requested shorts. 1669 * Must not be negative, or cause the data access to go out of bounds of the array. 1670 * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}. 1671 * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data 1672 * is read. 1673 * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after 1674 * reading as much audio data as possible without blocking. 1675 * @return zero or the positive number of shorts that were read, or one of the following 1676 * error codes. The number of shorts will be a multiple of the channel count not to exceed 1677 * sizeInShorts. 1678 * <ul> 1679 * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li> 1680 * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li> 1681 * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and 1682 * needs to be recreated. The dead object error code is not returned if some data was 1683 * successfully transferred. In this case, the error is returned at the next read()</li> 1684 * <li>{@link #ERROR} in case of other error</li> 1685 * </ul> 1686 */ read(@onNull short[] audioData, int offsetInShorts, int sizeInShorts, @ReadMode int readMode)1687 public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts, 1688 @ReadMode int readMode) { 1689 if (mState != STATE_INITIALIZED 1690 || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT 1691 // use ByteBuffer instead for later encodings 1692 || mAudioFormat > AudioFormat.ENCODING_LEGACY_SHORT_ARRAY_THRESHOLD) { 1693 return ERROR_INVALID_OPERATION; 1694 } 1695 1696 if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) { 1697 Log.e(TAG, "AudioRecord.read() called with invalid blocking mode"); 1698 return ERROR_BAD_VALUE; 1699 } 1700 1701 if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0) 1702 || (offsetInShorts + sizeInShorts < 0) // detect integer overflow 1703 || (offsetInShorts + sizeInShorts > audioData.length)) { 1704 return ERROR_BAD_VALUE; 1705 } 1706 return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts, 1707 readMode == READ_BLOCKING); 1708 } 1709 1710 /** 1711 * Reads audio data from the audio hardware for recording into a float array. 1712 * The format specified in the AudioRecord constructor should be 1713 * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array. 1714 * @param audioData the array to which the recorded audio data is written. 
1715 * @param offsetInFloats index in audioData from which the data is written. 1716 * Must not be negative, or cause the data access to go out of bounds of the array. 1717 * @param sizeInFloats the number of requested floats. 1718 * Must not be negative, or cause the data access to go out of bounds of the array. 1719 * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}. 1720 * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data 1721 * is read. 1722 * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after 1723 * reading as much audio data as possible without blocking. 1724 * @return zero or the positive number of floats that were read, or one of the following 1725 * error codes. The number of floats will be a multiple of the channel count not to exceed 1726 * sizeInFloats. 1727 * <ul> 1728 * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li> 1729 * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li> 1730 * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and 1731 * needs to be recreated. The dead object error code is not returned if some data was 1732 * successfully transferred. In this case, the error is returned at the next read()</li> 1733 * <li>{@link #ERROR} in case of other error</li> 1734 * </ul> 1735 */ read(@onNull float[] audioData, int offsetInFloats, int sizeInFloats, @ReadMode int readMode)1736 public int read(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats, 1737 @ReadMode int readMode) { 1738 if (mState == STATE_UNINITIALIZED) { 1739 Log.e(TAG, "AudioRecord.read() called in invalid state STATE_UNINITIALIZED"); 1740 return ERROR_INVALID_OPERATION; 1741 } 1742 1743 if (mAudioFormat != AudioFormat.ENCODING_PCM_FLOAT) { 1744 Log.e(TAG, "AudioRecord.read(float[] ...) requires format ENCODING_PCM_FLOAT"); 1745 return ERROR_INVALID_OPERATION; 1746 } 1747 1748 if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) { 1749 Log.e(TAG, "AudioRecord.read() called with invalid blocking mode"); 1750 return ERROR_BAD_VALUE; 1751 } 1752 1753 if ((audioData == null) || (offsetInFloats < 0) || (sizeInFloats < 0) 1754 || (offsetInFloats + sizeInFloats < 0) // detect integer overflow 1755 || (offsetInFloats + sizeInFloats > audioData.length)) { 1756 return ERROR_BAD_VALUE; 1757 } 1758 1759 return native_read_in_float_array(audioData, offsetInFloats, sizeInFloats, 1760 readMode == READ_BLOCKING); 1761 } 1762 1763 /** 1764 * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer 1765 * is not a direct buffer, this method will always return 0. 1766 * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is 1767 * unchanged after a call to this method. 1768 * The representation of the data in the buffer will depend on the format specified in 1769 * the AudioRecord constructor, and will be native endian. 1770 * @param audioBuffer the direct buffer to which the recorded audio data is written. 1771 * Data is written to audioBuffer.position(). 1772 * @param sizeInBytes the number of requested bytes. It is recommended but not enforced 1773 * that the number of bytes requested be a multiple of the frame size (sample size in 1774 * bytes multiplied by the channel count). 1775 * @return zero or the positive number of bytes that were read, or one of the following 1776 * error codes. 
The number of bytes will not exceed sizeInBytes and will be truncated to be
1777 * a multiple of the frame size.
1778 * <ul>
1779 * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
1780 * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
1781 * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
1782 * needs to be recreated. The dead object error code is not returned if some data was
1783 * successfully transferred. In this case, the error is returned at the next read()</li>
1784 * <li>{@link #ERROR} in case of other error</li>
1785 * </ul>
1786 */ read(@onNull ByteBuffer audioBuffer, int sizeInBytes)
1787 public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes) {
1788 return read(audioBuffer, sizeInBytes, READ_BLOCKING);
1789 }
1790
1791 /**
1792 * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
1793 * is not a direct buffer, this method will always return 0.
1794 * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
1795 * unchanged after a call to this method.
1796 * The representation of the data in the buffer will depend on the format specified in
1797 * the AudioRecord constructor, and will be native endian.
1798 * @param audioBuffer the direct buffer to which the recorded audio data is written.
1799 * Data is written to audioBuffer.position().
1800 * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
1801 * that the number of bytes requested be a multiple of the frame size (sample size in
1802 * bytes multiplied by the channel count).
1803 * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
1804 * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
1805 * is read.
1806 * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
1807 * reading as much audio data as possible without blocking.
1808 * @return zero or the positive number of bytes that were read, or one of the following
1809 * error codes. The number of bytes will not exceed sizeInBytes and will be truncated to be
1810 * a multiple of the frame size.
1811 * <ul>
1812 * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
1813 * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
1814 * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
1815 * needs to be recreated. The dead object error code is not returned if some data was
1816 * successfully transferred. In this case, the error is returned at the next read()</li>
1817 * <li>{@link #ERROR} in case of other error</li>
1818 * </ul>
1819 */ read(@onNull ByteBuffer audioBuffer, int sizeInBytes, @ReadMode int readMode)
1820 public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes, @ReadMode int readMode) {
1821 if (mState != STATE_INITIALIZED) {
1822 return ERROR_INVALID_OPERATION;
1823 }
1824
1825 if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
1826 Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
1827 return ERROR_BAD_VALUE;
1828 }
1829
1830 if ( (audioBuffer == null) || (sizeInBytes < 0) ) {
1831 return ERROR_BAD_VALUE;
1832 }
1833
1834 return native_read_in_direct_buffer(audioBuffer, sizeInBytes, readMode == READ_BLOCKING);
1835 }
1836
1837 /**
1838 * Returns Metrics data about the current AudioRecord instance.
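 * <p>A small sketch of pulling values out of the returned bundle (key constants are defined
 * in {@link MetricsConstants}; {@code record} is an initialized AudioRecord):
 * <pre>{@code
 * PersistableBundle metrics = record.getMetrics();
 * String encoding = metrics.getString(AudioRecord.MetricsConstants.ENCODING);
 * String source = metrics.getString(AudioRecord.MetricsConstants.SOURCE);
 * }</pre>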
1839 *
1840 * @return a {@link PersistableBundle} containing the set of attributes and values
1841 * available for the media being handled by this instance of AudioRecord.
1842 * The attributes are described in {@link MetricsConstants}.
1843 *
1844 * Additional vendor-specific fields may also be present in
1845 * the return value.
1846 */ getMetrics()
1847 public PersistableBundle getMetrics() {
1848 PersistableBundle bundle = native_getMetrics();
1849 return bundle;
1850 }
1851 native_getMetrics()
1852 private native PersistableBundle native_getMetrics();
1853
1854 //--------------------------------------------------------------------------
1855 // Initialization / configuration
1856 //--------------------
1857 /**
1858 * Sets the listener the AudioRecord notifies when a previously set marker is reached or
1859 * for each periodic record head position update.
1860 * @param listener
1861 */ setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener)
1862 public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener) {
1863 setRecordPositionUpdateListener(listener, null);
1864 }
1865
1866 /**
1867 * Sets the listener the AudioRecord notifies when a previously set marker is reached or
1868 * for each periodic record head position update.
1869 * Use this method to receive AudioRecord events in the Handler associated with a thread
1870 * other than the one in which you created the AudioRecord instance.
1871 * @param listener
1872 * @param handler the Handler that will receive the event notification messages.
1873 */ setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener, Handler handler)
1874 public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener,
1875 Handler handler) {
1876 synchronized (mPositionListenerLock) {
1877
1878 mPositionListener = listener;
1879
1880 if (listener != null) {
1881 if (handler != null) {
1882 mEventHandler = new NativeEventHandler(this, handler.getLooper());
1883 } else {
1884 // no given handler, use the looper the AudioRecord was created in
1885 mEventHandler = new NativeEventHandler(this, mInitializationLooper);
1886 }
1887 } else {
1888 mEventHandler = null;
1889 }
1890 }
1891
1892 }
1893
1894
1895 /**
1896 * Sets the marker position at which the listener is called, if set with
1897 * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
1898 * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)}.
1899 * @param markerInFrames marker position expressed in frames
1900 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1901 * {@link #ERROR_INVALID_OPERATION}
1902 */ setNotificationMarkerPosition(int markerInFrames)
1903 public int setNotificationMarkerPosition(int markerInFrames) {
1904 if (mState == STATE_UNINITIALIZED) {
1905 return ERROR_INVALID_OPERATION;
1906 }
1907 return native_set_marker_pos(markerInFrames);
1908 }
1909
1910 /**
1911 * Internal API of getRoutedDevices(). We should not call flag APIs internally.
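 * <p>A small sketch of the public accessors built on this helper ({@code record} is an
 * initialized, recording AudioRecord; illustration only):
 * <pre>{@code
 * AudioDeviceInfo current = record.getRoutedDevice();        // null when not recording
 * List<AudioDeviceInfo> devices = record.getRoutedDevices(); // empty when not recording
 * }</pre>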
1912 */ getRoutedDevicesInternal()
1913 private @NonNull List<AudioDeviceInfo> getRoutedDevicesInternal() {
1914 List<AudioDeviceInfo> audioDeviceInfos = new ArrayList<AudioDeviceInfo>();
1915 final int[] deviceIds = native_getRoutedDeviceIds();
1916 if (deviceIds == null || deviceIds.length == 0) {
1917 return audioDeviceInfos;
1918 }
1919
1920 for (int i = 0; i < deviceIds.length; i++) {
1921 AudioDeviceInfo audioDeviceInfo = AudioManager.getDeviceForPortId(deviceIds[i],
1922 AudioManager.GET_DEVICES_INPUTS);
1923 if (audioDeviceInfo != null) {
1924 audioDeviceInfos.add(audioDeviceInfo);
1925 }
1926 }
1927 return audioDeviceInfos;
1928 }
1929
1930 /**
1931 * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioRecord.
1932 * Note: The query is only valid if the AudioRecord is currently recording. If it is not,
1933 * <code>getRoutedDevice()</code> will return null.
1934 */
1935 @Override getRoutedDevice()
1936 public AudioDeviceInfo getRoutedDevice() {
1937 final List<AudioDeviceInfo> audioDeviceInfos = getRoutedDevicesInternal();
1938 if (audioDeviceInfos.isEmpty()) {
1939 return null;
1940 }
1941 return audioDeviceInfos.get(0);
1942 }
1943
1944 /**
1945 * Returns a List of {@link AudioDeviceInfo} identifying the current routing of this
1946 * AudioRecord.
1947 * Note: The query is only valid if the AudioRecord is currently recording. If it is not,
1948 * <code>getRoutedDevices()</code> will return an empty list.
1949 */
1950 @Override
1951 @FlaggedApi(FLAG_ROUTED_DEVICE_IDS) getRoutedDevices()
1952 public @NonNull List<AudioDeviceInfo> getRoutedDevices() {
1953 return getRoutedDevicesInternal();
1954 }
1955
1956 /**
1957 * Must match the native definition in frameworks/av/services/audioflinger/AudioFlinger.h.
1958 */
1959 private static final long MAX_SHARED_AUDIO_HISTORY_MS = 5000;
1960
1961 /**
1962 * @hide
1963 * Returns the maximum duration in milliseconds of the audio history that can be requested
1964 * to be made available to other clients using the same session with
1965 * {@link Builder#setMaxSharedAudioHistoryMillis(long)}.
1966 */
1967 @SystemApi getMaxSharedAudioHistoryMillis()
1968 public static long getMaxSharedAudioHistoryMillis() {
1969 return MAX_SHARED_AUDIO_HISTORY_MS;
1970 }
1971
1972 /**
1973 * @hide
1974 *
1975 * A privileged app with permission CAPTURE_AUDIO_HOTWORD can share part of its recent
1976 * capture history on a given AudioRecord with the following steps:
1977 * 1) Specify the maximum time in the past that will be available for other apps by calling
1978 * {@link Builder#setMaxSharedAudioHistoryMillis(long)} when creating the AudioRecord.
1979 * 2) Start recording and determine where the other app should start capturing in the past.
1980 * 3) Call this method with the package name of the app the history will be shared with and
1981 * the intended start time for this app's capture relative to this AudioRecord's start time.
1982 * 4) Communicate the {@link MediaSyncEvent} returned by this method to the other app.
1983 * 5) The other app will use the MediaSyncEvent when creating its AudioRecord with
1984 * {@link Builder#setSharedAudioEvent(MediaSyncEvent)}.
1985 * 6) Only after the other app has started capturing can this app stop capturing and
1986 * release its AudioRecord.
1987 * This method is intended to be called only once: if called multiple times, only the last
1988 * request will be honored.
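 * <p>A hedged sketch of the sharing side only ({@code hotwordRecord}, {@code otherPackageName}
 * and {@code startMs} are placeholders; the receiving app is not shown):
 * <pre>{@code
 * MediaSyncEvent event = hotwordRecord.shareAudioHistory(otherPackageName, startMs);
 * // hand "event" to the other app, which passes it to Builder#setSharedAudioEvent(event)
 * }</pre>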
1989 * The implementation is "best effort": if the specified start time is too far in the past
1990 * compared to the max available history specified, the start time will be adjusted to the
1991 * start of the available history.
1992 * @param sharedPackage the package the history will be shared with
1993 * @param startFromMillis the start time, relative to the initial start time of this
1994 * AudioRecord, at which the other AudioRecord will start.
1995 * @return a {@link MediaSyncEvent} to be communicated to the app this AudioRecord's audio
1996 * history will be shared with.
1997 * @throws IllegalArgumentException
1998 * @throws SecurityException
1999 */
2000 @SystemApi
2001 @RequiresPermission(android.Manifest.permission.CAPTURE_AUDIO_HOTWORD) shareAudioHistory(@onNull String sharedPackage, @IntRange(from = 0) long startFromMillis)
2002 @NonNull public MediaSyncEvent shareAudioHistory(@NonNull String sharedPackage,
2003 @IntRange(from = 0) long startFromMillis) {
2004 Objects.requireNonNull(sharedPackage);
2005 if (startFromMillis < 0) {
2006 throw new IllegalArgumentException("Illegal negative sharedAudioHistoryMs argument");
2007 }
2008 int status = native_shareAudioHistory(sharedPackage, startFromMillis);
2009 if (status == AudioSystem.BAD_VALUE) {
2010 throw new IllegalArgumentException("Illegal sharedAudioHistoryMs argument");
2011 } else if (status == AudioSystem.PERMISSION_DENIED) {
2012 throw new SecurityException("permission CAPTURE_AUDIO_HOTWORD required");
2013 }
2014 MediaSyncEvent event =
2015 MediaSyncEvent.createEvent(MediaSyncEvent.SYNC_EVENT_SHARE_AUDIO_HISTORY);
2016 event.setAudioSessionId(mSessionId);
2017 return event;
2018 }
2019
2020 /*
2021 * Call BEFORE adding a routing callback handler.
2022 */
2023 @GuardedBy("mRoutingChangeListeners") testEnableNativeRoutingCallbacksLocked()
2024 private void testEnableNativeRoutingCallbacksLocked() {
2025 if (mRoutingChangeListeners.size() == 0) {
2026 native_enableDeviceCallback();
2027 }
2028 }
2029
2030 /*
2031 * Call AFTER removing a routing callback handler.
2032 */
2033 @GuardedBy("mRoutingChangeListeners") testDisableNativeRoutingCallbacksLocked()
2034 private void testDisableNativeRoutingCallbacksLocked() {
2035 if (mRoutingChangeListeners.size() == 0) {
2036 native_disableDeviceCallback();
2037 }
2038 }
2039
2040 //--------------------------------------------------------------------------
2041 // (Re)Routing Info
2042 //--------------------
2043 /**
2044 * The list of AudioRouting.OnRoutingChangedListener interfaces added (with
2045 * {@link AudioRecord#addOnRoutingChangedListener}) by an app to receive
2046 * (re)routing notifications.
2047 */
2048 @GuardedBy("mRoutingChangeListeners")
2049 private ArrayMap<AudioRouting.OnRoutingChangedListener,
2050 NativeRoutingEventHandlerDelegate> mRoutingChangeListeners = new ArrayMap<>();
2051
2052 /**
2053 * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of
2054 * routing changes on this AudioRecord.
2055 * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
2056 * notifications of rerouting events.
2057 * @param handler Specifies the {@link Handler} object for the thread on which to execute
2058 * the callback. If <code>null</code>, the {@link Handler} associated with the main
2059 * {@link Looper} will be used.
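 * <p>A minimal registration sketch delivering callbacks on the main looper ({@code record}
 * is assumed initialized):
 * <pre>{@code
 * AudioRouting.OnRoutingChangedListener listener =
 *         router -> Log.d("MyApp", "routing changed to " + router.getRoutedDevice());
 * record.addOnRoutingChangedListener(listener, new Handler(Looper.getMainLooper()));
 * // later, when no longer interested:
 * record.removeOnRoutingChangedListener(listener);
 * }</pre>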
2060 */
2061 @Override addOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener, android.os.Handler handler)
2062 public void addOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener,
2063 android.os.Handler handler) {
2064 synchronized (mRoutingChangeListeners) {
2065 if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
2066 testEnableNativeRoutingCallbacksLocked();
2067 mRoutingChangeListeners.put(
2068 listener, new NativeRoutingEventHandlerDelegate(this, listener,
2069 handler != null ? handler : new Handler(mInitializationLooper)));
2070 }
2071 }
2072 }
2073
2074 /**
2075 * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
2076 * to receive rerouting notifications.
2077 * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
2078 * to remove.
2079 */
2080 @Override removeOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener)
2081 public void removeOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener) {
2082 synchronized (mRoutingChangeListeners) {
2083 if (mRoutingChangeListeners.containsKey(listener)) {
2084 mRoutingChangeListeners.remove(listener);
2085 testDisableNativeRoutingCallbacksLocked();
2086 }
2087 }
2088 }
2089
2090 //--------------------------------------------------------------------------
2091 // (Re)Routing Info
2092 //--------------------
2093 /**
2094 * Defines the interface by which applications can receive notifications of
2095 * routing changes for the associated {@link AudioRecord}.
2096 *
2097 * @deprecated users should switch to the general purpose
2098 * {@link AudioRouting.OnRoutingChangedListener} class instead.
2099 */
2100 @Deprecated
2101 public interface OnRoutingChangedListener extends AudioRouting.OnRoutingChangedListener {
2102 /**
2103 * Called when the routing of an AudioRecord changes from either an
2104 * explicit or policy rerouting. Use {@link #getRoutedDevice()} to
2105 * retrieve the newly routed-from device.
2106 */ onRoutingChanged(AudioRecord audioRecord)
2107 public void onRoutingChanged(AudioRecord audioRecord);
2108
2109 @Override onRoutingChanged(AudioRouting router)
2110 default public void onRoutingChanged(AudioRouting router) {
2111 if (router instanceof AudioRecord) {
2112 onRoutingChanged((AudioRecord) router);
2113 }
2114 }
2115 }
2116
2117 /**
2118 * Adds an {@link OnRoutingChangedListener} to receive notifications of routing changes
2119 * on this AudioRecord.
2120 * @param listener The {@link OnRoutingChangedListener} interface to receive notifications
2121 * of rerouting events.
2122 * @param handler Specifies the {@link Handler} object for the thread on which to execute
2123 * the callback. If <code>null</code>, the {@link Handler} associated with the main
2124 * {@link Looper} will be used.
2125 * @deprecated users should switch to the general purpose
2126 * {@link AudioRouting.OnRoutingChangedListener} class instead.
2127 */
2128 @Deprecated addOnRoutingChangedListener(OnRoutingChangedListener listener, android.os.Handler handler)
2129 public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
2130 android.os.Handler handler) {
2131 addOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener, handler);
2132 }
2133
2134 /**
2135 * Removes an {@link OnRoutingChangedListener} which has been previously added
2136 * to receive rerouting notifications.
2137 * @param listener The previously added {@link OnRoutingChangedListener} interface to remove.
2138 * @deprecated users should switch to the general purpose
2139 * {@link AudioRouting.OnRoutingChangedListener} class instead.
2140 */
2141 @Deprecated removeOnRoutingChangedListener(OnRoutingChangedListener listener)
2142 public void removeOnRoutingChangedListener(OnRoutingChangedListener listener) {
2143 removeOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener);
2144 }
2145
2146 /**
2147 * Sends device list change notification to all listeners.
2148 */ broadcastRoutingChange()
2149 private void broadcastRoutingChange() {
2150 AudioManager.resetAudioPortGeneration();
2151 synchronized (mRoutingChangeListeners) {
2152 for (NativeRoutingEventHandlerDelegate delegate : mRoutingChangeListeners.values()) {
2153 delegate.notifyClient();
2154 }
2155 }
2156 }
2157
2158 /**
2159 * Sets the period at which the listener is called, if set with
2160 * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
2161 * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)}.
2162 * It is possible for notifications to be lost if the period is too small.
2163 * @param periodInFrames update period expressed in frames
2164 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
2165 */ setPositionNotificationPeriod(int periodInFrames)
2166 public int setPositionNotificationPeriod(int periodInFrames) {
2167 if (mState == STATE_UNINITIALIZED) {
2168 return ERROR_INVALID_OPERATION;
2169 }
2170 return native_set_pos_update_period(periodInFrames);
2171 }
2172
2173 //--------------------------------------------------------------------------
2174 // Explicit Routing
2175 //--------------------
2176 private AudioDeviceInfo mPreferredDevice = null;
2177
2178 /**
2179 * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
2180 * the input to this AudioRecord.
2181 * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio source.
2182 * If deviceInfo is null, default routing is restored.
2183 * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
2184 * does not correspond to a valid audio input device.
2185 */
2186 @Override setPreferredDevice(AudioDeviceInfo deviceInfo)
2187 public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
2188 // Do some validation....
2189 if (deviceInfo != null && !deviceInfo.isSource()) {
2190 return false;
2191 }
2192
2193 int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
2194 boolean status = native_setInputDevice(preferredDeviceId);
2195 if (status == true) {
2196 synchronized (this) {
2197 mPreferredDevice = deviceInfo;
2198 }
2199 }
2200 return status;
2201 }
2202
2203 /**
2204 * Returns the selected input specified by {@link #setPreferredDevice}. Note that this
2205 * is not guaranteed to correspond to the actual device being used for recording.
2206 */
2207 @Override getPreferredDevice()
2208 public AudioDeviceInfo getPreferredDevice() {
2209 synchronized (this) {
2210 return mPreferredDevice;
2211 }
2212 }
2213
2214 //--------------------------------------------------------------------------
2215 // Microphone information
2216 //--------------------
2217 /**
2218 * Returns a list of {@link MicrophoneInfo} representing the active microphones.
2219 * By querying the channel mapping for each active microphone, developers can know how
2220 * the microphone is used by each channel of a capture stream.
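 * <p>A small sketch of inspecting the active microphones ({@code record} is assumed
 * to be recording):
 * <pre>{@code
 * try {
 *     for (MicrophoneInfo mic : record.getActiveMicrophones()) {
 *         Log.d("MyApp", mic.getDescription() + " type=" + mic.getType());
 *     }
 * } catch (IOException e) {
 *     // query failed
 * }
 * }</pre>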
2221 * Note that the information about the active microphones may change during a recording. 2222 * See {@link AudioManager#registerAudioDeviceCallback} to be notified of changes 2223 * in the audio devices, querying the active microphones then will return the latest 2224 * information. 2225 * 2226 * @return a lists of {@link MicrophoneInfo} representing the active microphones. 2227 * @throws IOException if an error occurs 2228 */ getActiveMicrophones()2229 public List<MicrophoneInfo> getActiveMicrophones() throws IOException { 2230 ArrayList<MicrophoneInfo> activeMicrophones = new ArrayList<>(); 2231 int status = native_get_active_microphones(activeMicrophones); 2232 if (status != AudioManager.SUCCESS) { 2233 if (status != AudioManager.ERROR_INVALID_OPERATION) { 2234 Log.e(TAG, "getActiveMicrophones failed:" + status); 2235 } 2236 Log.i(TAG, "getActiveMicrophones failed, fallback on routed device info"); 2237 } 2238 AudioManager.setPortIdForMicrophones(activeMicrophones); 2239 2240 // Use routed device when there is not information returned by hal. 2241 if (activeMicrophones.size() == 0) { 2242 AudioDeviceInfo device = getRoutedDevice(); 2243 if (device != null) { 2244 MicrophoneInfo microphone = AudioManager.microphoneInfoFromAudioDeviceInfo(device); 2245 ArrayList<Pair<Integer, Integer>> channelMapping = new ArrayList<>(); 2246 for (int i = 0; i < mChannelCount; i++) { 2247 channelMapping.add(new Pair(i, MicrophoneInfo.CHANNEL_MAPPING_DIRECT)); 2248 } 2249 microphone.setChannelMapping(channelMapping); 2250 activeMicrophones.add(microphone); 2251 } 2252 } 2253 return activeMicrophones; 2254 } 2255 2256 //-------------------------------------------------------------------------- 2257 // Implementation of AudioRecordingMonitor interface 2258 //-------------------- 2259 2260 AudioRecordingMonitorImpl mRecordingInfoImpl = 2261 new AudioRecordingMonitorImpl((AudioRecordingMonitorClient) this); 2262 2263 /** 2264 * Register a callback to be notified of audio capture changes via a 2265 * {@link AudioManager.AudioRecordingCallback}. A callback is received when the capture path 2266 * configuration changes (pre-processing, format, sampling rate...) or capture is 2267 * silenced/unsilenced by the system. 2268 * @param executor {@link Executor} to handle the callbacks. 2269 * @param cb non-null callback to register 2270 */ registerAudioRecordingCallback(@onNull @allbackExecutor Executor executor, @NonNull AudioManager.AudioRecordingCallback cb)2271 public void registerAudioRecordingCallback(@NonNull @CallbackExecutor Executor executor, 2272 @NonNull AudioManager.AudioRecordingCallback cb) { 2273 mRecordingInfoImpl.registerAudioRecordingCallback(executor, cb); 2274 } 2275 2276 /** 2277 * Unregister an audio recording callback previously registered with 2278 * {@link #registerAudioRecordingCallback(Executor, AudioManager.AudioRecordingCallback)}. 2279 * @param cb non-null callback to unregister 2280 */ unregisterAudioRecordingCallback(@onNull AudioManager.AudioRecordingCallback cb)2281 public void unregisterAudioRecordingCallback(@NonNull AudioManager.AudioRecordingCallback cb) { 2282 mRecordingInfoImpl.unregisterAudioRecordingCallback(cb); 2283 } 2284 2285 /** 2286 * Returns the current active audio recording for this audio recorder. 2287 * @return a valid {@link AudioRecordingConfiguration} if this recorder is active 2288 * or null otherwise. 
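 * <p>A hedged sketch pairing this with the recording callback registered above
 * ({@code context} is the application context, {@code record} an initialized AudioRecord):
 * <pre>{@code
 * record.registerAudioRecordingCallback(context.getMainExecutor(),
 *         new AudioManager.AudioRecordingCallback() {
 *             public void onRecordingConfigChanged(List<AudioRecordingConfiguration> configs) {
 *                 AudioRecordingConfiguration active = record.getActiveRecordingConfiguration();
 *                 // active is null if this recorder is no longer recording
 *             }
 *         });
 * }</pre>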
2289 * @see AudioRecordingConfiguration
2290 */ getActiveRecordingConfiguration()
2291 public @Nullable AudioRecordingConfiguration getActiveRecordingConfiguration() {
2292 return mRecordingInfoImpl.getActiveRecordingConfiguration();
2293 }
2294
2295 //---------------------------------------------------------
2296 // Implementation of AudioRecordingMonitorClient interface
2297 //--------------------
2298 /**
2299 * @hide
2300 */ getPortId()
2301 public int getPortId() {
2302 if (mNativeAudioRecordHandle == 0) {
2303 return 0;
2304 }
2305 try {
2306 return native_getPortId();
2307 } catch (IllegalStateException e) {
2308 return 0;
2309 }
2310 }
2311
2312 //--------------------------------------------------------------------------
2313 // MicrophoneDirection
2314 //--------------------
2315 /**
2316 * Specifies the logical microphone (for processing). Applications can use this to specify
2317 * which side of the device to optimize capture from. Typically used in conjunction with
2318 * the camera capturing video.
2319 *
2320 * @return true if successful.
2321 */ setPreferredMicrophoneDirection(@irectionMode int direction)
2322 public boolean setPreferredMicrophoneDirection(@DirectionMode int direction) {
2323 return native_set_preferred_microphone_direction(direction) == AudioSystem.SUCCESS;
2324 }
2325
2326 /**
2327 * Specifies the zoom factor (i.e. the field dimension) for the selected microphone
2328 * (for processing). The selected microphone is determined by the use-case for the stream.
2329 *
2330 * @param zoom the desired field dimension of microphone capture. Range is from -1 (wide angle),
2331 * through 0 (no zoom) to 1 (maximum zoom).
2332 * @return true if successful.
2333 */ setPreferredMicrophoneFieldDimension( @loatRangefrom = -1.0, to = 1.0) float zoom)
2334 public boolean setPreferredMicrophoneFieldDimension(
2335 @FloatRange(from = -1.0, to = 1.0) float zoom) {
2336 Preconditions.checkArgument(
2337 zoom >= -1 && zoom <= 1, "Argument must fall between -1 & 1 (inclusive)");
2338 return native_set_preferred_microphone_field_dimension(zoom) == AudioSystem.SUCCESS;
2339 }
2340
2341 /**
2342 * Sets a {@link LogSessionId} instance to this AudioRecord for metrics collection.
2343 *
2344 * @param logSessionId a {@link LogSessionId} instance which is used to
2345 * identify this object to the metrics service. Proper generated
2346 * Ids must be obtained from the Java metrics service and should
2347 * be considered opaque. Use
2348 * {@link LogSessionId#LOG_SESSION_ID_NONE} to remove the
2349 * logSessionId association.
2350 * @throws IllegalStateException if AudioRecord not initialized.
2351 */ setLogSessionId(@onNull LogSessionId logSessionId)
2352 public void setLogSessionId(@NonNull LogSessionId logSessionId) {
2353 Objects.requireNonNull(logSessionId);
2354 if (mState == STATE_UNINITIALIZED) {
2355 throw new IllegalStateException("AudioRecord not initialized");
2356 }
2357 String stringId = logSessionId.getStringId();
2358 native_setLogSessionId(stringId);
2359 mLogSessionId = logSessionId;
2360 }
2361
2362 /**
2363 * Returns the {@link LogSessionId}.
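 * <p>A minimal sketch of associating and later clearing a metrics session id
 * ({@code sessionId} is assumed to have been obtained from the platform metrics service):
 * <pre>{@code
 * record.setLogSessionId(sessionId);
 * LogSessionId current = record.getLogSessionId();
 * record.setLogSessionId(LogSessionId.LOG_SESSION_ID_NONE); // removes the association
 * }</pre>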
2364 */ 2365 @NonNull getLogSessionId()2366 public LogSessionId getLogSessionId() { 2367 return mLogSessionId; 2368 } 2369 2370 //--------------------------------------------------------- 2371 // Interface definitions 2372 //-------------------- 2373 /** 2374 * Interface definition for a callback to be invoked when an AudioRecord has 2375 * reached a notification marker set by {@link AudioRecord#setNotificationMarkerPosition(int)} 2376 * or for periodic updates on the progress of the record head, as set by 2377 * {@link AudioRecord#setPositionNotificationPeriod(int)}. 2378 */ 2379 public interface OnRecordPositionUpdateListener { 2380 /** 2381 * Called on the listener to notify it that the previously set marker has been reached 2382 * by the recording head. 2383 */ onMarkerReached(AudioRecord recorder)2384 void onMarkerReached(AudioRecord recorder); 2385 2386 /** 2387 * Called on the listener to periodically notify it that the record head has reached 2388 * a multiple of the notification period. 2389 */ onPeriodicNotification(AudioRecord recorder)2390 void onPeriodicNotification(AudioRecord recorder); 2391 } 2392 2393 2394 2395 //--------------------------------------------------------- 2396 // Inner classes 2397 //-------------------- 2398 2399 /** 2400 * Helper class to handle the forwarding of native events to the appropriate listener 2401 * (potentially) handled in a different thread 2402 */ 2403 private class NativeEventHandler extends Handler { 2404 private final AudioRecord mAudioRecord; 2405 NativeEventHandler(AudioRecord recorder, Looper looper)2406 NativeEventHandler(AudioRecord recorder, Looper looper) { 2407 super(looper); 2408 mAudioRecord = recorder; 2409 } 2410 2411 @Override handleMessage(Message msg)2412 public void handleMessage(Message msg) { 2413 OnRecordPositionUpdateListener listener = null; 2414 synchronized (mPositionListenerLock) { 2415 listener = mAudioRecord.mPositionListener; 2416 } 2417 2418 switch (msg.what) { 2419 case NATIVE_EVENT_MARKER: 2420 if (listener != null) { 2421 listener.onMarkerReached(mAudioRecord); 2422 } 2423 break; 2424 case NATIVE_EVENT_NEW_POS: 2425 if (listener != null) { 2426 listener.onPeriodicNotification(mAudioRecord); 2427 } 2428 break; 2429 default: 2430 loge("Unknown native event type: " + msg.what); 2431 break; 2432 } 2433 } 2434 } 2435 2436 //--------------------------------------------------------- 2437 // Java methods called from the native side 2438 //-------------------- 2439 @SuppressWarnings("unused") 2440 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) postEventFromNative(Object audiorecord_ref, int what, int arg1, int arg2, Object obj)2441 private static void postEventFromNative(Object audiorecord_ref, 2442 int what, int arg1, int arg2, Object obj) { 2443 //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2); 2444 AudioRecord recorder = (AudioRecord)((WeakReference)audiorecord_ref).get(); 2445 if (recorder == null) { 2446 return; 2447 } 2448 2449 if (what == AudioSystem.NATIVE_EVENT_ROUTING_CHANGE) { 2450 recorder.broadcastRoutingChange(); 2451 return; 2452 } 2453 2454 if (recorder.mEventHandler != null) { 2455 Message m = 2456 recorder.mEventHandler.obtainMessage(what, arg1, arg2, obj); 2457 recorder.mEventHandler.sendMessage(m); 2458 } 2459 2460 } 2461 2462 2463 //--------------------------------------------------------- 2464 // Native methods called from the Java side 2465 //-------------------- 2466 2467 /** 2468 * @deprecated Use native_setup that takes 
an {@link AttributionSource} object 2469 * @return 2470 */ 2471 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, 2472 publicAlternatives = "{@code AudioRecord.Builder}") 2473 @Deprecated native_setup(Object audiorecordThis, Object attributes, int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat, int buffSizeInBytes, int[] sessionId, String opPackageName, long nativeRecordInJavaObj, int halInputFlags)2474 private int native_setup(Object audiorecordThis, 2475 Object /*AudioAttributes*/ attributes, 2476 int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat, 2477 int buffSizeInBytes, int[] sessionId, String opPackageName, 2478 long nativeRecordInJavaObj, int halInputFlags) { 2479 AttributionSource attributionSource = AttributionSource.myAttributionSource() 2480 .withPackageName(opPackageName); 2481 try (ScopedParcelState attributionSourceState = attributionSource.asScopedParcelState()) { 2482 return native_setup(audiorecordThis, attributes, sampleRate, channelMask, 2483 channelIndexMask, audioFormat, buffSizeInBytes, sessionId, 2484 attributionSourceState.getParcel(), nativeRecordInJavaObj, 0, halInputFlags); 2485 } 2486 } 2487 native_setup(Object audiorecordThis, Object attributes, int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat, int buffSizeInBytes, int[] sessionId, @NonNull Parcel attributionSource, long nativeRecordInJavaObj, int maxSharedAudioHistoryMs, int halInputFlags)2488 private native int native_setup(Object audiorecordThis, 2489 Object /*AudioAttributes*/ attributes, 2490 int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat, 2491 int buffSizeInBytes, int[] sessionId, @NonNull Parcel attributionSource, 2492 long nativeRecordInJavaObj, int maxSharedAudioHistoryMs, int halInputFlags); 2493 2494 // TODO remove: implementation calls directly into implementation of native_release() native_finalize()2495 private native void native_finalize(); 2496 2497 /** 2498 * @hide 2499 */ 2500 @UnsupportedAppUsage native_release()2501 public native final void native_release(); 2502 native_start(int syncEvent, int sessionId)2503 private native final int native_start(int syncEvent, int sessionId); 2504 native_stop()2505 private native final void native_stop(); 2506 native_read_in_byte_array(byte[] audioData, int offsetInBytes, int sizeInBytes, boolean isBlocking)2507 private native final int native_read_in_byte_array(byte[] audioData, 2508 int offsetInBytes, int sizeInBytes, boolean isBlocking); 2509 native_read_in_short_array(short[] audioData, int offsetInShorts, int sizeInShorts, boolean isBlocking)2510 private native final int native_read_in_short_array(short[] audioData, 2511 int offsetInShorts, int sizeInShorts, boolean isBlocking); 2512 native_read_in_float_array(float[] audioData, int offsetInFloats, int sizeInFloats, boolean isBlocking)2513 private native final int native_read_in_float_array(float[] audioData, 2514 int offsetInFloats, int sizeInFloats, boolean isBlocking); 2515 native_read_in_direct_buffer(Object jBuffer, int sizeInBytes, boolean isBlocking)2516 private native final int native_read_in_direct_buffer(Object jBuffer, 2517 int sizeInBytes, boolean isBlocking); 2518 native_get_buffer_size_in_frames()2519 private native final int native_get_buffer_size_in_frames(); 2520 native_set_marker_pos(int marker)2521 private native final int native_set_marker_pos(int marker); native_get_marker_pos()2522 private native final int native_get_marker_pos(); 2523 native_set_pos_update_period(int 
updatePeriod)2524 private native final int native_set_pos_update_period(int updatePeriod); native_get_pos_update_period()2525 private native final int native_get_pos_update_period(); 2526 native_get_min_buff_size( int sampleRateInHz, int channelCount, int audioFormat)2527 static private native final int native_get_min_buff_size( 2528 int sampleRateInHz, int channelCount, int audioFormat); 2529 native_setInputDevice(int deviceId)2530 private native final boolean native_setInputDevice(int deviceId); native_getRoutedDeviceIds()2531 private native int[] native_getRoutedDeviceIds(); native_enableDeviceCallback()2532 private native final void native_enableDeviceCallback(); native_disableDeviceCallback()2533 private native final void native_disableDeviceCallback(); 2534 native_get_timestamp(@onNull AudioTimestamp outTimestamp, @AudioTimestamp.Timebase int timebase)2535 private native final int native_get_timestamp(@NonNull AudioTimestamp outTimestamp, 2536 @AudioTimestamp.Timebase int timebase); 2537 native_get_active_microphones( ArrayList<MicrophoneInfo> activeMicrophones)2538 private native final int native_get_active_microphones( 2539 ArrayList<MicrophoneInfo> activeMicrophones); 2540 2541 /** 2542 * @throws IllegalStateException 2543 */ native_getPortId()2544 private native int native_getPortId(); 2545 native_set_preferred_microphone_direction(int direction)2546 private native int native_set_preferred_microphone_direction(int direction); native_set_preferred_microphone_field_dimension(float zoom)2547 private native int native_set_preferred_microphone_field_dimension(float zoom); 2548 native_setLogSessionId(@ullable String logSessionId)2549 private native void native_setLogSessionId(@Nullable String logSessionId); 2550 native_shareAudioHistory(@onNull String sharedPackage, long startFromMs)2551 private native int native_shareAudioHistory(@NonNull String sharedPackage, long startFromMs); 2552 2553 //--------------------------------------------------------- 2554 // Utility methods 2555 //------------------ 2556 logd(String msg)2557 private static void logd(String msg) { 2558 Log.d(TAG, msg); 2559 } 2560 loge(String msg)2561 private static void loge(String msg) { 2562 Log.e(TAG, msg); 2563 } 2564 2565 public static final class MetricsConstants 2566 { MetricsConstants()2567 private MetricsConstants() {} 2568 2569 // MM_PREFIX is slightly different than TAG, used to avoid cut-n-paste errors. 2570 private static final String MM_PREFIX = "android.media.audiorecord."; 2571 2572 /** 2573 * Key to extract the audio data encoding for this track 2574 * from the {@link AudioRecord#getMetrics} return value. 2575 * The value is a {@code String}. 2576 */ 2577 public static final String ENCODING = MM_PREFIX + "encoding"; 2578 2579 /** 2580 * Key to extract the source type for this track 2581 * from the {@link AudioRecord#getMetrics} return value. 2582 * The value is a {@code String}. 2583 */ 2584 public static final String SOURCE = MM_PREFIX + "source"; 2585 2586 /** 2587 * Key to extract the estimated latency through the recording pipeline 2588 * from the {@link AudioRecord#getMetrics} return value. 2589 * This is in units of milliseconds. 2590 * The value is an {@code int}. 2591 * @deprecated Not properly supported in the past. 2592 */ 2593 @Deprecated 2594 public static final String LATENCY = MM_PREFIX + "latency"; 2595 2596 /** 2597 * Key to extract the sink sample rate for this record track in Hz 2598 * from the {@link AudioRecord#getMetrics} return value. 2599 * The value is an {@code int}. 
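 * <p>For example (a sketch; {@code record} is an initialized AudioRecord):
 * <pre>{@code
 * int sinkRate = record.getMetrics().getInt(AudioRecord.MetricsConstants.SAMPLERATE);
 * }</pre>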
2600 */ 2601 public static final String SAMPLERATE = MM_PREFIX + "samplerate"; 2602 2603 /** 2604 * Key to extract the number of channels being recorded in this record track 2605 * from the {@link AudioRecord#getMetrics} return value. 2606 * The value is an {@code int}. 2607 */ 2608 public static final String CHANNELS = MM_PREFIX + "channels"; 2609 2610 /** 2611 * Use for testing only. Do not expose. 2612 * The native channel mask. 2613 * The value is a {@code long}. 2614 * @hide 2615 */ 2616 @TestApi 2617 public static final String CHANNEL_MASK = MM_PREFIX + "channelMask"; 2618 2619 2620 /** 2621 * Use for testing only. Do not expose. 2622 * The port id of this input port in audioserver. 2623 * The value is an {@code int}. 2624 * @hide 2625 */ 2626 @TestApi 2627 public static final String PORT_ID = MM_PREFIX + "portId"; 2628 2629 /** 2630 * Use for testing only. Do not expose. 2631 * The buffer frameCount. 2632 * The value is an {@code int}. 2633 * @hide 2634 */ 2635 @TestApi 2636 public static final String FRAME_COUNT = MM_PREFIX + "frameCount"; 2637 2638 /** 2639 * Use for testing only. Do not expose. 2640 * The actual record track attributes used. 2641 * The value is a {@code String}. 2642 * @hide 2643 */ 2644 @TestApi 2645 public static final String ATTRIBUTES = MM_PREFIX + "attributes"; 2646 2647 /** 2648 * Use for testing only. Do not expose. 2649 * The buffer frameCount 2650 * The value is a {@code double}. 2651 * @hide 2652 */ 2653 @TestApi 2654 public static final String DURATION_MS = MM_PREFIX + "durationMs"; 2655 2656 /** 2657 * Use for testing only. Do not expose. 2658 * The number of times the record track has started 2659 * The value is a {@code long}. 2660 * @hide 2661 */ 2662 @TestApi 2663 public static final String START_COUNT = MM_PREFIX + "startCount"; 2664 } 2665 } 2666
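/*
 * Usage sketch (illustrative only): a minimal capture loop built from the public API above.
 * Assumes android.permission.RECORD_AUDIO has been granted; "keepRecording" and the other
 * local names are placeholders, and error handling is reduced to the bare minimum.
 *
 *   int sampleRate = 16000;
 *   int minBytes = AudioRecord.getMinBufferSize(sampleRate,
 *           AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
 *   AudioRecord record = new AudioRecord.Builder()
 *           .setAudioFormat(new AudioFormat.Builder()
 *                   .setSampleRate(sampleRate)
 *                   .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
 *                   .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
 *                   .build())
 *           .setBufferSizeInBytes(2 * minBytes)
 *           .build();
 *   short[] buffer = new short[minBytes / 2];
 *   record.startRecording();
 *   while (keepRecording) {
 *       int read = record.read(buffer, 0, buffer.length);
 *       if (read < 0) break;          // error code, see read() documentation
 *       // consume "read" shorts from buffer
 *   }
 *   record.stop();
 *   record.release();
 */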