/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.audio.cts;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioAttributes;
import android.media.AudioDeviceInfo;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.media.audiopolicy.AudioMix;
import android.media.audiopolicy.AudioMixingRule;
import android.media.audiopolicy.AudioPolicy;
import android.os.Build;
import android.os.Looper;
import android.platform.test.flag.junit.CheckFlagsRule;
import android.platform.test.flag.junit.DeviceFlagsValueProvider;
import android.util.Log;

import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.runner.AndroidJUnit4;

import com.android.bedstead.harrier.DeviceState;
import com.android.bedstead.harrier.annotations.RequireNotAutomotive;
import com.android.compatibility.common.util.CddTest;
import com.android.compatibility.common.util.PropertyUtil;

import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Verifies that audio written to an {@link AudioTrack} and routed through a loopback
 * {@link AudioPolicy} mix (remote submix) can be read back unmodified from an
 * {@link AudioRecord} sink, for both PCM and E-AC3 JOC (passthrough) content.
 */
@RunWith(AndroidJUnit4.class)
public class LoopbackPassthroughTest {

    private static final String TAG = "LoopbackPassthroughTest";
    private Context mContext;
    private AudioManager mAudioManager;
    private AudioPolicy mAudioPolicy;
    private AudioFormat mMixFormat;
    private AudioSource mPlaybackSource;
    private AudioSource mRecordReferenceSource;
    private int mBytesToRead;
    private float mBitrateInBytesPerSecond;

    @ClassRule
    @Rule
    public static final DeviceState sDeviceState = new DeviceState();

    @Rule
    public final CheckFlagsRule mCheckFlagsRule = DeviceFlagsValueProvider.createCheckFlagsRule();

    @Before
    public void setUp() {
        mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        InstrumentationRegistry.getInstrumentation().getUiAutomation()
                .adoptShellPermissionIdentity(Manifest.permission.MODIFY_AUDIO_ROUTING);
        mAudioManager = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
    }

    @After
    public void tearDown() throws Exception {
        if (mPlaybackSource != null) {
            mPlaybackSource.release();
        }
        if (mRecordReferenceSource != null) {
            mRecordReferenceSource.release();
        }
        if (mAudioPolicy != null) {
            mAudioManager.unregisterAudioPolicy(mAudioPolicy);
            mAudioPolicy = null;
        }
        InstrumentationRegistry.getInstrumentation()
                .getUiAutomation().dropShellPermissionIdentity();
    }

    /** Wrapper around org.junit.Assert that also logs failures to logcat before asserting. */
    private static class Assert {
        public static void fail(String message) {
            Log.e(TAG, message);
            org.junit.Assert.fail(message);
        }

        public static void assertNotNull(String message, Object object) {
            if (object == null) {
                Log.e(TAG, message);
            }
            org.junit.Assert.assertNotNull(message, object);
        }

        public static void assertTrue(String message, boolean condition) {
            if (!condition) {
                Log.e(TAG, message);
            }
            org.junit.Assert.assertTrue(message, condition);
        }

        public static <T> void assertEquals(String message, T expected, T actual) {
            if (!Objects.equals(expected, actual)) {
                Log.e(TAG, message + ". Expected " + String.valueOf(expected) + " but got "
                        + String.valueOf(actual) + ".");
            }
            org.junit.Assert.assertEquals(message, expected, actual);
        }
    }

    @CddTest(requirement="5.4.3/C-1-1")
    @Test
    @RequireNotAutomotive(reason = "Auto uses its own policy for routing")
    public void testPcmLoopback() {
        if (!supportsLoopback()
                || PropertyUtil.getVsrApiLevel() <= Build.VENDOR_API_2024_Q2) {
            return;
        }
        final int NUM_BUFFERS_TO_WRITE = 32;
        final int NUM_BUFFERS_NOT_DRAINED_TOLERANCE = 1; // Read this number of buffers less
                // compared to the number of buffers written.
        final int sampleRate = 48000;
        mMixFormat = new AudioFormat.Builder()
                .setSampleRate(sampleRate)
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
                .build();
        mBitrateInBytesPerSecond = sampleRate * mMixFormat.getFrameSizeInBytes();
        mBytesToRead = (NUM_BUFFERS_TO_WRITE - NUM_BUFFERS_NOT_DRAINED_TOLERANCE)
                * AudioTrack.getMinBufferSize(mMixFormat.getSampleRate(),
                        mMixFormat.getChannelMask(), mMixFormat.getEncoding());
        mPlaybackSource = new PcmAudioSource(mBytesToRead);
        mRecordReferenceSource = new PcmAudioSource(mBytesToRead);
        loopback(false);
    }

    @CddTest(requirement="5.4.3/C-1-1")
    @Test
    @RequireNotAutomotive(reason = "Auto uses its own policy for routing")
    public void testEac3JocLoopback() {
        if (!supportsLoopback()
                || PropertyUtil.getVsrApiLevel() <= Build.VENDOR_API_2024_Q2) {
            return;
        }
        final int EAC3_JOC_RESOURCE = R.raw.Living_Room_Atmos_6ch_640kbps_eac3_joc_10s;
        mBitrateInBytesPerSecond = (float) 640000 / 8;
        final int NUM_EAC3_JOC_FRAMES_TO_WRITE = 312;
        final int EAC3_JOC_FRAMES_NOT_DRAINED_TOLERANCE = 2; // Read this number of frames less
                // compared to the number of frames written.
        // TODO: improve implementation to reduce EAC3_JOC_FRAMES_NOT_DRAINED_TOLERANCE
        final int NUM_EAC3_JOC_FRAMES_TO_READ =
                NUM_EAC3_JOC_FRAMES_TO_WRITE - EAC3_JOC_FRAMES_NOT_DRAINED_TOLERANCE;
        final int EAC3_JOC_BYTES_PER_FRAME = 2560;
        mBytesToRead = NUM_EAC3_JOC_FRAMES_TO_READ * EAC3_JOC_BYTES_PER_FRAME;
        mMixFormat = new AudioFormat.Builder()
                .setSampleRate(48000)
                .setEncoding(AudioFormat.ENCODING_E_AC3_JOC)
                .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
                .build();
        mPlaybackSource = new Eac3JocAudioSource(EAC3_JOC_RESOURCE);
        mRecordReferenceSource = new Eac3JocAudioSource(EAC3_JOC_RESOURCE);
        loopback(true);
    }

    private boolean supportsLoopback() {
        return mContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE)
                && mContext.getPackageManager().hasSystemFeature(
                        PackageManager.FEATURE_AUDIO_OUTPUT);
    }

    private void loopback(boolean checkAudioData) {
        AudioAttributes mediaAttr = new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_MEDIA)
                .build();
        AudioMixingRule mediaRule = new AudioMixingRule.Builder()
                .addRule(mediaAttr, AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE)
                .build();
        AudioMix audioMix = new AudioMix.Builder(mediaRule)
                .setFormat(mMixFormat)
                .setRouteFlags(AudioMix.ROUTE_FLAG_LOOP_BACK)
                .build();

        mAudioPolicy = new AudioPolicy.Builder(mContext)
                .addMix(audioMix)
                .setLooper(Looper.getMainLooper())
                .build();

        if (mAudioManager.registerAudioPolicy(mAudioPolicy) != AudioManager.SUCCESS) {
            Assert.fail("failed to register audio policy");
        }
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            Log.e(TAG, "main thread interrupted");
        }

        AudioRecord recorder = mAudioPolicy.createAudioRecordSink(audioMix);
        Assert.assertNotNull("didn't create AudioRecord sink", recorder);
        Assert.assertEquals("AudioRecord not initialized", AudioRecord.STATE_INITIALIZED,
                recorder.getState());
        AudioRecordThread audioRecordThread = new AudioRecordThread(recorder, checkAudioData);
        audioRecordThread.startRecording();

        // when audio policy is installed, 3P apps should be able to discover direct capabilities
        if (mPlaybackSource.getFormat() == AudioFormat.ENCODING_E_AC3_JOC) {
            // Mask the returned flags so only the bitstream support bit is checked.
            Assert.assertEquals("direct playback not supported",
                    AudioManager.DIRECT_PLAYBACK_BITSTREAM_SUPPORTED,
                    AudioManager.getDirectPlaybackSupport(mMixFormat, mediaAttr)
                            & AudioManager.DIRECT_PLAYBACK_BITSTREAM_SUPPORTED);
        }

        AudioTrack player = null;
        try {
            final int kBufferSizeInBytes = AudioTrack.getMinBufferSize(mMixFormat.getSampleRate(),
                    mMixFormat.getChannelMask(), mMixFormat.getEncoding());
            player = new AudioTrack.Builder()
                    .setAudioAttributes(mediaAttr)
                    .setAudioFormat(mMixFormat)
                    .setTransferMode(AudioTrack.MODE_STREAM)
                    .setBufferSizeInBytes(kBufferSizeInBytes)
                    .build();
            byte[] chunk = new byte[kBufferSizeInBytes];
            int totalBytesWritten = 0;
            AudioTimestamp timestamp = new AudioTimestamp();
            while (true) {
                int bytesRead = mPlaybackSource.read(chunk, kBufferSizeInBytes);
                if (bytesRead <= 0) {
                    // TODO: Test getUnderrunCount().
                    break;
                }
                int bytesToWrite = bytesRead;
                while (bytesToWrite > 0) {
                    int ret = player.write(chunk, bytesRead - bytesToWrite, bytesToWrite,
                            AudioTrack.WRITE_BLOCKING);
                    if (ret < 0) {
                        Assert.fail("Unable to write to AudioTrack, returns:" + ret);
                    } else {
                        bytesToWrite -= ret;
                        totalBytesWritten += ret;
                        Log.v(TAG, "wrote " + ret
                                + " bytes to AudioTrack. Bytes left:" + bytesToWrite
                                + " Offset relative to start:" + totalBytesWritten);
                        if (player.getPlayState() != AudioTrack.PLAYSTATE_PLAYING
                                && ret < kBufferSizeInBytes) {
                            player.play();
                            Log.v(TAG, "start play");
                            Assert.assertEquals("track not routed to remote submix",
                                    AudioDeviceInfo.TYPE_REMOTE_SUBMIX,
                                    player.getRoutedDevice().getType());
                        }
                        // check positions are correct within a latency tolerance of 1 second
                        player.getTimestamp(timestamp);
                        int headPosition = player.getPlaybackHeadPosition();
                        float writtenInMilliseconds =
                                (totalBytesWritten * 1000) / mBitrateInBytesPerSecond;
                        float expectedFramePositionPcmReferred =
                                (writtenInMilliseconds * mMixFormat.getSampleRate()) / 1000;
                        float minAllowedFramePosition =
                                expectedFramePositionPcmReferred - mMixFormat.getSampleRate();
                        float maxAllowedFramePosition =
                                expectedFramePositionPcmReferred + mMixFormat.getSampleRate();
                        Assert.assertTrue("timestamp position:" + timestamp.framePosition
                                + " time:" + timestamp.nanoTime + " out of range",
                                timestamp.framePosition >= minAllowedFramePosition
                                        && timestamp.framePosition <= maxAllowedFramePosition);
                        Assert.assertTrue("head position:" + headPosition + " out of range",
                                headPosition >= minAllowedFramePosition
                                        && headPosition <= maxAllowedFramePosition);
                    }
                }
            }
        } catch (UnsupportedOperationException e) {
            Assert.fail("can't create audio track");
        } finally {
            if (player != null) {
                player.stop();
                player.release();
            }
        }

        try {
            Thread.sleep(1000);
            Assert.assertTrue("AudioRecord output differs from AudioTrack input",
                    audioRecordThread.isRecordingOutputCorrect());
        } catch (InterruptedException e) {
            Assert.fail("main thread interrupted");
        } finally {
            audioRecordThread.stopRecording();
        }
    }

    private interface AudioSource {
        // Read "numBytes" bytes of audio into "buffer".
        // @return Number of bytes actually read.
        int read(byte[] buffer, int numBytes);

        // Returns the offset in bytes read relative to the start of the source.
        int getOffsetInBytes();

        // Returns the audio format of the source.
        int getFormat();

        // Releases resources acquired by this instance.
        void release();
    }

    private static class PcmAudioSource implements AudioSource {
        private final int mTotalBytes;
        private int mBytesRead;

        public PcmAudioSource(int totalBytes) {
            mTotalBytes = totalBytes;
            mBytesRead = 0;
        }

        @Override
        public int read(byte[] buffer, int numBytes) {
            int bytesToRead = Math.min(numBytes, mTotalBytes - mBytesRead);
            for (int j = 0; j < bytesToRead; j++) {
                buffer[j] = (byte) ((mBytesRead + j) % 256);
            }
            mBytesRead += bytesToRead;
            return bytesToRead;
        }

        @Override
        public int getOffsetInBytes() {
            return mBytesRead;
        }

        @Override
        public int getFormat() {
            return AudioFormat.ENCODING_PCM_16BIT;
        }

        @Override
        public void release() {
        }
    }

    private class Eac3JocAudioSource implements AudioSource {
        private final InputStream mStream;
        private int mBytesRead;

        public Eac3JocAudioSource(int resource) {
            mStream = mContext.getResources().openRawResource(resource);
            Assert.assertNotNull("Stream is null when opening resource:" + resource, mStream);
            mBytesRead = 0;
        }

        @Override
        public int read(byte[] buffer, int numBytes) {
            try {
                int bytesRead = mStream.read(buffer, 0, numBytes);
                if (bytesRead < 0) {
                    return 0;
                }
                mBytesRead += bytesRead;
                return bytesRead;
            } catch (IOException e) {
                Assert.fail("Unable to read from stream: " + e.getMessage());
                return 0;
            }
        }

        @Override
        public int getOffsetInBytes() {
            return mBytesRead;
        }

        @Override
        public int getFormat() {
            return AudioFormat.ENCODING_E_AC3_JOC;
        }

        @Override
        public void release() {
            try {
                if (mStream != null) {
                    mStream.close();
                }
            } catch (IOException e) {
                Assert.fail("Unable to close asset file stream: " + e.getMessage());
            }
        }
    }

    private class AudioRecordThread extends Thread {
        private static final String TAG = "AudioRecordThread";
        private final AudioRecord mRecord;
        private final boolean mCheckAudioData;
        private final AtomicBoolean mStopped = new AtomicBoolean(false);
        private boolean mIsRecordingOutputCorrect = true;

        public AudioRecordThread(AudioRecord record, boolean checkAudioData) {
            mRecord = record;
            mCheckAudioData = checkAudioData;
        }

        public void startRecording() {
            mRecord.startRecording();
            Assert.assertEquals("recording didn't start", AudioRecord.RECORDSTATE_RECORDING,
                    mRecord.getRecordingState());
            Assert.assertEquals("recorder not routed from remote submix",
                    AudioDeviceInfo.TYPE_REMOTE_SUBMIX, mRecord.getRoutedDevice().getType());
            start();
        }

        public boolean isRecordingOutputCorrect() {
            return mIsRecordingOutputCorrect;
        }

        public void stopRecording() {
            mStopped.set(true);
            try {
                join();
            } catch (InterruptedException e) {
                Assert.fail("Unable to complete test successfully");
            }
        }

        @Override
        public void run() {
            final int kBufferSizeInBytes = AudioRecord.getMinBufferSize(
                    mRecord.getFormat().getSampleRate(), mRecord.getFormat().getChannelMask(),
                    mRecord.getFormat().getEncoding());
            byte[] audioData = new byte[kBufferSizeInBytes];
            byte[] referenceData = new byte[kBufferSizeInBytes];
            while (!mStopped.get() && mBytesToRead > 0) {
                int ret = mRecord.read(audioData, 0, Math.min(mBytesToRead, kBufferSizeInBytes),
                        AudioRecord.READ_BLOCKING);
                if (ret > 0) {
                    Log.v(TAG, "read " + ret + " bytes");
                    if (mCheckAudioData) {
                        int srcOffset = mRecordReferenceSource.getOffsetInBytes();
                        mRecordReferenceSource.read(referenceData, ret);
                        if (!Arrays.equals(audioData, 0, ret, referenceData, 0, ret)
                                && mIsRecordingOutputCorrect) {
                            mIsRecordingOutputCorrect = false;
                            int bufOffset = 0;
                            while (bufOffset < Integer.min(ret, kBufferSizeInBytes)) {
                                if (audioData[bufOffset] != referenceData[bufOffset]) {
                                    break;
                                }
                                ++bufOffset;
                            }
                            srcOffset += bufOffset;
                            Log.e(TAG, "Detected difference in AudioRecord output at reference "
                                    + "source offset " + srcOffset + " bytes:");
                            final int NUM_DEBUG_PRINT_BYTES = 256;
                            String expectedBytesStr = "";
                            String actualBytesStr = "";
                            for (int i = 0; i < Integer.min(NUM_DEBUG_PRINT_BYTES, ret - bufOffset);
                                    ++i) {
                                expectedBytesStr += String.format("%02x,",
                                        referenceData[bufOffset + i]);
                                actualBytesStr += String.format("%02x,", audioData[bufOffset + i]);
                            }
                            Log.e(TAG, "Expected:" + expectedBytesStr);
                            Log.e(TAG, "Actual: " + actualBytesStr);
                        }
                    }
                    mBytesToRead -= ret;
                } else if (ret < 0) {
                    Log.e(TAG, "read error:" + ret);
                    break;
                } else {
                    // No more data to read
                    break;
                }
            }
            mRecord.stop();
            mRecord.release();
        }
    }
}