/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.audio;

import static com.google.android.exoplayer2.util.Util.castNonNull;

import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.os.SystemClock;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Method;

/**
 * Wraps an {@link AudioTrack}, exposing a position based on {@link
 * AudioTrack#getPlaybackHeadPosition()} and {@link AudioTrack#getTimestamp(AudioTimestamp)}.
 *
 * <p>Call {@link #setAudioTrack(AudioTrack, int, int, int)} to set the audio track to wrap. Call
 * {@link #mayHandleBuffer(long)} if there is input data to write to the track. If it returns false,
 * the audio track position is stabilizing and no data may be written. Call {@link #start()}
 * immediately before calling {@link AudioTrack#play()}. Call {@link #pause()} when pausing the
 * track. Call {@link #handleEndOfStream(long)} when no more data will be written to the track. When
 * the audio track will no longer be used, call {@link #reset()}.
 */
/* package */ final class AudioTrackPositionTracker {

  /** Listener for position tracker events. */
  public interface Listener {

    /**
     * Called when the frame position is too far from the expected frame position.
     *
     * @param audioTimestampPositionFrames The frame position of the last known audio track
     *     timestamp.
     * @param audioTimestampSystemTimeUs The system time associated with the last known audio track
     *     timestamp, in microseconds.
     * @param systemTimeUs The current time.
     * @param playbackPositionUs The current playback head position in microseconds.
     */
    void onPositionFramesMismatch(
        long audioTimestampPositionFrames,
        long audioTimestampSystemTimeUs,
        long systemTimeUs,
        long playbackPositionUs);

    /**
     * Called when the system time associated with the last known audio track timestamp is
     * unexpectedly far from the current time.
     *
     * @param audioTimestampPositionFrames The frame position of the last known audio track
     *     timestamp.
     * @param audioTimestampSystemTimeUs The system time associated with the last known audio track
     *     timestamp, in microseconds.
     * @param systemTimeUs The current time.
     * @param playbackPositionUs The current playback head position in microseconds.
     */
    void onSystemTimeUsMismatch(
        long audioTimestampPositionFrames,
        long audioTimestampSystemTimeUs,
        long systemTimeUs,
        long playbackPositionUs);

    /**
     * Called when the audio track has provided an invalid latency.
     *
     * @param latencyUs The reported latency in microseconds.
     */
    void onInvalidLatency(long latencyUs);

    /**
     * Called when the audio track runs out of data to play.
     *
     * @param bufferSize The size of the sink's buffer, in bytes.
     * @param bufferSizeMs The size of the sink's buffer, in milliseconds, if it is configured for
     *     PCM output. {@link C#TIME_UNSET} if it is configured for encoded audio output, as the
     *     buffered media can have a variable bitrate so the duration may be unknown.
     */
    void onUnderrun(int bufferSize, long bufferSizeMs);
  }

  /** {@link AudioTrack} playback states. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, PLAYSTATE_PLAYING})
  private @interface PlayState {}
  /** @see AudioTrack#PLAYSTATE_STOPPED */
  private static final int PLAYSTATE_STOPPED = AudioTrack.PLAYSTATE_STOPPED;
  /** @see AudioTrack#PLAYSTATE_PAUSED */
  private static final int PLAYSTATE_PAUSED = AudioTrack.PLAYSTATE_PAUSED;
  /** @see AudioTrack#PLAYSTATE_PLAYING */
  private static final int PLAYSTATE_PLAYING = AudioTrack.PLAYSTATE_PLAYING;

  /**
   * AudioTrack timestamps are deemed spurious if they are offset from the system clock by more than
   * this amount.
   *
   * <p>This is a fail safe that should not be required on correctly functioning devices.
   */
  private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 5 * C.MICROS_PER_SECOND;

  /**
   * AudioTrack latencies are deemed impossibly large if they are greater than this amount.
   *
   * <p>This is a fail safe that should not be required on correctly functioning devices.
   */
  private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;

  /**
   * Duration for which the playback head position must remain stuck (see the SDK <= 29 workaround
   * in {@link #getPlaybackHeadPosition()}) before {@link #isStalled(long)} reports the track as
   * stalled and in need of recreation.
   */
  private static final long FORCE_RESET_WORKAROUND_TIMEOUT_MS = 200;

  /** Number of playhead offset samples kept and averaged to smooth the reported position. */
  private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10;
  /** Minimum interval between consecutive playhead offset samples, in microseconds. */
  private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
  /** Minimum interval between consecutive latency samples, in microseconds. */
  private static final int MIN_LATENCY_SAMPLE_INTERVAL_US = 500000;

  private final Listener listener;
  // Circular buffer of (playhead position - system time) offsets, averaged for smoothing.
  private final long[] playheadOffsets;

  @Nullable private AudioTrack audioTrack;
  private int outputPcmFrameSize;
  private int bufferSize;
  @Nullable private AudioTimestampPoller audioTimestampPoller;
  private int outputSampleRate;
  // Whether platform-bug workarounds for AC-3/E-AC-3 passthrough (SDK < 23) are required.
  private boolean needsPassthroughWorkarounds;
  // Buffer duration in microseconds for PCM output, or C.TIME_UNSET for encoded output.
  private long bufferSizeUs;

  // Average of playheadOffsets; added to the system time to derive a low-jitter position.
  private long smoothedPlayheadOffsetUs;
  private long lastPlayheadSampleTimeUs;

  // Reflective handle to the hidden AudioTrack.getLatency() method, or null if unavailable.
  @Nullable private Method getLatencyMethod;
  // Last sampled track latency (mixer/driver only, excluding the buffer), in microseconds.
  private long latencyUs;
  private boolean hasData;

  private boolean isOutputPcm;
  private long lastLatencySampleTimeUs;
  private long lastRawPlaybackHeadPosition;
  // Number of times the unsigned 32-bit raw playback head position has wrapped around.
  private long rawPlaybackHeadWrapCount;
  private long passthroughWorkaroundPauseOffset;
  private int nextPlayheadOffsetIndex;
  private int playheadOffsetCount;
  // Time at which handleEndOfStream was called, or C.TIME_UNSET if the stream hasn't ended.
  private long stopTimestampUs;
  // Time at which the playhead was first observed stuck at zero, or C.TIME_UNSET. See isStalled.
  private long forceResetWorkaroundTimeMs;
  private long stopPlaybackHeadPosition;
  private long endPlaybackHeadPosition;

  /**
   * Creates a new audio track position tracker.
   *
   * @param listener A listener for position tracking events.
   */
  public AudioTrackPositionTracker(Listener listener) {
    this.listener = Assertions.checkNotNull(listener);
    if (Util.SDK_INT >= 18) {
      try {
        getLatencyMethod = AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
      } catch (NoSuchMethodException e) {
        // There's no guarantee this method exists. Do nothing.
      }
    }
    playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
  }

  /**
   * Sets the {@link AudioTrack} to wrap. Subsequent method calls on this instance relate to this
   * track's position, until the next call to {@link #reset()}.
   *
   * @param audioTrack The audio track to wrap.
   * @param outputEncoding The encoding of the audio track.
   * @param outputPcmFrameSize For PCM output encodings, the frame size. The value is ignored
   *     otherwise.
   * @param bufferSize The audio track buffer size in bytes.
   */
  public void setAudioTrack(
      AudioTrack audioTrack,
      @C.Encoding int outputEncoding,
      int outputPcmFrameSize,
      int bufferSize) {
    this.audioTrack = audioTrack;
    this.outputPcmFrameSize = outputPcmFrameSize;
    this.bufferSize = bufferSize;
    audioTimestampPoller = new AudioTimestampPoller(audioTrack);
    outputSampleRate = audioTrack.getSampleRate();
    needsPassthroughWorkarounds = needsPassthroughWorkarounds(outputEncoding);
    isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
    // The buffer duration is only computable for PCM, where the frame size is fixed.
    bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
    lastRawPlaybackHeadPosition = 0;
    rawPlaybackHeadWrapCount = 0;
    passthroughWorkaroundPauseOffset = 0;
    hasData = false;
    stopTimestampUs = C.TIME_UNSET;
    forceResetWorkaroundTimeMs = C.TIME_UNSET;
    latencyUs = 0;
  }

  /**
   * Returns the current playback position of the wrapped audio track, in microseconds.
   *
   * @param sourceEnded Whether the input source has ended. If {@code true}, the sampled track
   *     latency is not subtracted from the playhead-derived position.
   * @return The playback position, in microseconds.
   */
  public long getCurrentPositionUs(boolean sourceEnded) {
    if (Assertions.checkNotNull(this.audioTrack).getPlayState() == PLAYSTATE_PLAYING) {
      maybeSampleSyncParams();
    }

    // If the device supports it, use the playback timestamp from AudioTrack.getTimestamp.
    // Otherwise, derive a smoothed position by sampling the track's frame position.
    long systemTimeUs = System.nanoTime() / 1000;
    AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller);
    if (audioTimestampPoller.hasTimestamp()) {
      // Calculate the speed-adjusted position using the timestamp (which may be in the future).
      long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
      long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
      if (!audioTimestampPoller.isTimestampAdvancing()) {
        return timestampPositionUs;
      }
      long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
      return timestampPositionUs + elapsedSinceTimestampUs;
    } else {
      long positionUs;
      if (playheadOffsetCount == 0) {
        // The AudioTrack has started, but we don't have any samples to compute a smoothed position.
        positionUs = getPlaybackHeadPositionUs();
      } else {
        // getPlaybackHeadPositionUs() only has a granularity of ~20 ms, so we base the position off
        // the system clock (and a smoothed offset between it and the playhead position) so as to
        // prevent jitter in the reported positions.
        positionUs = systemTimeUs + smoothedPlayheadOffsetUs;
      }
      if (!sourceEnded) {
        positionUs -= latencyUs;
      }
      return positionUs;
    }
  }

  /** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */
  public void start() {
    Assertions.checkNotNull(audioTimestampPoller).reset();
  }

  /** Returns whether the audio track is in the playing state. */
  public boolean isPlaying() {
    return Assertions.checkNotNull(audioTrack).getPlayState() == PLAYSTATE_PLAYING;
  }

  /**
   * Checks the state of the audio track and returns whether the caller can write data to the track.
   * Notifies {@link Listener#onUnderrun(int, long)} if the track has underrun.
   *
   * @param writtenFrames The number of frames that have been written.
   * @return Whether the caller can write data to the track.
   */
  public boolean mayHandleBuffer(long writtenFrames) {
    @PlayState int playState = Assertions.checkNotNull(audioTrack).getPlayState();
    if (needsPassthroughWorkarounds) {
      // An AC-3 audio track continues to play data written while it is paused. Stop writing so its
      // buffer empties. See [Internal: b/18899620].
      if (playState == PLAYSTATE_PAUSED) {
        // We force an underrun to pause the track, so don't notify the listener in this case.
        hasData = false;
        return false;
      }

      // A new AC-3 audio track's playback position continues to increase from the old track's
      // position for a short time after is has been released. Avoid writing data until the playback
      // head position actually returns to zero.
      if (playState == PLAYSTATE_STOPPED && getPlaybackHeadPosition() == 0) {
        return false;
      }
    }

    boolean hadData = hasData;
    hasData = hasPendingData(writtenFrames);
    // Notify only on the transition from having data to underrun while not stopped.
    if (hadData && !hasData && playState != PLAYSTATE_STOPPED && listener != null) {
      listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs));
    }

    return true;
  }

  /**
   * Returns an estimate of the number of additional bytes that can be written to the audio track's
   * buffer without running out of space.
   *
   * <p>May only be called if the output encoding is one of the PCM encodings.
   *
   * @param writtenBytes The number of bytes written to the audio track so far.
   * @return An estimate of the number of bytes that can be written.
   */
  public int getAvailableBufferSize(long writtenBytes) {
    // Bytes written but not yet played out occupy space in the track's buffer.
    int bytesPending = (int) (writtenBytes - (getPlaybackHeadPosition() * outputPcmFrameSize));
    return bufferSize - bytesPending;
  }

  /** Returns whether the track is in an invalid state and must be recreated. */
  public boolean isStalled(long writtenFrames) {
    // Stalled if the playhead has been stuck (see getPlaybackHeadPosition) for longer than the
    // workaround timeout despite data having been written.
    return forceResetWorkaroundTimeMs != C.TIME_UNSET
        && writtenFrames > 0
        && SystemClock.elapsedRealtime() - forceResetWorkaroundTimeMs
            >= FORCE_RESET_WORKAROUND_TIMEOUT_MS;
  }

  /**
   * Records the writing position at which the stream ended, so that the reported position can
   * continue to increment while remaining data is played out.
   *
   * @param writtenFrames The number of frames that have been written.
   */
  public void handleEndOfStream(long writtenFrames) {
    stopPlaybackHeadPosition = getPlaybackHeadPosition();
    stopTimestampUs = SystemClock.elapsedRealtime() * 1000;
    endPlaybackHeadPosition = writtenFrames;
  }

  /**
   * Returns whether the audio track has any pending data to play out at its current position.
   *
   * @param writtenFrames The number of frames written to the audio track.
   * @return Whether the audio track has any pending data to play out.
   */
  public boolean hasPendingData(long writtenFrames) {
    return writtenFrames > getPlaybackHeadPosition()
        || forceHasPendingData();
  }

  /**
   * Pauses the audio track position tracker, returning whether the audio track needs to be paused
   * to cause playback to pause. If {@code false} is returned the audio track will pause without
   * further interaction, as the end of stream has been handled.
   */
  public boolean pause() {
    resetSyncParams();
    if (stopTimestampUs == C.TIME_UNSET) {
      // The audio track is going to be paused, so reset the timestamp poller to ensure it doesn't
      // supply an advancing position.
      Assertions.checkNotNull(audioTimestampPoller).reset();
      return true;
    }
    // We've handled the end of the stream already, so there's no need to pause the track.
    return false;
  }

  /**
   * Resets the position tracker. Should be called when the audio track previous passed to {@link
   * #setAudioTrack(AudioTrack, int, int, int)} is no longer in use.
   */
  public void reset() {
    resetSyncParams();
    audioTrack = null;
    audioTimestampPoller = null;
  }

  /**
   * Samples the playhead position to update the smoothed system-clock/playhead offset, and (when
   * safe on this device) polls the audio timestamp and latency.
   */
  private void maybeSampleSyncParams() {
    long playbackPositionUs = getPlaybackHeadPositionUs();
    if (playbackPositionUs == 0) {
      // The AudioTrack hasn't output anything yet.
      return;
    }
    long systemTimeUs = System.nanoTime() / 1000;
    if (systemTimeUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
      // Take a new sample and update the smoothed offset between the system clock and the playhead.
      playheadOffsets[nextPlayheadOffsetIndex] = playbackPositionUs - systemTimeUs;
      nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
      if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
        playheadOffsetCount++;
      }
      lastPlayheadSampleTimeUs = systemTimeUs;
      smoothedPlayheadOffsetUs = 0;
      for (int i = 0; i < playheadOffsetCount; i++) {
        smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
      }
    }

    if (needsPassthroughWorkarounds) {
      // Don't sample the timestamp and latency if this is an AC-3 passthrough AudioTrack on
      // platform API versions 21/22, as incorrect values are returned. See [Internal: b/21145353].
      return;
    }

    maybePollAndCheckTimestamp(systemTimeUs, playbackPositionUs);
    maybeUpdateLatency(systemTimeUs);
  }

  /**
   * Polls the timestamp poller and accepts or rejects any new timestamp after sanity-checking it
   * against the system clock and the playhead position, notifying the listener on mismatches.
   */
  private void maybePollAndCheckTimestamp(long systemTimeUs, long playbackPositionUs) {
    AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller);
    if (!audioTimestampPoller.maybePollTimestamp(systemTimeUs)) {
      return;
    }

    // Perform sanity checks on the timestamp and accept/reject it.
    long audioTimestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs();
    long audioTimestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
    if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
      listener.onSystemTimeUsMismatch(
          audioTimestampPositionFrames,
          audioTimestampSystemTimeUs,
          systemTimeUs,
          playbackPositionUs);
      audioTimestampPoller.rejectTimestamp();
    } else if (Math.abs(framesToDurationUs(audioTimestampPositionFrames) - playbackPositionUs)
        > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
      listener.onPositionFramesMismatch(
          audioTimestampPositionFrames,
          audioTimestampSystemTimeUs,
          systemTimeUs,
          playbackPositionUs);
      audioTimestampPoller.rejectTimestamp();
    } else {
      audioTimestampPoller.acceptTimestamp();
    }
  }

  /**
   * Re-samples {@link #latencyUs} via the hidden {@code AudioTrack.getLatency()} method, if it is
   * available, output is PCM, and the minimum sample interval has elapsed. Notifies the listener
   * and discards the sample if the reported latency is implausibly large.
   */
  private void maybeUpdateLatency(long systemTimeUs) {
    if (isOutputPcm
        && getLatencyMethod != null
        && systemTimeUs - lastLatencySampleTimeUs >= MIN_LATENCY_SAMPLE_INTERVAL_US) {
      try {
        // Compute the audio track latency, excluding the latency due to the buffer (leaving
        // latency due to the mixer and audio hardware driver).
        latencyUs =
            castNonNull((Integer) getLatencyMethod.invoke(Assertions.checkNotNull(audioTrack)))
                    * 1000L
                - bufferSizeUs;
        // Sanity check that the latency is non-negative.
        latencyUs = Math.max(latencyUs, 0);
        // Sanity check that the latency isn't too large.
        if (latencyUs > MAX_LATENCY_US) {
          listener.onInvalidLatency(latencyUs);
          latencyUs = 0;
        }
      } catch (Exception e) {
        // The method existed, but doesn't work. Don't try again.
        getLatencyMethod = null;
      }
      lastLatencySampleTimeUs = systemTimeUs;
    }
  }

  /** Converts a frame count at the output sample rate to a duration in microseconds. */
  private long framesToDurationUs(long frameCount) {
    return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
  }

  /** Clears the playhead-offset smoothing state. */
  private void resetSyncParams() {
    smoothedPlayheadOffsetUs = 0;
    playheadOffsetCount = 0;
    nextPlayheadOffsetIndex = 0;
    lastPlayheadSampleTimeUs = 0;
  }

  /**
   * If passthrough workarounds are enabled, pausing is implemented by forcing the AudioTrack to
   * underrun. In this case, still behave as if we have pending data, otherwise writing won't
   * resume.
   */
  private boolean forceHasPendingData() {
    return needsPassthroughWorkarounds
        && Assertions.checkNotNull(audioTrack).getPlayState() == AudioTrack.PLAYSTATE_PAUSED
        && getPlaybackHeadPosition() == 0;
  }

  /**
   * Returns whether to work around problems with passthrough audio tracks. See [Internal:
   * b/18899620, b/19187573, b/21145353].
   */
  private static boolean needsPassthroughWorkarounds(@C.Encoding int outputEncoding) {
    return Util.SDK_INT < 23
        && (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3);
  }

  /** Returns {@link #getPlaybackHeadPosition()} converted to microseconds. */
  private long getPlaybackHeadPositionUs() {
    return framesToDurationUs(getPlaybackHeadPosition());
  }

  /**
   * {@link AudioTrack#getPlaybackHeadPosition()} returns a value intended to be interpreted as an
   * unsigned 32 bit integer, which also wraps around periodically. This method returns the playback
   * head position as a long that will only wrap around if the value exceeds {@link Long#MAX_VALUE}
   * (which in practice will never happen).
   *
   * @return The playback head position, in frames.
   */
  private long getPlaybackHeadPosition() {
    AudioTrack audioTrack = Assertions.checkNotNull(this.audioTrack);
    if (stopTimestampUs != C.TIME_UNSET) {
      // Simulate the playback head position up to the total number of frames submitted.
      long elapsedTimeSinceStopUs = (SystemClock.elapsedRealtime() * 1000) - stopTimestampUs;
      long framesSinceStop = (elapsedTimeSinceStopUs * outputSampleRate) / C.MICROS_PER_SECOND;
      return Math.min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop);
    }

    int state = audioTrack.getPlayState();
    if (state == PLAYSTATE_STOPPED) {
      // The audio track hasn't been started.
      return 0;
    }

    // Mask interprets the returned signed int as an unsigned 32-bit value.
    long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
    if (needsPassthroughWorkarounds) {
      // Work around an issue with passthrough/direct AudioTracks on platform API versions 21/22
      // where the playback head position jumps back to zero on paused passthrough/direct audio
      // tracks. See [Internal: b/19187573].
      if (state == PLAYSTATE_PAUSED && rawPlaybackHeadPosition == 0) {
        passthroughWorkaroundPauseOffset = lastRawPlaybackHeadPosition;
      }
      rawPlaybackHeadPosition += passthroughWorkaroundPauseOffset;
    }

    if (Util.SDK_INT <= 29) {
      if (rawPlaybackHeadPosition == 0
          && lastRawPlaybackHeadPosition > 0
          && state == PLAYSTATE_PLAYING) {
        // If connecting a Bluetooth audio device fails, the AudioTrack may be left in a state
        // where its Java API is in the playing state, but the native track is stopped. When this
        // happens the playback head position gets stuck at zero. In this case, return the old
        // playback head position and force the track to be reset after
        // {@link #FORCE_RESET_WORKAROUND_TIMEOUT_MS} has elapsed.
        if (forceResetWorkaroundTimeMs == C.TIME_UNSET) {
          forceResetWorkaroundTimeMs = SystemClock.elapsedRealtime();
        }
        return lastRawPlaybackHeadPosition;
      } else {
        forceResetWorkaroundTimeMs = C.TIME_UNSET;
      }
    }

    if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
      // The value must have wrapped around.
      rawPlaybackHeadWrapCount++;
    }
    lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
    // Extend the unsigned 32-bit position with the accumulated wrap count.
    return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
  }
}