/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VideoRenderQualityTracker"
#include <utils/Log.h>

#include <media/stagefright/VideoRenderQualityTracker.h>

#include <assert.h>
#include <charconv>
#include <cmath>
#include <sstream>
#include <stdio.h>
#include <sys/time.h>

#include <android-base/parsebool.h>
#include <android-base/parseint.h>

namespace android {

using android::base::ParseBoolResult;

static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
        VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;

typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
        GetServerConfigurableFlagFn;

static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
                                      char const *flagNameSuffix, bool *value) {
    std::string flagName("render_metrics_");
    flagName.append(flagNameSuffix);
    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName,
                                                          *value ? "true" : "false");
    switch (android::base::ParseBool(valueStr)) {
    case ParseBoolResult::kTrue: *value = true; break;
    case ParseBoolResult::kFalse: *value = false; break;
    case ParseBoolResult::kError:
        ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
              valueStr.c_str());
        break;
    }
}
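
// For example (illustrative values): a flagNameSuffix of "enabled" queries the flag
// "render_metrics_enabled" in the "media_native" namespace; a server value of "true" or "false"
// overrides *value, while an empty or unparsable value logs a warning and leaves the caller's
// default in place.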

static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
                                      char const *flagNameSuffix, int32_t *value) {
    char defaultStr[12];
    snprintf(defaultStr, sizeof(defaultStr), "%d", int(*value));
    std::string flagName("render_metrics_");
    flagName.append(flagNameSuffix);
    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, defaultStr);
    if (!android::base::ParseInt(valueStr.c_str(), value) || valueStr.size() == 0) {
        ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
              valueStr.c_str());
        return;
    }
}

template<typename T>
static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
                                      char const *flagNameSuffix, std::vector<T> *value) {
    std::stringstream sstr;
    for (int i = 0; i < value->size(); ++i) {
        if (i != 0) {
            sstr << ",";
        }
        sstr << (*value)[i];
    }
    std::string flagName("render_metrics_");
    flagName.append(flagNameSuffix);
    std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, sstr.str());
    if (valueStr.size() == 0) {
        return;
    }
    // note: using android::base::Tokenize fails to catch parsing failures for values ending in ','
    std::vector<T> newValues;
    const char *p = valueStr.c_str();
    const char *last = p + valueStr.size();
    while (p != last) {
        if (*p == ',') {
            p++;
        }
        T value = -1;
        auto [ptr, error] = std::from_chars(p, last, value);
        if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
            ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
                  valueStr.c_str());
            return;
        }
        p = ptr;
        newValues.push_back(value);
    }
    *value = std::move(newValues);
}
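
// For example (illustrative values): a server value of "1,20,40" for a bucket flag yields
// {1, 20, 40}, while a malformed value such as "1,,20" or one with a trailing comma fails
// std::from_chars, logs a warning, and leaves the caller's default untouched - the case the
// note above calls out for android::base::Tokenize.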

VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
    clear();
}

void VideoRenderQualityMetrics::clear() {
    firstRenderTimeUs = 0;
    frameReleasedCount = 0;
    frameRenderedCount = 0;
    frameDroppedCount = 0;
    frameSkippedCount = 0;
    contentFrameRate = FRAME_RATE_UNDETERMINED;
    desiredFrameRate = FRAME_RATE_UNDETERMINED;
    actualFrameRate = FRAME_RATE_UNDETERMINED;
    freezeEventCount = 0;
    freezeDurationMsHistogram.clear();
    freezeDistanceMsHistogram.clear();
    judderEventCount = 0;
    judderScoreHistogram.clear();
}

VideoRenderQualityTracker::Configuration
        VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
            GetServerConfigurableFlagFn getServerConfigurableFlagFn) {
    VideoRenderQualityTracker::Configuration c;
#define getFlag(FIELDNAME, FLAGNAME) \
    getServerConfigurableFlag(getServerConfigurableFlagFn, FLAGNAME, &c.FIELDNAME)
    getFlag(enabled, "enabled");
    getFlag(areSkippedFramesDropped, "are_skipped_frames_dropped");
    getFlag(maxExpectedContentFrameDurationUs, "max_expected_content_frame_duration_us");
    getFlag(frameRateDetectionToleranceUs, "frame_rate_detection_tolerance_us");
    getFlag(liveContentFrameDropToleranceUs, "live_content_frame_drop_tolerance_us");
    getFlag(freezeDurationMsHistogramBuckets, "freeze_duration_ms_histogram_buckets");
    getFlag(freezeDurationMsHistogramToScore, "freeze_duration_ms_histogram_to_score");
    getFlag(freezeDistanceMsHistogramBuckets, "freeze_distance_ms_histogram_buckets");
    getFlag(freezeEventMax, "freeze_event_max");
    getFlag(freezeEventDetailsMax, "freeze_event_details_max");
    getFlag(freezeEventDistanceToleranceMs, "freeze_event_distance_tolerance_ms");
    getFlag(judderErrorToleranceUs, "judder_error_tolerance_us");
    getFlag(judderScoreHistogramBuckets, "judder_score_histogram_buckets");
    getFlag(judderScoreHistogramToScore, "judder_score_histogram_to_score");
    getFlag(judderEventMax, "judder_event_max");
    getFlag(judderEventDetailsMax, "judder_event_details_max");
    getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
#undef getFlag
    return c;
}
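
// For example, getFlag(enabled, "enabled") above expands to
// getServerConfigurableFlag(getServerConfigurableFlagFn, "enabled", &c.enabled), so the overload
// chosen (bool, int32_t, or std::vector) follows the type of the configuration field.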

VideoRenderQualityTracker::Configuration::Configuration() {
    enabled = false;

    // Assume that the app is skipping frames because it's detected that the frame couldn't be
    // rendered in time.
    areSkippedFramesDropped = true;

    // 400ms is 8 frames at 20 frames per second and 24 frames at 60 frames per second
    maxExpectedContentFrameDurationUs = 400 * 1000;

    // Allow for 2 milliseconds of deviation when detecting frame rates
    frameRateDetectionToleranceUs = 2 * 1000;

    // Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
    // because of frame drops for live content, or because the user is seeking.
    liveContentFrameDropToleranceUs = 200 * 1000;

    // Freeze configuration
    freezeDurationMsHistogramBuckets = {1, 20, 40, 60, 80, 100, 120, 150, 175, 225, 300, 400, 500};
    freezeDurationMsHistogramToScore = {1,  1,  1,  1,  1,   1,   1,   1,   1,   1,   1,   1,   1};
    freezeDistanceMsHistogramBuckets = {0, 20, 100, 400, 1000, 2000, 3000, 4000, 8000, 15000, 30000,
                                        60000};
    freezeEventMax = 0; // enabled only when debugging
    freezeEventDetailsMax = 20;
    freezeEventDistanceToleranceMs = 60000; // lump freeze occurrences together when 60s or less

    // Judder configuration
    judderErrorToleranceUs = 2000;
    judderScoreHistogramBuckets = {1, 4, 5, 9, 11, 20, 30, 40, 50, 60, 70, 80};
    judderScoreHistogramToScore = {1, 1, 1, 1,  1,  1,  1,  1,  1,  1,  1,  1};
    judderEventMax = 0; // enabled only when debugging
    judderEventDetailsMax = 20;
    judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less
}

VideoRenderQualityTracker::VideoRenderQualityTracker() : mConfiguration(Configuration()) {
    configureHistograms(mMetrics, mConfiguration);
    clear();
}

VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
        mConfiguration(configuration) {
    configureHistograms(mMetrics, mConfiguration);
    clear();
}

void VideoRenderQualityTracker::onTunnelFrameQueued(int64_t contentTimeUs) {
    if (!mConfiguration.enabled) {
        return;
    }

    // Since P-frames are queued out of order, hold onto the P-frame until we can track it in
    // render order. This works only because today's encoding algorithms allow B-frames to refer
    // to only ONE P-frame that comes after them. If the cardinality of P-frames in a single
    // mini-GOP is increased, this algorithm breaks down.
    if (mTunnelFrameQueuedContentTimeUs == -1) {
        mTunnelFrameQueuedContentTimeUs = contentTimeUs;
    } else if (contentTimeUs < mTunnelFrameQueuedContentTimeUs) {
        onFrameReleased(contentTimeUs, 0);
    } else {
        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
        mTunnelFrameQueuedContentTimeUs = contentTimeUs;
    }
}
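
// For example (illustrative content times, in microseconds): frames queued as 0, 100000 (P),
// 33333 (B), 66666 (B), 200000 (P) are released for tracking in render order - 0 is released when
// 100000 is queued, 33333 and 66666 are released immediately because they precede the held
// P-frame, and 100000 is released only when the next P-frame (200000) arrives.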

void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
    if (!mConfiguration.enabled) {
        return;
    }

    // Frames skipped at the beginning shouldn't really be counted as skipped frames, since the
    // app might be seeking to a starting point that isn't the first key frame.
    if (mLastRenderTimeUs == -1) {
        return;
    }

    resetIfDiscontinuity(contentTimeUs, -1);

    // Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
    // app could be terminating the playback. The pending count will be added to the metrics if and
    // when the next frame is rendered.
    mPendingSkippedFrameContentTimeUsList.push_back(contentTimeUs);
}

void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs) {
    onFrameReleased(contentTimeUs, nowUs() * 1000);
}

void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs,
                                                int64_t desiredRenderTimeNs) {
    if (!mConfiguration.enabled) {
        return;
    }

    int64_t desiredRenderTimeUs = desiredRenderTimeNs / 1000;
    resetIfDiscontinuity(contentTimeUs, desiredRenderTimeUs);
    mMetrics.frameReleasedCount++;
    mNextExpectedRenderedFrameQueue.push({contentTimeUs, desiredRenderTimeUs});
    mLastContentTimeUs = contentTimeUs;
}

void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
                                                FreezeEvent *freezeEventOut,
                                                JudderEvent *judderEventOut) {
    if (!mConfiguration.enabled) {
        return;
    }

    int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;

    if (mLastRenderTimeUs != -1) {
        mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
    }
    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
    // frames since the app is not skipping them to terminate playback.
    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
        processMetricsForSkippedFrame(contentTimeUs);
    }
    mPendingSkippedFrameContentTimeUsList = {};

    // We can render a pending queued frame if it's the last frame of the video, so release it
    // immediately.
    if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
        mTunnelFrameQueuedContentTimeUs = -1;
    }

    static const FrameInfo noFrame = {-1, -1};
    FrameInfo nextExpectedFrame = noFrame;
    while (!mNextExpectedRenderedFrameQueue.empty()) {
        nextExpectedFrame = mNextExpectedRenderedFrameQueue.front();
        mNextExpectedRenderedFrameQueue.pop();
        // Happy path - the rendered frame is what we expected it to be
        if (contentTimeUs == nextExpectedFrame.contentTimeUs) {
            break;
        }
        // This isn't really supposed to happen - the next rendered frame should be the expected
        // frame, or, if there are frame drops, it will be a frame later in the content stream
        if (contentTimeUs < nextExpectedFrame.contentTimeUs) {
            ALOGW("Rendered frame is earlier than the next expected frame (%lld, %lld)",
                  (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
            break;
        }
        processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
                                      nextExpectedFrame.desiredRenderTimeUs);
    }
    processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
                                   nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
                                   freezeEventOut, judderEventOut);
    mLastRenderTimeUs = actualRenderTimeUs;
}
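
// For example (illustrative content times): if frames 0, 33333, and 66666 were released but the
// next onFrameRendered() call reports 66666, the loop above pops 0 and 33333 from the queue and
// counts them as dropped before 66666 is processed as the rendered frame.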

VideoRenderQualityTracker::FreezeEvent VideoRenderQualityTracker::getAndResetFreezeEvent() {
    FreezeEvent event = std::move(mFreezeEvent);
    mFreezeEvent.valid = false;
    return event;
}

VideoRenderQualityTracker::JudderEvent VideoRenderQualityTracker::getAndResetJudderEvent() {
    JudderEvent event = std::move(mJudderEvent);
    mJudderEvent.valid = false;
    return event;
}

const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() {
    if (!mConfiguration.enabled) {
        return mMetrics;
    }

    mMetrics.freezeScore = 0;
    if (mConfiguration.freezeDurationMsHistogramToScore.size() ==
        mMetrics.freezeDurationMsHistogram.size()) {
        for (int i = 0; i < mMetrics.freezeDurationMsHistogram.size(); ++i) {
            mMetrics.freezeScore += mMetrics.freezeDurationMsHistogram[i] *
                    mConfiguration.freezeDurationMsHistogramToScore[i];
        }
    }
    mMetrics.freezeRate = float(double(mMetrics.freezeDurationMsHistogram.getSum()) /
            mRenderDurationMs);

    mMetrics.judderScore = 0;
    if (mConfiguration.judderScoreHistogramToScore.size() == mMetrics.judderScoreHistogram.size()) {
        for (int i = 0; i < mMetrics.judderScoreHistogram.size(); ++i) {
            mMetrics.judderScore += mMetrics.judderScoreHistogram[i] *
                    mConfiguration.judderScoreHistogramToScore[i];
        }
    }
    mMetrics.judderRate = float(double(mMetrics.judderScoreHistogram.getCount()) /
            (mMetrics.frameReleasedCount + mMetrics.frameSkippedCount));

    return mMetrics;
}
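
// For example, with the default configuration (every histogram-to-score weight is 1), freezeScore
// is roughly the number of freezes recorded and judderScore is roughly the number of judder
// occurrences; freezeRate is the total frozen time divided by the total rendered duration, and
// judderRate is the number of judder occurrences divided by the number of frames released or
// skipped.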

void VideoRenderQualityTracker::clear() {
    mRenderDurationMs = 0;
    mMetrics.clear();
    resetForDiscontinuity();
}

void VideoRenderQualityTracker::resetForDiscontinuity() {
    mLastContentTimeUs = -1;
    mLastRenderTimeUs = -1;
    mLastFreezeEndTimeUs = -1;
    mLastJudderEndTimeUs = -1;
    mWasPreviousFrameDropped = false;
    mFreezeEvent.valid = false;
    mJudderEvent.valid = false;

    // Don't worry about tracking frame rendering times from now up until playback catches up to
    // the discontinuity. While stuttering or freezing could be found in the next few frames, the
    // impact to the user is minimal, so it's better to just keep things simple and not bother.
    mNextExpectedRenderedFrameQueue = {};
    mTunnelFrameQueuedContentTimeUs = -1;

    // Ignore any frames that were skipped just prior to the discontinuity.
    mPendingSkippedFrameContentTimeUsList = {};

    // All frame durations can now be ignored since all bets are off on what the render durations
    // should be after the discontinuity.
    for (int i = 0; i < FrameDurationUs::SIZE; ++i) {
        mActualFrameDurationUs[i] = -1;
        mDesiredFrameDurationUs[i] = -1;
        mContentFrameDurationUs[i] = -1;
    }
    mActualFrameDurationUs.priorTimestampUs = -1;
    mDesiredFrameDurationUs.priorTimestampUs = -1;
    mContentFrameDurationUs.priorTimestampUs = -1;
}

bool VideoRenderQualityTracker::resetIfDiscontinuity(int64_t contentTimeUs,
                                                     int64_t desiredRenderTimeUs) {
    if (mLastContentTimeUs == -1) {
        resetForDiscontinuity();
        return true;
    }
    if (contentTimeUs < mLastContentTimeUs) {
        ALOGI("Video playback jumped %d ms backwards in content time (%d -> %d)",
              int((mLastContentTimeUs - contentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
              int(contentTimeUs / 1000));
        resetForDiscontinuity();
        return true;
    }
    if (contentTimeUs - mLastContentTimeUs > mConfiguration.maxExpectedContentFrameDurationUs) {
        // The content frame duration could be long due to frame drops for live content. This can be
        // detected by looking at the app's desired rendering duration. If the app's rendered frame
        // duration is roughly the same as the content's frame duration, then it is assumed that
        // the forward discontinuity is due to frame drops for live content. A false positive can
        // occur if the time the user spends seeking is equal to the duration of the seek. This is
        // very unlikely to occur in practice but CAN occur - the user starts seeking forward, gets
        // distracted, and then returns to seeking forward.
        bool skippedForwardDueToLiveContentFrameDrops = false;
        if (desiredRenderTimeUs != -1) {
            int64_t contentFrameDurationUs = contentTimeUs - mLastContentTimeUs;
            int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
            skippedForwardDueToLiveContentFrameDrops =
                    abs(contentFrameDurationUs - desiredFrameDurationUs) <
                    mConfiguration.liveContentFrameDropToleranceUs;
        }
        if (!skippedForwardDueToLiveContentFrameDrops) {
            ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
                int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
                int(contentTimeUs / 1000));
            resetForDiscontinuity();
            return true;
        }
    }
    return false;
}
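
// For example (illustrative values): if the content time jumps forward by 5 seconds, the jump is
// attributed to live-content frame drops only when the desired render time also advanced by
// roughly 5 seconds (within liveContentFrameDropToleranceUs, 200ms by default); if the desired
// render time advanced by only one frame, the jump is treated as a seek and the tracker resets.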

void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTimeUs) {
    mMetrics.frameSkippedCount++;
    if (mConfiguration.areSkippedFramesDropped) {
        processMetricsForDroppedFrame(contentTimeUs, -1);
        return;
    }
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, -1);
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    mWasPreviousFrameDropped = false;
}

void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
                                                              int64_t desiredRenderTimeUs) {
    mMetrics.frameDroppedCount++;
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
    mWasPreviousFrameDropped = true;
}

void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
                                                               int64_t desiredRenderTimeUs,
                                                               int64_t actualRenderTimeUs,
                                                               FreezeEvent *freezeEventOut,
                                                               JudderEvent *judderEventOut) {
    // Capture the timestamp at which the first frame was rendered
    if (mMetrics.firstRenderTimeUs == 0) {
        mMetrics.firstRenderTimeUs = actualRenderTimeUs;
    }
    // Capture the timestamp at which the last frame was rendered
    mMetrics.lastRenderTimeUs = actualRenderTimeUs;

    mMetrics.frameRenderedCount++;

    // The content time is -1 when it was rendered after a discontinuity (e.g. seek) was detected.
    // So, even though a frame was rendered, its impact on the user is insignificant, so don't do
    // anything other than count it as a rendered frame.
    if (contentTimeUs == -1) {
        return;
    }
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
    updateFrameDurations(mActualFrameDurationUs, actualRenderTimeUs);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);

    // If the previous frame was dropped, there was a freeze if we've already rendered a frame
    if (mWasPreviousFrameDropped && mLastRenderTimeUs != -1) {
        processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
                      mMetrics, mConfiguration);
        mLastFreezeEndTimeUs = actualRenderTimeUs;
    }
    maybeCaptureFreezeEvent(actualRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent, mMetrics,
                            mConfiguration, freezeEventOut);

    // Judder is computed on the prior video frame, not the current video frame
    int64_t judderScore = computePreviousJudderScore(mActualFrameDurationUs,
                                                     mContentFrameDurationUs,
                                                     mConfiguration);
    if (judderScore != 0) {
        int64_t judderTimeUs = actualRenderTimeUs - mActualFrameDurationUs[0] -
                mActualFrameDurationUs[1];
        processJudder(judderScore, judderTimeUs, mLastJudderEndTimeUs, mActualFrameDurationUs,
                      mContentFrameDurationUs, mJudderEvent, mMetrics, mConfiguration);
        mLastJudderEndTimeUs = judderTimeUs + mActualFrameDurationUs[1];
    }
    maybeCaptureJudderEvent(actualRenderTimeUs, mLastJudderEndTimeUs, mJudderEvent, mMetrics,
                            mConfiguration, judderEventOut);

    mWasPreviousFrameDropped = false;
}

void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
                                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
                                              VideoRenderQualityMetrics &m,
                                              const Configuration &c) {
    int32_t durationMs = int32_t((actualRenderTimeUs - lastRenderTimeUs) / 1000);
    m.freezeDurationMsHistogram.insert(durationMs);
    int32_t distanceMs = -1;
    if (lastFreezeEndTimeUs != -1) {
        // The distance to the last freeze is measured from the end of the last freeze to the start
        // of this freeze.
        distanceMs = int32_t((lastRenderTimeUs - lastFreezeEndTimeUs) / 1000);
        m.freezeDistanceMsHistogram.insert(distanceMs);
    }
    if (c.freezeEventMax > 0) {
        if (e.valid == false) {
            m.freezeEventCount++;
            e.valid = true;
            e.initialTimeUs = lastRenderTimeUs;
            e.durationMs = 0;
            e.sumDurationMs = 0;
            e.sumDistanceMs = 0;
            e.count = 0;
            e.details.durationMs.clear();
            e.details.distanceMs.clear();
        // The first occurrence in the event should not have the distance recorded as part of the
        // event, because it belongs in a vacuum between two events. However we still want the
        // distance recorded in the details to calculate times in all details in all events.
        } else if (distanceMs != -1) {
            e.durationMs += distanceMs;
            e.sumDistanceMs += distanceMs;
        }
        e.durationMs += durationMs;
        e.count++;
        e.sumDurationMs += durationMs;
        if (e.details.durationMs.size() < c.freezeEventDetailsMax) {
            e.details.durationMs.push_back(durationMs);
            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
        }
    }
}
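
// For example (illustrative timestamps): if the previous frame rendered at t=1.0s, this frame
// renders at t=1.2s after one or more drops, and the previous freeze ended at t=0.9s, then a
// 200ms duration is added to the duration histogram and a 100ms distance (end of the previous
// freeze to the start of this one) is added to the distance histogram.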

void VideoRenderQualityTracker::maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,
                                                        int64_t lastFreezeEndTimeUs, FreezeEvent &e,
                                                        const VideoRenderQualityMetrics &m,
                                                        const Configuration &c,
                                                        FreezeEvent *freezeEventOut) {
    if (lastFreezeEndTimeUs == -1 || !e.valid) {
        return;
    }
    // Future freeze occurrences are still pulled into the current freeze event if under tolerance
    int64_t distanceMs = (actualRenderTimeUs - lastFreezeEndTimeUs) / 1000;
    if (distanceMs < c.freezeEventDistanceToleranceMs) {
        return;
    }
    if (freezeEventOut != nullptr && m.freezeEventCount <= c.freezeEventMax) {
        *freezeEventOut = std::move(e);
    }
    // start recording a new freeze event after pushing the current one back to the caller
    e.valid = false;
}

int64_t VideoRenderQualityTracker::computePreviousJudderScore(
        const FrameDurationUs &actualFrameDurationUs,
        const FrameDurationUs &contentFrameDurationUs,
        const Configuration &c) {
    // If the frame before or after was dropped, then don't generate a judder score, since any
    // problems with frame drops are scored as a freeze instead.
    if (actualFrameDurationUs[0] == -1 || actualFrameDurationUs[1] == -1 ||
        actualFrameDurationUs[2] == -1) {
        return 0;
    }

    // Don't score judder for when playback is paused or rebuffering (long frame duration), or if
    // the player is intentionally playing each frame at a slow rate (e.g. half-rate). If the long
    // frame duration was unintentional, it is assumed that this will be coupled with a later frame
    // drop, and be scored as a freeze instead of judder.
    if (actualFrameDurationUs[1] >= 2 * contentFrameDurationUs[1]) {
        return 0;
    }

    // The judder score is based on the error of this frame
    int64_t errorUs = actualFrameDurationUs[1] - contentFrameDurationUs[1];
    // Don't score judder if this frame has low error, even if the previous frame had high error
    if (abs(errorUs) < c.judderErrorToleranceUs) {
        return 0;
    }

    // Add a penalty if this frame has judder that amplifies the problem introduced by previous
    // judder, instead of catching up for the previous judder (50, 16, 16, 50) vs (50, 16, 50, 16)
    int64_t previousErrorUs = actualFrameDurationUs[2] - contentFrameDurationUs[2];
    // Don't add the penalty for errors from the previous frame if the previous frame has low error
    if (abs(previousErrorUs) >= c.judderErrorToleranceUs) {
        errorUs = abs(errorUs) + abs(errorUs + previousErrorUs);
    }

    // Avoid scoring judder for 3:2 pulldown or other minimally-small frame duration errors
    if (abs(errorUs) < contentFrameDurationUs[1] / 4) {
        return 0;
    }

    return abs(errorUs) / 1000; // error in millis to keep numbers small
}
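
// For example (illustrative durations): with 30fps content (33333us frames) and the default
// 2000us tolerance, a prior frame shown for 50000us that follows a 16667us frame is catching up
// for the earlier error, so the score is (16667 + 1) / 1000 = 16; a 50000us frame that follows
// another 50000us frame compounds the error, so the score is (16667 + 33334) / 1000 = 50 - the
// (50, 16, 16, 50) vs (50, 16, 50, 16) contrast described above.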

void VideoRenderQualityTracker::processJudder(int32_t judderScore, int64_t judderTimeUs,
                                              int64_t lastJudderEndTime,
                                              const FrameDurationUs &actualDurationUs,
                                              const FrameDurationUs &contentDurationUs,
                                              JudderEvent &e, VideoRenderQualityMetrics &m,
                                              const Configuration &c) {
    int32_t distanceMs = -1;
    if (lastJudderEndTime != -1) {
        distanceMs = int32_t((judderTimeUs - lastJudderEndTime) / 1000);
    }
    m.judderScoreHistogram.insert(judderScore);
    if (c.judderEventMax > 0) {
        if (!e.valid) {
            m.judderEventCount++;
            e.valid = true;
            e.initialTimeUs = judderTimeUs;
            e.durationMs = 0;
            e.sumScore = 0;
            e.sumDistanceMs = 0;
            e.count = 0;
            e.details.contentRenderDurationUs.clear();
            e.details.actualRenderDurationUs.clear();
            e.details.distanceMs.clear();
        // The first occurrence in the event should not have the distance recorded as part of the
        // event, because it belongs in a vacuum between two events. However we still want the
        // distance recorded in the details to calculate the times using all details in all events.
        } else if (distanceMs != -1) {
            e.durationMs += distanceMs;
            e.sumDistanceMs += distanceMs;
        }
        e.durationMs += actualDurationUs[1] / 1000;
        e.count++;
        e.sumScore += judderScore;
        if (e.details.contentRenderDurationUs.size() < c.judderEventDetailsMax) {
            e.details.actualRenderDurationUs.push_back(actualDurationUs[1]);
            e.details.contentRenderDurationUs.push_back(contentDurationUs[1]);
            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
        }
    }
}

void VideoRenderQualityTracker::maybeCaptureJudderEvent(int64_t actualRenderTimeUs,
                                                        int64_t lastJudderEndTimeUs, JudderEvent &e,
                                                        const VideoRenderQualityMetrics &m,
                                                        const Configuration &c,
                                                        JudderEvent *judderEventOut) {
    if (lastJudderEndTimeUs == -1 || !e.valid) {
        return;
    }
    // Future judder occurrences are still pulled into the current judder event if under tolerance
    int64_t distanceMs = (actualRenderTimeUs - lastJudderEndTimeUs) / 1000;
    if (distanceMs < c.judderEventDistanceToleranceMs) {
        return;
    }
    if (judderEventOut != nullptr && m.judderEventCount <= c.judderEventMax) {
        *judderEventOut = std::move(e);
    }
    // start recording a new judder event after pushing the current one back to the caller
    e.valid = false;
}

void VideoRenderQualityTracker::configureHistograms(VideoRenderQualityMetrics &m,
                                                    const Configuration &c) {
    m.freezeDurationMsHistogram.setup(c.freezeDurationMsHistogramBuckets);
    m.freezeDistanceMsHistogram.setup(c.freezeDistanceMsHistogramBuckets);
    m.judderScoreHistogram.setup(c.judderScoreHistogramBuckets);
}

int64_t VideoRenderQualityTracker::nowUs() {
    struct timespec t;
    t.tv_sec = t.tv_nsec = 0;
    clock_gettime(CLOCK_MONOTONIC, &t);
    return (t.tv_sec * 1000000000LL + t.tv_nsec) / 1000LL;
}

void VideoRenderQualityTracker::updateFrameDurations(FrameDurationUs &durationUs,
                                                     int64_t newTimestampUs) {
    for (int i = FrameDurationUs::SIZE - 1; i > 0; --i) {
        durationUs[i] = durationUs[i - 1];
    }
    if (newTimestampUs == -1) {
        durationUs[0] = -1;
    } else {
        durationUs[0] = durationUs.priorTimestampUs == -1 ? -1 :
                newTimestampUs - durationUs.priorTimestampUs;
        durationUs.priorTimestampUs = newTimestampUs;
    }
}
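
// For example (illustrative timestamps): successive calls with 0, 33333, and 66666 leave
// durationUs[0] == 33333 after each of the last two calls, with the older duration shifted to
// durationUs[1]; a call with -1 (no timestamp available) records a -1 duration but leaves
// priorTimestampUs untouched, so the next real timestamp produces a duration measured from the
// last real timestamp.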

void VideoRenderQualityTracker::updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
                                                const Configuration &c) {
    float newFrameRate = detectFrameRate(durationUs, c);
    if (newFrameRate != FRAME_RATE_UNDETERMINED) {
        frameRate = newFrameRate;
    }
}

float VideoRenderQualityTracker::detectFrameRate(const FrameDurationUs &durationUs,
                                                 const Configuration &c) {
    // At least 3 frames are necessary to detect stable frame rates
    assert(FrameDurationUs::SIZE >= 3);
    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1) {
        return FRAME_RATE_UNDETERMINED;
    }
    // Only determine frame rate if the render durations are stable across 3 frames
    if (abs(durationUs[0] - durationUs[1]) > c.frameRateDetectionToleranceUs ||
        abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs) {
        return is32pulldown(durationUs, c) ? FRAME_RATE_24_3_2_PULLDOWN : FRAME_RATE_UNDETERMINED;
    }
    return 1000.0 * 1000.0 / durationUs[0];
}
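
// For example (illustrative durations): three consecutive durations of 33333us, 33339us, and
// 33330us are within the default 2000us tolerance of one another, so the detected frame rate is
// 1000000 / 33333, roughly 30fps; durations of 16667us, 33333us, 16667us are not stable and fall
// through to the 3:2 pulldown check below.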

bool VideoRenderQualityTracker::is32pulldown(const FrameDurationUs &durationUs,
                                             const Configuration &c) {
    // At least 5 frames are necessary to detect stable 3:2 pulldown
    assert(FrameDurationUs::SIZE >= 5);
    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1 || durationUs[3] == -1 ||
        durationUs[4] == -1) {
        return false;
    }
    // 3:2 pulldown expects that every other frame has identical duration...
    if (abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs ||
        abs(durationUs[1] - durationUs[3]) > c.frameRateDetectionToleranceUs ||
        abs(durationUs[0] - durationUs[4]) > c.frameRateDetectionToleranceUs) {
        return false;
    }
    // ... for either 2 vsyncs or 3 vsyncs
    if ((abs(durationUs[0] - 33333) < c.frameRateDetectionToleranceUs &&
         abs(durationUs[1] - 50000) < c.frameRateDetectionToleranceUs) ||
        (abs(durationUs[0] - 50000) < c.frameRateDetectionToleranceUs &&
         abs(durationUs[1] - 33333) < c.frameRateDetectionToleranceUs)) {
        return true;
    }
    return false;
}
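
// For example (illustrative durations): 24fps content on a 60Hz display alternates between 3 and
// 2 vsyncs per frame, producing actual durations of roughly 50000us, 33333us, 50000us, 33333us,
// 50000us - every other duration matches and adjacent durations pair up as (50000, 33333), so
// detectFrameRate() reports FRAME_RATE_24_3_2_PULLDOWN.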

} // namespace android