/*
 * Copyright 2023, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef VIDEO_RENDER_QUALITY_TRACKER_H_

#define VIDEO_RENDER_QUALITY_TRACKER_H_

#include <assert.h>
#include <list>
#include <queue>
#include <string>
#include <vector>

#include <media/stagefright/MediaHistogram.h>

namespace android {

// A variety of video rendering quality metrics.
struct VideoRenderQualityMetrics {
    static constexpr float FRAME_RATE_UNDETERMINED = -1.0f;
    static constexpr float FRAME_RATE_24_3_2_PULLDOWN = -2.0f;

    VideoRenderQualityMetrics();

    void clear();

    // The render time of the first video frame.
    int64_t firstRenderTimeUs;

    // The render time of the last video frame.
    int64_t lastRenderTimeUs;

    // The number of frames released to be rendered.
    int64_t frameReleasedCount;

    // The number of frames actually rendered.
    int64_t frameRenderedCount;

    // The number of frames dropped - frames that were released but never rendered.
    int64_t frameDroppedCount;

    // The number of frames that were intentionally dropped/skipped by the app.
    int64_t frameSkippedCount;

    // The frame rate as detected by looking at the position timestamp from the content stream.
    float contentFrameRate;

    // The frame rate as detected by looking at the desired render time passed in by the app.
    float desiredFrameRate;

    // The frame rate as detected by looking at the actual render time, as returned by the system
    // post-render.
    float actualFrameRate;

    // A histogram of the durations of freezes due to dropped/skipped frames.
    MediaHistogram<int32_t> freezeDurationMsHistogram;
    // The computed overall freeze score using the above histogram and score conversion table. The
    // score is based on counts in the histogram buckets, multiplied by the value in the score
    // conversion table for each bucket. For example, the impact of a short freeze may be minimal,
    // but the impact of a long freeze may be disproportionately worse. Therefore, the score
    // multipliers for each bucket might increase exponentially instead of linearly. A score
    // multiplier of zero would reflect that small freeze durations have near-zero impact on the
    // user experience.
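    //
    // Illustrative example (the numbers here are hypothetical, not the configured defaults): if a
    // 200ms freeze falls into a bucket whose score multiplier is 10, and a 2-second freeze falls
    // into a bucket whose multiplier is 100, then a playback session with four 200ms freezes and
    // one 2-second freeze scores 4 * 10 + 1 * 100 = 140. The judder score below uses the same
    // histogram-count-times-multiplier mechanism.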
    int32_t freezeScore;
    // The computed percentage of total playback duration that was frozen.
    float freezeRate;
    // The number of freeze events.
    int32_t freezeEventCount;

    // A histogram of the durations between each freeze.
    MediaHistogram<int32_t> freezeDistanceMsHistogram;

    // A histogram of the judder scores - based on the error between the actual render duration of
    // each frame and the ideal render duration.
    MediaHistogram<int32_t> judderScoreHistogram;
    // The computed overall judder score using the above histogram and score conversion table. The
    // score is based on counts in the histogram buckets, multiplied by the value in the score
    // conversion table for each bucket. For example, the impact of minimal judder may be small,
    // but the impact of large judder may be disproportionately worse. Therefore, the score
    // multipliers for each bucket might increase exponentially instead of linearly. A score
    // multiplier of zero would reflect that small judder errors have near-zero impact on the user
    // experience.
    int32_t judderScore;
    // The computed percentage of total frames that had judder.
    float judderRate;
    // The number of judder events.
    int32_t judderEventCount;
};

///////////////////////////////////////////////////////
// This class analyzes various timestamps related to video rendering to compute a set of metrics
// that attempt to capture the quality of the user experience during video playback.
//
// The following timestamps (in microseconds) are analyzed to compute these metrics:
//   * The content timestamp found in the content stream, indicating the position of each video
//     frame.
//   * The desired timestamp passed in by the app, indicating at what point in time in the future
//     the app would like the frame to be rendered.
//   * The actual timestamp passed in by the display subsystem, indicating the point in time at
//     which the frame was actually rendered.
//
// Core to the algorithms is deriving frame durations from these timestamps and determining the
// result of each video frame in the content stream:
//   * skipped: the app didn't want to render the frame
//   * dropped: the display subsystem could not render the frame in time
//   * rendered: the display subsystem rendered the frame
//
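// A minimal usage sketch (illustrative only; the caller-side variable names are hypothetical and
// not part of this API):
//
//     VideoRenderQualityTracker tracker;
//     // For each frame the app wants rendered, before it is sent to the display:
//     tracker.onFrameReleased(contentTimeUs, desiredRenderTimeNs);
//     // For each frame the app intentionally chooses not to render:
//     tracker.onFrameSkipped(contentTimeUs);
//     // When the display subsystem reports that a frame was actually shown:
//     tracker.onFrameRendered(contentTimeUs, actualRenderTimeNs);
//     // On a seek or flush, reset internal tracking; at any point, read the metrics:
//     tracker.resetForDiscontinuity();
//     const VideoRenderQualityMetrics &metrics = tracker.getMetrics();
//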
class VideoRenderQualityTracker {
public:
    // Configurable elements of the metrics algorithms
    class Configuration {
    public:
        // system/server_configurable_flags/libflags/include/get_flags.h:GetServerConfigurableFlag
        typedef std::string (*GetServerConfigurableFlagFn)(
                const std::string& experiment_category_name,
                const std::string& experiment_flag_name,
                const std::string& default_value);

        static Configuration getFromServerConfigurableFlags(
                GetServerConfigurableFlagFn getServerConfigurableFlagFn);

        Configuration();

        // Whether or not frame render quality is tracked.
        bool enabled;

        // Whether or not frames that are intentionally not rendered by the app should be
        // considered as dropped.
        bool areSkippedFramesDropped;

        // How large of a jump forward in content time is allowed before it is considered a
        // discontinuity (seek/playlist) and various internal states are reset.
        int32_t maxExpectedContentFrameDurationUs;

        // How much tolerance in frame duration is allowed when considering whether or not two
        // frames have the same frame rate.
        int32_t frameRateDetectionToleranceUs;

        // A skip forward in content time could occur during frame drops of live content.
        // Therefore, the content frame duration and the app-desired frame duration are compared
        // using this tolerance to determine whether the app is intentionally seeking forward or
        // whether the skip forward in content time is due to frame drops. If the app-desired
        // frame duration is short, but the content frame duration is large, it is assumed the app
        // is intentionally seeking forward.
        int32_t liveContentFrameDropToleranceUs;

        // Freeze configuration
        //
        // The values used to distribute freeze durations across a histogram.
        std::vector<int32_t> freezeDurationMsHistogramBuckets;
        //
        // The values used to multiply the counts in the histogram buckets above to compute an
        // overall score. This allows the score to reflect disproportionate impact as freeze
        // durations increase.
        std::vector<int64_t> freezeDurationMsHistogramToScore;
        //
        // The values used to distribute distances between freezes across a histogram.
        std::vector<int32_t> freezeDistanceMsHistogramBuckets;
        //
        // The maximum number of freeze events to send back to the caller.
        int32_t freezeEventMax;
        //
        // The maximum number of detail entries tracked per freeze event.
        int32_t freezeEventDetailsMax;
        //
        // The maximum distance in time between two freeze occurrences such that both will be
        // lumped into the same freeze event.
        int32_t freezeEventDistanceToleranceMs;

        // Judder configuration
        //
        // A judder error lower than this value is not scored as judder.
        int32_t judderErrorToleranceUs;
        //
        // The values used to distribute judder scores across a histogram.
        std::vector<int32_t> judderScoreHistogramBuckets;
        //
        // The values used to multiply the counts in the histogram buckets above to compute an
        // overall score. This allows the score to reflect disproportionate impact as judder
        // scores increase.
        std::vector<int64_t> judderScoreHistogramToScore;
        //
        // The maximum number of judder events to send back to the caller.
        int32_t judderEventMax;
        //
        // The maximum number of detail entries tracked per judder event.
        int32_t judderEventDetailsMax;
        //
        // The maximum distance in time between two judder occurrences such that both will be
        // lumped into the same judder event.
        int32_t judderEventDistanceToleranceMs;
    };
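
    // A sketch of overriding a few configuration fields before constructing a tracker (the values
    // are hypothetical, chosen for illustration; they are not the defaults):
    //
    //     VideoRenderQualityTracker::Configuration config;
    //     config.enabled = true;
    //     config.areSkippedFramesDropped = false;
    //     config.judderErrorToleranceUs = 2000;  // hypothetical: ignore errors under 2ms
    //     VideoRenderQualityTracker tracker(config);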

    struct FreezeEvent {
        // Details are captured for each freeze up to a limited number. The arrays are guaranteed
        // to have the same size.
        struct Details {
            // The duration of the freeze.
            std::vector<int32_t> durationMs;
            // The distance between the beginning of this freeze and the end of the previous
            // freeze.
            std::vector<int32_t> distanceMs;
        };
        // Whether or not the data in this structure is valid.
        bool valid = false;
        // The time at which the first freeze for this event was detected.
        int64_t initialTimeUs;
        // The total duration from the beginning of the first freeze to the end of the last freeze
        // in this event.
        int32_t durationMs;
        // The number of freezes in this event.
        int64_t count;
        // The sum of all durations of all freezes in this event.
        int64_t sumDurationMs;
        // The sum of all distances between each freeze in this event.
        int64_t sumDistanceMs;
        // Detailed information for the first N freezes in this event.
        Details details;
    };

    struct JudderEvent {
        // Details are captured for each frame judder up to a limited number. The arrays are
        // guaranteed to have the same size.
        struct Details {
            // The actual render duration of the frame for this judder occurrence.
            std::vector<int32_t> actualRenderDurationUs;
            // The content render duration of the frame for this judder occurrence.
            std::vector<int32_t> contentRenderDurationUs;
            // The distance between this judder occurrence and the previous judder occurrence.
            std::vector<int32_t> distanceMs;
        };
        // Whether or not the data in this structure is valid.
        bool valid = false;
        // The time at which the first judder occurrence for this event was detected.
        int64_t initialTimeUs;
        // The total duration from the first judder occurrence to the last judder occurrence in
        // this event.
        int32_t durationMs;
        // The number of judder occurrences in this event.
        int64_t count;
        // The sum of all judder scores in this event.
        int64_t sumScore;
        // The sum of all distances between each judder occurrence in this event.
        int64_t sumDistanceMs;
        // Detailed information for the first N judder occurrences in this event.
        Details details;
    };

    VideoRenderQualityTracker();
    VideoRenderQualityTracker(const Configuration &configuration);

    // Called when a tunnel mode frame has been queued.
    void onTunnelFrameQueued(int64_t contentTimeUs);

    // Called when the app has intentionally decided not to render this frame.
    void onFrameSkipped(int64_t contentTimeUs);

    // Called when the app has requested the frame to be rendered as soon as possible.
    void onFrameReleased(int64_t contentTimeUs);

    // Called when the app has requested the frame to be rendered at a specific point in time in
    // the future.
    void onFrameReleased(int64_t contentTimeUs, int64_t desiredRenderTimeNs);

    // Called when the system has detected that the frame has actually been rendered to the
    // display. Returns any freeze events or judder events that were detected.
    void onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
                         FreezeEvent *freezeEventOut = nullptr,
                         JudderEvent *judderEventOut = nullptr);

    // Gets and resets data for the current freeze event.
    FreezeEvent getAndResetFreezeEvent();

    // Gets and resets data for the current judder event.
    JudderEvent getAndResetJudderEvent();

    // Retrieve the metrics.
    const VideoRenderQualityMetrics &getMetrics();

    // Called when a change in codec state will result in a content discontinuity - e.g. flush.
    void resetForDiscontinuity();

    // Clear out all metrics and tracking - e.g. codec reconfigured.
    void clear();
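
    // A sketch of retrieving freeze/judder events from onFrameRendered (illustrative only; the
    // caller-side variables are hypothetical):
    //
    //     VideoRenderQualityTracker::FreezeEvent freeze;
    //     VideoRenderQualityTracker::JudderEvent judder;
    //     tracker.onFrameRendered(contentTimeUs, actualRenderTimeNs, &freeze, &judder);
    //     if (freeze.valid) {
    //         // e.g. report freeze.durationMs, freeze.count, and the per-freeze entries in
    //         // freeze.details.durationMs and freeze.details.distanceMs
    //     }
    //     if (judder.valid) {
    //         // e.g. report judder.sumScore and the per-occurrence entries in judder.details
    //     }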

private:
    // Tracking of frames that are pending to be rendered to the display.
    struct FrameInfo {
        int64_t contentTimeUs;
        int64_t desiredRenderTimeUs;
    };

    // Historic tracking of frame durations
    struct FrameDurationUs {
        static const int SIZE = 5;

        FrameDurationUs() {
            for (int i = 0; i < SIZE; ++i) {
                durationUs[i] = -1;
            }
            priorTimestampUs = -1;
        }

        int32_t &operator[](int index) {
            assert(index < SIZE);
            return durationUs[index];
        }

        const int32_t &operator[](int index) const {
            assert(index < SIZE);
            return durationUs[index];
        }

        // The duration of the past N frames.
        int32_t durationUs[SIZE];

        // The timestamp of the previous frame.
        int64_t priorTimestampUs;
    };

    // Configure histograms for the metrics.
    static void configureHistograms(VideoRenderQualityMetrics &m, const Configuration &c);

    // The current time in microseconds.
    static int64_t nowUs();

    // A new frame has been processed, so update the frame durations based on the new frame
    // timestamp.
    static void updateFrameDurations(FrameDurationUs &durationUs, int64_t newTimestampUs);

    // Update a frame rate if, and only if, one can be detected.
    static void updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
                                const Configuration &c);

    // Examine the past few frames to detect the frame rate based on each frame's render duration.
    static float detectFrameRate(const FrameDurationUs &durationUs, const Configuration &c);

    // Determine whether or not 3:2 pulldown for displaying 24fps content on 60Hz displays is
    // occurring.
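    // For example, 24fps content on a 60Hz display is typically presented with frame durations
    // alternating between 3 and 2 vsync periods (~50ms and ~33ms) rather than a constant ~41.7ms.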
    static bool is32pulldown(const FrameDurationUs &durationUs, const Configuration &c);

    // Process a frame freeze.
    static void processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
                              VideoRenderQualityMetrics &m, const Configuration &c);

    // Retrieve a freeze event if an event just finished.
    static void maybeCaptureFreezeEvent(int64_t actualRenderTimeUs, int64_t lastFreezeEndTimeUs,
                                        FreezeEvent &e, const VideoRenderQualityMetrics &m,
                                        const Configuration &c, FreezeEvent *freezeEventOut);

    // Compute a judder score for the previously-rendered frame.
    static int64_t computePreviousJudderScore(const FrameDurationUs &actualRenderDurationUs,
                                              const FrameDurationUs &contentRenderDurationUs,
                                              const Configuration &c);

    // Process a frame judder.
    static void processJudder(int32_t judderScore, int64_t judderTimeUs,
                              int64_t lastJudderEndTimeUs,
                              const FrameDurationUs &contentDurationUs,
                              const FrameDurationUs &actualDurationUs, JudderEvent &e,
                              VideoRenderQualityMetrics &m, const Configuration &c);

    // Retrieve a judder event if an event just finished.
    static void maybeCaptureJudderEvent(int64_t actualRenderTimeUs, int64_t lastJudderEndTimeUs,
                                        JudderEvent &e, const VideoRenderQualityMetrics &m,
                                        const Configuration &c, JudderEvent *judderEventOut);

    // Check to see if a discontinuity has occurred by examining the content time and the
    // app-desired render time. If so, reset some internal state.
    bool resetIfDiscontinuity(int64_t contentTimeUs, int64_t desiredRenderTimeUs);

    // Update the metrics because a skipped frame was detected.
    void processMetricsForSkippedFrame(int64_t contentTimeUs);

    // Update the metrics because a dropped frame was detected.
    void processMetricsForDroppedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs);

    // Update the metrics because a rendered frame was detected.
    void processMetricsForRenderedFrame(int64_t contentTimeUs, int64_t desiredRenderTimeUs,
                                        int64_t actualRenderTimeUs,
                                        FreezeEvent *freezeEventOut, JudderEvent *judderEventOut);

    // Configurable elements of the metrics algorithms.
    const Configuration mConfiguration;

    // Metrics are updated every time a frame event occurs - skipped, dropped, rendered.
    VideoRenderQualityMetrics mMetrics;

    // The most recently processed timestamp referring to the position in the content stream.
    int64_t mLastContentTimeUs;

    // The most recently processed timestamp referring to the wall clock time a frame was
    // rendered.
    int64_t mLastRenderTimeUs;

    // The most recent timestamp of the first frame rendered after a freeze.
    int64_t mLastFreezeEndTimeUs;

    // The most recent timestamp of frame judder.
    int64_t mLastJudderEndTimeUs;

    // The render duration of the playback.
    int64_t mRenderDurationMs;

    // True if the previous frame was dropped.
    bool mWasPreviousFrameDropped;

    // The freeze event that's currently being tracked.
    FreezeEvent mFreezeEvent;

    // The judder event that's currently being tracked.
    JudderEvent mJudderEvent;

    // Frames skipped at the end of playback shouldn't really be considered skipped; therefore,
    // keep a list of the frames, and process them as skipped frames the next time a frame is
    // rendered.
    std::list<int64_t> mPendingSkippedFrameContentTimeUsList;

    // Since the system only signals when a frame is rendered, dropped frames are detected by
    // checking to see if the next expected frame is rendered. If not, it is considered dropped.
    std::queue<FrameInfo> mNextExpectedRenderedFrameQueue;

    // When B-frames are present in the stream, a P-frame will be queued before the B-frame even
    // though it is rendered after it. Therefore, the P-frame is held here and not inserted into
    // mNextExpectedRenderedFrameQueue until it should be inserted to maintain render order.
    int64_t mTunnelFrameQueuedContentTimeUs;

    // Frame durations derived from timestamps encoded into the content stream. These are the
    // durations that each frame is supposed to be rendered for.
    FrameDurationUs mContentFrameDurationUs;

    // Frame durations derived from timestamps passed in by the app, indicating the wall clock
    // time at which the app would like to have the frame rendered.
    FrameDurationUs mDesiredFrameDurationUs;

    // Frame durations derived from timestamps captured by the display subsystem, indicating the
    // wall clock time at which the frame is actually rendered.
    FrameDurationUs mActualFrameDurationUs;
};

}  // namespace android

#endif  // VIDEO_RENDER_QUALITY_TRACKER_H_