/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "VideoFrameScheduler"
#include <utils/Log.h>
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Trace.h>

#include <sys/time.h>

#include <binder/IServiceManager.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayStatInfo.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>

#include "VideoFrameScheduler.h"

namespace android {

static const nsecs_t kNanosIn1s = 1000000000;

template<class T>
static int compare(const T *lhs, const T *rhs) {
    if (*lhs < *rhs) {
        return -1;
    } else if (*lhs > *rhs) {
        return 1;
    } else {
        return 0;
    }
}

/* ======================================================================= */
/*                                   PLL                                   */
/* ======================================================================= */

static const size_t kMinSamplesToStartPrime = 3;
static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize;
static const size_t kMinSamplesToEstimatePeriod = 3;
static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;

static const size_t kPrecision = 12;
static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
static const int64_t kMultiplesThresholdDiv = 4;            // 25%
static const int64_t kReFitThresholdDiv = 100;              // 1%
static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s;     // 1 sec
static const nsecs_t kMinPeriod = kNanosIn1s / 120;         // 120Hz
static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec
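
// The PLL below does its line fitting in fixed point: the slope and intercept
// returned by fit() carry kPrecision (12) fractional bits, and the squared fit
// error carries 2 * kPrecision bits, which is why kErrorThreshold is expressed
// as (1 << (kPrecision * 2)) / 10, i.e. roughly a 10% normalized residual.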

VideoFrameScheduler::PLL::PLL()
    : mPeriod(-1),
      mPhase(0),
      mPrimed(false),
      mSamplesUsedForPriming(0),
      mLastTime(-1),
      mNumSamples(0) {
}

void VideoFrameScheduler::PLL::reset(float fps) {
    //test();

    mSamplesUsedForPriming = 0;
    mLastTime = -1;

    // set up or reset video PLL
    if (fps <= 0.f) {
        mPeriod = -1;
        mPrimed = false;
    } else {
        ALOGV("reset at %.1f fps", fps);
        mPeriod = (nsecs_t)(1e9 / fps + 0.5);
        mPrimed = true;
    }

    restart();
}

// reset PLL but keep previous period estimate
void VideoFrameScheduler::PLL::restart() {
    mNumSamples = 0;
    mPhase = -1;
}

#if 0

void VideoFrameScheduler::PLL::test() {
    nsecs_t period = kNanosIn1s / 60;
    mTimes[0] = 0;
    mTimes[1] = period;
    mTimes[2] = period * 3;
    mTimes[3] = period * 4;
    mTimes[4] = period * 7;
    mTimes[5] = period * 8;
    mTimes[6] = period * 10;
    mTimes[7] = period * 12;
    mNumSamples = 8;
    int64_t a, b, err;
    fit(0, period * 12 / 7, 8, &a, &b, &err);
    // a = 0.8(5)+
    // b = -0.14097(2)+
    // err = 0.2750578(703)+
    ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            (long long)a, (a / (float)(1 << kPrecision)),
            (long long)b, (b / (float)(1 << kPrecision)),
            (long long)err, (err / (float)(1 << (kPrecision * 2))));
}

#endif

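// Least-squares line fit over the last numSamplesToUse render times, done in
// integer/fixed-point math. x counts elapsed video periods between samples
// (so dropped frames advance x by more than one), and y is the render time
// relative to 'phase', measured in units of period / 2^kPrecision. The
// returned slope 'a' and intercept 'b' are therefore Q12 correction factors
// that addSample() applies as mPeriod = (mPeriod * a) >> kPrecision and
// mPhase += (mPeriod * b) >> kPrecision; 'err' is the residual used to decide
// whether the samples are consistent enough to consider the PLL primed.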
bool VideoFrameScheduler::PLL::fit(
        nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
        int64_t *a, int64_t *b, int64_t *err) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }

    int64_t sumX = 0;
    int64_t sumXX = 0;
    int64_t sumXY = 0;
    int64_t sumYY = 0;
    int64_t sumY = 0;

    int64_t x = 0; // x usually is in [0..numSamplesToUse)
    nsecs_t lastTime;
    for (size_t i = 0; i < numSamplesToUse; i++) {
        size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[ix];
        if (i > 0) {
            x += divRound(time - lastTime, period);
        }
        // y is usually in
        // [-numSamplesToUse..numSamplesToUse + kRefitRefreshPeriod / kMinPeriod) << kPrecision;
        // ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
        // priming, and possibly shifted by up to kRefitRefreshPeriod / kMinPeriod
        // while we are not refitting.
        int64_t y = divRound(time - phase, period >> kPrecision);
        sumX += x;
        sumY += y;
        sumXX += x * x;
        sumXY += x * y;
        sumYY += y * y;
        lastTime = time;
    }

    int64_t div = numSamplesToUse * sumXX - sumX * sumX;
    if (div == 0) {
        return false;
    }

    int64_t a_nom = numSamplesToUse * sumXY - sumX * sumY;
    int64_t b_nom = sumXX * sumY - sumX * sumXY;
    *a = divRound(a_nom, div);
    *b = divRound(b_nom, div);
    // don't use a and b directly as the rounding error is significant
    *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
    ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            numSamplesToUse,
            (long long)*a, (*a / (float)(1 << kPrecision)),
            (long long)*b, (*b / (float)(1 << kPrecision)),
            (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
    return true;
}

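// Estimate the video frame period while the PLL is not yet primed. The deltas
// between successive render times are sorted, outliers beyond
// kMultiplesThresholdDiv times the median are dropped, and second-order
// differences between neighboring sorted deltas are added as extra candidates
// (these can expose the base period when only multiples of it were observed
// directly). Each candidate is scored by how many deltas lie near an integer
// multiple of it, and the winner seeds mPeriod and mPhase.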
void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }
    CHECK(numSamplesToUse >= 3);  // must have at least 3 samples

    // estimate video framerate from deltas between timestamps, and
    // 2nd order deltas
    Vector<nsecs_t> deltas;
    nsecs_t lastTime, firstTime;
    for (size_t i = 0; i < numSamplesToUse; ++i) {
        size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[index];
        if (i > 0) {
            if (time - lastTime > kMinPeriod) {
                //ALOGV("delta: %lld", (long long)(time - lastTime));
                deltas.push(time - lastTime);
            }
        } else {
            firstTime = time;
        }
        lastTime = time;
    }
    deltas.sort(compare<nsecs_t>);
    size_t numDeltas = deltas.size();
    if (numDeltas > 1) {
        nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
        nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
        for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
            if (deltas[i] > deltaMaxLimit) {
                deltas.resize(i);
                numDeltas = i;
                break;
            }
        }
        for (size_t i = 1; i < numDeltas; ++i) {
            nsecs_t delta2nd = deltas[i] - deltas[i - 1];
            if (delta2nd >= deltaMinLimit) {
                //ALOGV("delta2: %lld", (long long)(delta2nd));
                deltas.push(delta2nd);
            }
        }
    }

    // use the one that yields the best match
    int64_t bestScore;
    for (size_t i = 0; i < deltas.size(); ++i) {
        nsecs_t delta = deltas[i];
        int64_t score = 0;
#if 1
        // simplest score: number of deltas that are near multiples
        size_t matches = 0;
        for (size_t j = 0; j < deltas.size(); ++j) {
            nsecs_t err = periodicError(deltas[j], delta);
            if (err < delta / kMultiplesThresholdDiv) {
                ++matches;
            }
        }
        score = matches;
#if 0
        // could be weighed by the (1 - normalized error)
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            err = (1 << (2 * kPrecision)) - err;
            score *= max(0, err);
        }
#endif
#else
        // or use the error as a negative score
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            score = -delta * err;
        }
#endif
        if (i == 0 || score > bestScore) {
            bestScore = score;
            mPeriod = delta;
            mPhase = firstTime;
        }
    }
    ALOGV("priming[%zu] phase:%lld period:%lld",
            numSamplesToUse, (long long)mPhase, (long long)mPeriod);
}

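// Feed one render timestamp into the PLL and return the current period
// estimate. The history restarts if time jumps backward or skips ahead by
// more than kMaxAllowedFrameSkip. Until the PLL is primed, prime() re-derives
// the period from the recent samples; once a period exists and enough samples
// are buffered, a refit is triggered when the phase error exceeds 1/100 of the
// period (kReFitThresholdDiv) or after kRefitRefreshPeriod. A successful fit
// scales mPeriod and shifts mPhase by the Q12 factors from fit(), and the
// primed flag is kept only while the fit error stays below kErrorThreshold.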
nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
    if (mLastTime >= 0
            // if time goes backward, or we skipped rendering
            && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
        restart();
    }

    mLastTime = time;
    mTimes[mNumSamples % kHistorySize] = time;
    ++mNumSamples;

    bool doFit = time > mRefitAt;
    if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
        prime(kMinSamplesToStopPrime);
        ++mSamplesUsedForPriming;
        doFit = true;
    }
    if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
        if (mPhase < 0) {
            // initialize phase to the current render time
            mPhase = time;
            doFit = true;
        } else if (!doFit) {
            int64_t err = periodicError(time - mPhase, mPeriod);
            doFit = err > mPeriod / kReFitThresholdDiv;
        }

        if (doFit) {
            int64_t a, b, err;
            if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
                // samples are not suitable for fitting. this means they are
                // also not suitable for priming.
                ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
                return mPeriod;
            }

            mRefitAt = time + kRefitRefreshPeriod;

            mPhase += (mPeriod * b) >> kPrecision;
            mPeriod = (mPeriod * a) >> kPrecision;
            ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);

            if (err < kErrorThreshold) {
                if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
                    mPrimed = true;
                }
            } else {
                mPrimed = false;
                mSamplesUsedForPriming = 0;
            }
        }
    }
    return mPeriod;
}

/* ======================================================================= */
/*                             Frame Scheduler                             */
/* ======================================================================= */

static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60;  // 60Hz
static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s;       // 1 sec

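// Minimal usage sketch (hypothetical caller, not part of this file): a video
// renderer creates the scheduler, calls init() with the nominal stream fps if
// known (or a non-positive value otherwise), then passes each frame's target
// render time, on the systemTime(SYSTEM_TIME_MONOTONIC) timebase, through
// schedule() and queues the buffer with the returned, vsync-aligned time:
//
//     VideoFrameScheduler scheduler;
//     scheduler.init(29.97f);                       // example fps
//     nsecs_t adjustedNs = scheduler.schedule(renderTimeNs);
//     // ... hand adjustedNs to the surface as the presentation time ...
//     scheduler.release();                          // drops the composer ref
//
// 'renderTimeNs' and 'adjustedNs' above are illustrative names only.
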
VideoFrameScheduler::VideoFrameScheduler()
    : mVsyncTime(0),
      mVsyncPeriod(0),
      mVsyncRefreshAt(0),
      mLastVsyncTime(-1),
      mTimeCorrection(0) {
}

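// Refresh the cached VSYNC timing from SurfaceFlinger. The composer service
// is looked up lazily via checkService(), and getDisplayStats() supplies the
// latest vsync timestamp and period for the primary display; the cached
// values are considered stale after kVsyncRefreshPeriod (1 sec) and are
// re-queried from schedule().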
void VideoFrameScheduler::updateVsync() {
    mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
    mVsyncPeriod = 0;
    mVsyncTime = 0;

    // TODO: schedule frames for the destination surface
    // For now, surface flinger only schedules frames on the primary display
    if (mComposer == NULL) {
        String16 name("SurfaceFlinger");
        sp<IServiceManager> sm = defaultServiceManager();
        mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name));
    }
    if (mComposer != NULL) {
        DisplayStatInfo stats;
        status_t res = mComposer->getDisplayStats(NULL /* display */, &stats);
        if (res == OK) {
            ALOGV("vsync time:%lld period:%lld",
                    (long long)stats.vsyncTime, (long long)stats.vsyncPeriod);
            mVsyncTime = stats.vsyncTime;
            mVsyncPeriod = stats.vsyncPeriod;
        } else {
            ALOGW("getDisplayStats returned %d", res);
        }
    } else {
        ALOGW("could not get surface composer service");
    }
}

void VideoFrameScheduler::init(float videoFps) {
    updateVsync();

    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.reset(videoFps);
}

void VideoFrameScheduler::restart() {
    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.restart();
}

nsecs_t VideoFrameScheduler::getVsyncPeriod() {
    if (mVsyncPeriod > 0) {
        return mVsyncPeriod;
    }
    return kDefaultVsyncPeriod;
}

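// Map an incoming render time onto the display's VSYNC grid. Each call feeds
// the original render time to the PLL, then works on a time that has been
// pulled back by half a vsync period so that "present at the next vsync"
// becomes rounding to the closest vsync. A running mTimeCorrection, derived
// from the next N predicted frames, nudges frames toward the middle of their
// vsync interval; when the accumulated correction exceeds correctionLimit, a
// whole vsync is dropped from or added to the previous frame's display time
// instead. The returned time is finally snapped to the center between two
// vsync edges.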
nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
    nsecs_t origRenderTime = renderTime;

    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    if (now >= mVsyncRefreshAt) {
        updateVsync();
    }

    // without VSYNC info, there is nothing to do
    if (mVsyncPeriod == 0) {
        ALOGV("no vsync: render=%lld", (long long)renderTime);
        return renderTime;
    }

    // ensure vsync time is well before (corrected) render time
    if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
        mVsyncTime -=
            ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
    }

    // Video presentation takes place at the VSYNC _after_ renderTime. Adjust renderTime
    // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
    renderTime -= mVsyncPeriod / 2;

    const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
    if (videoPeriod > 0) {
        // Smooth out rendering
        size_t N = 12;
        nsecs_t fiveSixthDev =
                abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
                        / (mVsyncPeriod / 100);
        // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
        if (fiveSixthDev < 12) {  /* 12% / 6 = 2% */
            N = 20;
        }

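        // Average, over the next N predicted frame times, how far each frame
        // lands inside its vsync interval, and steer mTimeCorrection so that
        // this average sits at half a vsync period (mid-interval).
        // edgeRemainder measures where the raw video frame edges fall on
        // average relative to vsync edges; when that average is itself near
        // mid-interval the correction limit is halved, presumably so that a
        // vsync is added or dropped sooner instead of letting the correction
        // build up.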
        nsecs_t offset = 0;
        nsecs_t edgeRemainder = 0;
        for (size_t i = 1; i <= N; i++) {
            offset +=
                (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
            edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
        }
        mTimeCorrection += mVsyncPeriod / 2 - offset / N;
        renderTime += mTimeCorrection;
        nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
        edgeRemainder = abs(edgeRemainder / N - mVsyncPeriod / 2);
        if (edgeRemainder <= mVsyncPeriod / 3) {
            correctionLimit /= 2;
        }

        // estimate how many VSYNCs a frame will spend on the display
        nsecs_t nextVsyncTime =
            renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
        if (mLastVsyncTime >= 0) {
            size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
            size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
            bool vsyncsPerFrameAreNearlyConstant =
                periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;

            if (mTimeCorrection > correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
                // remove a VSYNC
                mTimeCorrection -= mVsyncPeriod / 2;
                renderTime -= mVsyncPeriod / 2;
                nextVsyncTime -= mVsyncPeriod;
                --vsyncsForLastFrame;
            } else if (mTimeCorrection < -correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
                // add a VSYNC
                mTimeCorrection += mVsyncPeriod / 2;
                renderTime += mVsyncPeriod / 2;
                nextVsyncTime += mVsyncPeriod;
                ++vsyncsForLastFrame;
            }
            ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
        }
        mLastVsyncTime = nextVsyncTime;
    }

    // align rendertime to the center between VSYNC edges
    renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
    renderTime += mVsyncPeriod / 2;
    ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
    ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
    return renderTime;
}

void VideoFrameScheduler::release() {
    mComposer.clear();
}

VideoFrameScheduler::~VideoFrameScheduler() {
    release();
}

}  // namespace android