/*
 * Copyright 2012, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#include "hidl/HidlSupport.h"
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>

#include <set>

#include <inttypes.h>
#include <stdlib.h>
#include <dlfcn.h>

#include <C2Buffer.h>

#include "include/SoftwareRenderer.h"
#include "PlaybackDurationAccumulator.h"

#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>

#include <aidl/android/media/BnResourceManagerClient.h>
#include <aidl/android/media/IResourceManagerService.h>
#include <android/binder_ibinder.h>
#include <android/dlext.h>
#include <binder/IMemory.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
#include <gui/Surface.h>
#include <hidlmemory/FrameworkUtils.h>
#include <mediadrm/ICrypto.h>
#include <media/IOMX.h>
#include <media/MediaCodecBuffer.h>
#include <media/MediaCodecInfo.h>
#include <media/MediaMetricsItem.h>
#include <media/MediaResource.h>
#include <media/NdkMediaErrorPriv.h>
#include <media/NdkMediaFormat.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/formatshaper/FormatShaper.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/BatteryChecker.h>
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaFilter.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
#include <utils/Singleton.h>

namespace android {

using Status = ::ndk::ScopedAStatus;
using aidl::android::media::BnResourceManagerClient;
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;

// key for media statistics
static const char *kCodecKeyName = "codec";
// attrs for media statistics
// NB: these are matched with public Java API constants defined
//     in frameworks/base/media/java/android/media/MediaCodec.java
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec";  /* e.g. OMX.google.aac.decoder */
static const char *kCodecMime = "android.media.mediacodec.mime";  /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode";  /* audio, video */
static const char *kCodecModeVideo = "video";  /* values returned for kCodecMode */
static const char *kCodecModeAudio = "audio";
static const char *kCodecEncoder = "android.media.mediacodec.encoder";  /* 0,1 */
static const char *kCodecSecure = "android.media.mediacodec.secure";  /* 0, 1 */
static const char *kCodecWidth = "android.media.mediacodec.width";  /* 0..n */
static const char *kCodecHeight = "android.media.mediacodec.height";  /* 0..n */
static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees";  /* 0/90/180/270 */
static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";

// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";

// Min/Max QP after shaping
static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";

// NB: These are not yet exposed as public Java API constants.
static const char *kCodecCrypto = "android.media.mediacodec.crypto";  /* 0,1 */
static const char *kCodecProfile = "android.media.mediacodec.profile";  /* 0..n */
static const char *kCodecLevel = "android.media.mediacodec.level";  /* 0..n */
static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode";  /* CQ/VBR/CBR */
static const char *kCodecBitrate = "android.media.mediacodec.bitrate";  /* 0..n */
static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate";  /* 0..n */
static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth";  /* 0..n */
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight";  /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs";  /* 0..n ms */
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max";  /* in us */
static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min";  /* in us */
static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg";  /* in us */
static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist";  /* in us */
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";

static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame";  /* 0..n */
static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";

// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";  /* in us */
static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min";  /* in us */
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";  /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";  /* in us */
static const char *kCodecPlaybackDurationSec =
        "android.media.mediacodec.playback-duration-sec";  /* in sec */

/* -1: shaper disabled
   >=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";

// XXX suppress until we get our representation right
static bool kEmitHistogram = false;


static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
    return (int64_t) client.get();
}

static bool isResourceError(status_t err) {
    return (err == NO_MEMORY);
}

static const int kMaxRetry = 2;
static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
static const int kNumBuffersAlign = 16;

static const C2MemoryUsage kDefaultReadWriteUsage{
        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

////////////////////////////////////////////////////////////////////////////////

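// ResourceManagerClient is the callback object this codec registers with the
// system ResourceManagerService: the service calls reclaimResource() when it
// wants this codec to give up its resources (typically on behalf of a
// higher-priority client), and getName() when it needs an identifier for
// logging.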
struct ResourceManagerClient : public BnResourceManagerClient {
    explicit ResourceManagerClient(MediaCodec* codec) : mMediaCodec(codec) {}

    Status reclaimResource(bool* _aidl_return) override {
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // codec is already gone.
            *_aidl_return = true;
            return Status::ok();
        }
        status_t err = codec->reclaim();
        if (err == WOULD_BLOCK) {
            ALOGD("Wait for the client to release codec.");
            usleep(kMaxReclaimWaitTimeInUs);
            ALOGD("Try to reclaim again.");
            err = codec->reclaim(true /* force */);
        }
        if (err != OK) {
            ALOGW("ResourceManagerClient failed to release codec with err %d", err);
        }
        *_aidl_return = (err == OK);
        return Status::ok();
    }

    Status getName(::std::string* _aidl_return) override {
        _aidl_return->clear();
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // codec is already gone.
            return Status::ok();
        }

        AString name;
        if (codec->getName(&name) == OK) {
            *_aidl_return = name.c_str();
        }
        return Status::ok();
    }

    virtual ~ResourceManagerClient() {}

private:
    wp<MediaCodec> mMediaCodec;

    DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};

struct MediaCodec::ResourceManagerServiceProxy : public RefBase {
    ResourceManagerServiceProxy(pid_t pid, uid_t uid,
            const std::shared_ptr<IResourceManagerClient> &client);
    virtual ~ResourceManagerServiceProxy();

    void init();

    // implements DeathRecipient
    static void BinderDiedCallback(void* cookie);
    void binderDied();
    static Mutex sLockCookies;
    static std::set<void*> sCookies;
    static void addCookie(void* cookie);
    static void removeCookie(void* cookie);

    void addResource(const MediaResourceParcel &resource);
    void removeResource(const MediaResourceParcel &resource);
    void removeClient();
    void markClientForPendingRemoval();
    bool reclaimResource(const std::vector<MediaResourceParcel> &resources);

private:
    Mutex mLock;
    pid_t mPid;
    uid_t mUid;
    std::shared_ptr<IResourceManagerService> mService;
    std::shared_ptr<IResourceManagerClient> mClient;
    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
};

MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
        pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client)
        : mPid(pid), mUid(uid), mClient(client),
          mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
    if (mPid == MediaCodec::kNoPid) {
        mPid = AIBinder_getCallingPid();
    }
}

MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {

    // remove the cookie, so any in-flight death notification will get dropped
    // by our handler.
    removeCookie(this);

    Mutex::Autolock _l(mLock);
    if (mService != nullptr) {
        AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
        mService = nullptr;
    }
}

void MediaCodec::ResourceManagerServiceProxy::init() {
    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
    mService = IResourceManagerService::fromBinder(binder);
    if (mService == nullptr) {
        ALOGE("Failed to get ResourceManagerService");
        return;
    }

    // Kill clients pending removal.
    mService->reclaimResourcesFromClientsPendingRemoval(mPid);

    // so our handler will process the death notifications
    addCookie(this);

    // after this, require mLock whenever using mService
    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
}

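// sCookies tracks which proxies are still alive. A death notification can be
// delivered on a binder thread after a proxy has already been destroyed; the
// callback below only dereferences its cookie while the cookie is still in
// this set (checked under sLockCookies), and ~ResourceManagerServiceProxy()
// removes the cookie first, which avoids a use-after-free in that race.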
//static
Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;

//static
void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    sCookies.insert(cookie);
}

//static
void MediaCodec::ResourceManagerServiceProxy::removeCookie(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    sCookies.erase(cookie);
}

//static
void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    if (sCookies.find(cookie) != sCookies.end()) {
        auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
        thiz->binderDied();
    }
}

void MediaCodec::ResourceManagerServiceProxy::binderDied() {
    ALOGW("ResourceManagerService died.");
    Mutex::Autolock _l(mLock);
    mService = nullptr;
}

void MediaCodec::ResourceManagerServiceProxy::addResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources;
    resources.push_back(resource);

    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->addResource(mPid, mUid, getId(mClient), mClient, resources);
}

void MediaCodec::ResourceManagerServiceProxy::removeResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources;
    resources.push_back(resource);

    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->removeResource(mPid, getId(mClient), resources);
}

void MediaCodec::ResourceManagerServiceProxy::removeClient() {
    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->removeClient(mPid, getId(mClient));
}

void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->markClientForPendingRemoval(mPid, getId(mClient));
}

bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
        const std::vector<MediaResourceParcel> &resources) {
    Mutex::Autolock _l(mLock);
    if (mService == NULL) {
        return false;
    }
    bool success;
    Status status = mService->reclaimResource(mPid, resources, &success);
    return status.isOk() && success;
}

////////////////////////////////////////////////////////////////////////////////

MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}

////////////////////////////////////////////////////////////////////////////////

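// ReleaseSurface provides a placeholder surface whose consumer immediately
// acquires and releases every frame it receives, so output sent to it is
// simply discarded; per the "MediaCodec.release" consumer name, it stands in
// for the app-provided surface while the codec is being released.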
class MediaCodec::ReleaseSurface {
public:
    explicit ReleaseSurface(uint64_t usage) {
        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
        mSurface = new Surface(mProducer, false /* controlledByApp */);
        struct ConsumerListener : public BnConsumerListener {
            ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
                mConsumer = consumer;
            }
            void onFrameAvailable(const BufferItem&) override {
                BufferItem buffer;
                // consume buffer
                sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
                if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
                    consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
                                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
                }
            }

            wp<IGraphicBufferConsumer> mConsumer;
            void onBuffersReleased() override {}
            void onSidebandStreamChanged() override {}
        };
        sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
        mConsumer->consumerConnect(listener, false);
        mConsumer->setConsumerName(String8{"MediaCodec.release"});
        mConsumer->setConsumerUsageBits(usage);
    }

    const sp<Surface> &getSurface() {
        return mSurface;
    }

private:
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;
    sp<Surface> mSurface;
};

////////////////////////////////////////////////////////////////////////////////

namespace {

enum {
    kWhatFillThisBuffer         = 'fill',
    kWhatDrainThisBuffer        = 'drai',
    kWhatEOS                    = 'eos ',
    kWhatStartCompleted         = 'Scom',
    kWhatStopCompleted          = 'scom',
    kWhatReleaseCompleted       = 'rcom',
    kWhatFlushCompleted         = 'fcom',
    kWhatError                  = 'erro',
    kWhatComponentAllocated     = 'cAll',
    kWhatComponentConfigured    = 'cCon',
    kWhatInputSurfaceCreated    = 'isfc',
    kWhatInputSurfaceAccepted   = 'isfa',
    kWhatSignaledInputEOS       = 'seos',
    kWhatOutputFramesRendered   = 'outR',
    kWhatOutputBuffersChanged   = 'outC',
    kWhatFirstTunnelFrameReady  = 'ftfR',
};

class BufferCallback : public CodecBase::BufferCallback {
public:
    explicit BufferCallback(const sp<AMessage> &notify);
    virtual ~BufferCallback() = default;

    virtual void onInputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
    virtual void onOutputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
private:
    const sp<AMessage> mNotify;
};

BufferCallback::BufferCallback(const sp<AMessage> &notify)
    : mNotify(notify) {}

void BufferCallback::onInputBufferAvailable(
        size_t index, const sp<MediaCodecBuffer> &buffer) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFillThisBuffer);
    notify->setSize("index", index);
    notify->setObject("buffer", buffer);
    notify->post();
}

void BufferCallback::onOutputBufferAvailable(
        size_t index, const sp<MediaCodecBuffer> &buffer) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatDrainThisBuffer);
    notify->setSize("index", index);
    notify->setObject("buffer", buffer);
    notify->post();
}

class CodecCallback : public CodecBase::CodecCallback {
public:
    explicit CodecCallback(const sp<AMessage> &notify);
    virtual ~CodecCallback() = default;

    virtual void onEos(status_t err) override;
    virtual void onStartCompleted() override;
    virtual void onStopCompleted() override;
    virtual void onReleaseCompleted() override;
    virtual void onFlushCompleted() override;
    virtual void onError(status_t err, enum ActionCode actionCode) override;
    virtual void onComponentAllocated(const char *componentName) override;
    virtual void onComponentConfigured(
            const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceCreated(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat,
            const sp<BufferProducerWrapper> &inputSurface) override;
    virtual void onInputSurfaceCreationFailed(status_t err) override;
    virtual void onInputSurfaceAccepted(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceDeclined(status_t err) override;
    virtual void onSignaledInputEOS(status_t err) override;
    virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
    virtual void onOutputBuffersChanged() override;
    virtual void onFirstTunnelFrameReady() override;
private:
    const sp<AMessage> mNotify;
};

CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}

void CodecCallback::onEos(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onStartCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatStartCompleted);
    notify->post();
}

void CodecCallback::onStopCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatStopCompleted);
    notify->post();
}

void CodecCallback::onReleaseCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatReleaseCompleted);
    notify->post();
}

void CodecCallback::onFlushCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFlushCompleted);
    notify->post();
}

void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatError);
    notify->setInt32("err", err);
    notify->setInt32("actionCode", actionCode);
    notify->post();
}

void CodecCallback::onComponentAllocated(const char *componentName) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatComponentAllocated);
    notify->setString("componentName", componentName);
    notify->post();
}

void CodecCallback::onComponentConfigured(
        const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatComponentConfigured);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->post();
}

void CodecCallback::onInputSurfaceCreated(
        const sp<AMessage> &inputFormat,
        const sp<AMessage> &outputFormat,
        const sp<BufferProducerWrapper> &inputSurface) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceCreated);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->setObject("input-surface", inputSurface);
    notify->post();
}

void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceCreated);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onInputSurfaceAccepted(
        const sp<AMessage> &inputFormat,
        const sp<AMessage> &outputFormat) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceAccepted);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->post();
}

void CodecCallback::onInputSurfaceDeclined(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceAccepted);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onSignaledInputEOS(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatSignaledInputEOS);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

void CodecCallback::onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatOutputFramesRendered);
    if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
        notify->post();
    }
}

void CodecCallback::onOutputBuffersChanged() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatOutputBuffersChanged);
    notify->post();
}

void CodecCallback::onFirstTunnelFrameReady() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFirstTunnelFrameReady);
    notify->post();
}

} // namespace

////////////////////////////////////////////////////////////////////////////////

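// Illustrative caller-side usage of the factory helpers below (a sketch, not
// part of this file; assumes the caller owns a started ALooper):
//
//     sp<ALooper> looper = new ALooper;
//     looper->setName("codec_looper");
//     looper->start();
//     status_t err = OK;
//     sp<MediaCodec> codec = MediaCodec::CreateByType(
//             looper, "video/avc", false /* encoder */, &err);
//     if (codec == NULL) { /* inspect err */ }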
// static
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid) {
    sp<AMessage> format;
    return CreateByType(looper, mime, encoder, err, pid, uid, format);
}

sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid, sp<AMessage> format) {
    Vector<AString> matchingCodecs;

    MediaCodecList::findMatchingCodecs(
            mime.c_str(),
            encoder,
            0,
            format,
            &matchingCodecs);

    if (err != NULL) {
        *err = NAME_NOT_FOUND;
    }
    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
        AString componentName = matchingCodecs[i];
        status_t ret = codec->init(componentName);
        if (err != NULL) {
            *err = ret;
        }
        if (ret == OK) {
            return codec;
        }
        ALOGD("Allocating component '%s' failed (%d), try next one.",
                componentName.c_str(), ret);
    }
    return NULL;
}

// static
sp<MediaCodec> MediaCodec::CreateByComponentName(
        const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);

    const status_t ret = codec->init(name);
    if (err != NULL) {
        *err = ret;
    }
    return ret == OK ? codec : NULL;  // NULL deallocates codec.
}

// static
sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
    sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
    if (pluginSurface != nullptr) {
        return pluginSurface;
    }

    OMXClient client;
    if (client.connect() != OK) {
        ALOGE("Failed to connect to OMX to create persistent input surface.");
        return NULL;
    }

    sp<IOMX> omx = client.interface();

    sp<IGraphicBufferProducer> bufferProducer;
    sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;

    status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);

    if (err != OK) {
        ALOGE("Failed to create persistent input surface.");
        return NULL;
    }

    return new PersistentSurface(bufferProducer, bufferSource);
}

MediaCodec::MediaCodec(
        const sp<ALooper> &looper, pid_t pid, uid_t uid,
        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
    : mState(UNINITIALIZED),
      mReleasedByResourceManager(false),
      mLooper(looper),
      mCodec(NULL),
      mReplyID(0),
      mFlags(0),
      mStickyError(OK),
      mSoftRenderer(NULL),
      mIsVideo(false),
      mVideoWidth(0),
      mVideoHeight(0),
      mRotationDegrees(0),
      mDequeueInputTimeoutGeneration(0),
      mDequeueInputReplyID(0),
      mDequeueOutputTimeoutGeneration(0),
      mDequeueOutputReplyID(0),
      mTunneledInputWidth(0),
      mTunneledInputHeight(0),
      mTunneled(false),
      mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
      mHaveInputSurface(false),
      mHavePendingInputBuffers(false),
      mCpuBoostRequested(false),
      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
      mIsSurfaceToScreen(false),
      mLatencyUnknown(0),
      mBytesEncoded(0),
      mEarliestEncodedPtsUs(INT64_MAX),
      mLatestEncodedPtsUs(INT64_MIN),
      mFramesEncoded(0),
      mNumLowLatencyEnables(0),
      mNumLowLatencyDisables(0),
      mIsLowLatencyModeOn(false),
      mIndexOfFirstFrameWhenLowLatencyOn(-1),
      mInputBufferCounter(0),
      mGetCodecBase(getCodecBase),
      mGetCodecInfo(getCodecInfo) {
    if (uid == kNoUid) {
        mUid = AIBinder_getCallingUid();
    } else {
        mUid = uid;
    }
    mResourceManagerProxy = new ResourceManagerServiceProxy(pid, mUid,
            ::ndk::SharedRefBase::make<ResourceManagerClient>(this));
    if (!mGetCodecBase) {
        mGetCodecBase = [](const AString &name, const char *owner) {
            return GetCodecBase(name, owner);
        };
    }
    if (!mGetCodecInfo) {
        mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
            *info = nullptr;
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            if (!mcl) {
                return NO_INIT;  // if called from Java should raise IOException
            }
            AString tmp = name;
            if (tmp.endsWith(".secure")) {
                tmp.erase(tmp.size() - 7, 7);
            }
            for (const AString &codecName : { name, tmp }) {
                ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
                if (codecIdx < 0) {
                    continue;
                }
                *info = mcl->getCodecInfo(codecIdx);
                return OK;
            }
            return NAME_NOT_FOUND;
        };
    }

    initMediametrics();
}

MediaCodec::~MediaCodec() {
    CHECK_EQ(mState, UNINITIALIZED);
    mResourceManagerProxy->removeClient();

    flushMediametrics();
}

void MediaCodec::initMediametrics() {
    if (mMetricsHandle == 0) {
        mMetricsHandle = mediametrics_create(kCodecKeyName);
    }

    mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);

    {
        Mutex::Autolock al(mRecentLock);
        for (int i = 0; i < kRecentLatencyFrames; i++) {
            mRecentSamples[i] = kRecentSampleInvalid;
        }
        mRecentHead = 0;
    }

    {
        Mutex::Autolock al(mLatencyLock);
        mBuffersInFlight.clear();
        mNumLowLatencyEnables = 0;
        mNumLowLatencyDisables = 0;
        mIsLowLatencyModeOn = false;
        mIndexOfFirstFrameWhenLowLatencyOn = -1;
        mInputBufferCounter = 0;
    }

    mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
}

void MediaCodec::updateMediametrics() {
    ALOGV("MediaCodec::updateMediametrics");
    if (mMetricsHandle == 0) {
        return;
    }

    if (mLatencyHist.getCount() != 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());

        if (kEmitHistogram) {
            // and the histogram itself
            std::string hist = mLatencyHist.emit();
            mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
        }
    }
    if (mLatencyUnknown > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
    }
    int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
    if (playbackDurationSec > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
    }
    if (mLifetimeStartNs > 0) {
        nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
        lifetime = lifetime / (1000 * 1000);    // emitted in ms, truncated not rounded
        mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
    }

    if (mBytesEncoded) {
        Mutex::Autolock al(mOutputStatsLock);

        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
        int64_t duration = 0;
        if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
            duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
        }
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
    }

    {
        Mutex::Autolock al(mLatencyLock);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
        mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
                              mIndexOfFirstFrameWhenLowLatencyOn);
    }
#if 0
    // enable for short term, only while debugging
    updateEphemeralMediametrics(mMetricsHandle);
#endif
}

void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
    ALOGD("MediaCodec::updateEphemeralMediametrics()");

    if (item == 0) {
        return;
    }

    Histogram recentHist;

    // build an empty histogram
    recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);

    // stuff it with the samples in the ring buffer
    {
        Mutex::Autolock al(mRecentLock);

        for (int i = 0; i < kRecentLatencyFrames; i++) {
            if (mRecentSamples[i] != kRecentSampleInvalid) {
                recentHist.insert(mRecentSamples[i]);
            }
        }
    }

    // spit the data (if any) into the supplied analytics record
    if (recentHist.getCount() != 0) {
        mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
        mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
        mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
        mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());

        if (kEmitHistogram) {
            // and the histogram itself
            std::string hist = recentHist.emit();
            mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
        }
    }
}

void MediaCodec::flushMediametrics() {
    updateMediametrics();
    if (mMetricsHandle != 0) {
        if (mediametrics_count(mMetricsHandle) > 0) {
            mediametrics_selfRecord(mMetricsHandle);
        }
        mediametrics_delete(mMetricsHandle);
        mMetricsHandle = 0;
    }
}

void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
    int32_t lowLatency = 0;
    if (msg->findInt32("low-latency", &lowLatency)) {
        Mutex::Autolock al(mLatencyLock);
        if (lowLatency > 0) {
            ++mNumLowLatencyEnables;
            // This is just an estimate since low latency mode change happens ONLY at key frame
            mIsLowLatencyModeOn = true;
        } else if (lowLatency == 0) {
            ++mNumLowLatencyDisables;
            // This is just an estimate since low latency mode change happens ONLY at key frame
            mIsLowLatencyModeOn = false;
        }
    }
}

constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string) {
    switch (state) {
        case TunnelPeekState::kEnabledNoBuffer:
            return "EnabledNoBuffer";
        case TunnelPeekState::kDisabledNoBuffer:
            return "DisabledNoBuffer";
        case TunnelPeekState::kBufferDecoded:
            return "BufferDecoded";
        case TunnelPeekState::kBufferRendered:
            return "BufferRendered";
        case TunnelPeekState::kDisabledQueued:
            return "DisabledQueued";
        case TunnelPeekState::kEnabledQueued:
            return "EnabledQueued";
        default:
            return default_string;
    }
}

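// "tunnel-peek" controls whether the first decoded frame of a tunneled stream
// may be shown before playback starts. Disabling it is only honored while no
// buffer has been decoded yet; re-enabling it after a buffer has already been
// decoded triggers rendering of that buffer immediately.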
void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
    int32_t tunnelPeek = 0;
    if (!msg->findInt32("tunnel-peek", &tunnelPeek)) {
        return;
    }

    TunnelPeekState previousState = mTunnelPeekState;
    if (tunnelPeek == 0) {
        switch (mTunnelPeekState) {
            case TunnelPeekState::kEnabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
                break;
            case TunnelPeekState::kEnabledQueued:
                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    } else {
        switch (mTunnelPeekState) {
            case TunnelPeekState::kDisabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
                break;
            case TunnelPeekState::kDisabledQueued:
                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
                break;
            case TunnelPeekState::kBufferDecoded:
                msg->setInt32("android._trigger-tunnel-peek", 1);
                mTunnelPeekState = TunnelPeekState::kBufferRendered;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    }

    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}

void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
    int what = 0;
    msg->findInt32("what", &what);
    if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
        static bool logged = false;
        if (!logged) {
            logged = true;
            ALOGE("updatePlaybackDuration: expected kWhatOutputFramesRendered (%d)", msg->what());
        }
        return;
    }
    // Playback duration only counts if the buffers are going to the screen.
    if (!mIsSurfaceToScreen) {
        return;
    }
    int64_t renderTimeNs;
    size_t index = 0;
    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
    }
}

bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
{
    if (nbuckets <= 0 || width <= 0) {
        return false;
    }

    // get histogram buckets
    if (nbuckets == mBucketCount && mBuckets != NULL) {
        // reuse our existing buffer
        memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
    } else {
        // get a new pre-zeroed buffer
        int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof(*mBuckets));
        if (newbuckets == NULL) {
            goto bad;
        }
        if (mBuckets != NULL)
            free(mBuckets);
        mBuckets = newbuckets;
    }

    mWidth = width;
    mFloor = floor;
    mCeiling = floor + nbuckets * width;
    mBucketCount = nbuckets;

    mMin = INT64_MAX;
    mMax = INT64_MIN;
    mSum = 0;
    mCount = 0;
    mBelow = mAbove = 0;

    return true;

  bad:
    if (mBuckets != NULL) {
        free(mBuckets);
        mBuckets = NULL;
    }

    return false;
}

void MediaCodec::Histogram::insert(int64_t sample)
{
    // histogram is not set up
    if (mBuckets == NULL) {
        return;
    }

    mCount++;
    mSum += sample;
    if (mMin > sample) mMin = sample;
    if (mMax < sample) mMax = sample;

    if (sample < mFloor) {
        mBelow++;
    } else if (sample >= mCeiling) {
        mAbove++;
    } else {
        int64_t slot = (sample - mFloor) / mWidth;
        CHECK(slot < mBucketCount);
        mBuckets[slot]++;
    }
    return;
}

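// For example, a histogram configured with floor 0 and width 5 whose two
// buckets hold 3 and 1 samples, with nothing out of range, emits "0,5,0{3,1}0".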
std::string MediaCodec::Histogram::emit()
{
    std::string value;
    char buffer[64];

    // emits: floor,width,below{bucket0,bucket1,...,bucketN}above
    // an unconfigured histogram will emit: 0,0,0{}0
    // XXX: is this best representation?
    snprintf(buffer, sizeof(buffer), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
             mFloor, mWidth, mBelow);
    value = buffer;
    for (int i = 0; i < mBucketCount; i++) {
        if (i != 0) {
            value = value + ",";
        }
        snprintf(buffer, sizeof(buffer), "%" PRId64, mBuckets[i]);
        value = value + buffer;
    }
    snprintf(buffer, sizeof(buffer), "}%" PRId64, mAbove);
    value = value + buffer;
    return value;
}

// when we send a buffer to the codec
void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {

    // only enqueue if we have a legitimate time
    if (presentationUs <= 0) {
        ALOGV("presentation time: %" PRId64, presentationUs);
        return;
    }

    if (mBatteryChecker != nullptr) {
        mBatteryChecker->onCodecActivity([this] () {
            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
        });
    }

    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
        mBytesInput += buffer->size();
        mFramesInput++;
    }

    const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
    BufferFlightTiming_t startdata = { presentationUs, nowNs };

    {
        // mutex access to mBuffersInFlight and other stats
        Mutex::Autolock al(mLatencyLock);


        // XXX: we *could* make sure that the time is later than the end of queue
        // as part of a consistency check...
        mBuffersInFlight.push_back(startdata);

        if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
            mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
        }
        ++mInputBufferCounter;
    }
}

// when we get a buffer back from the codec
void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {

    CHECK_NE(mState, UNINITIALIZED);

    if (mIsVideo && (mFlags & kFlagIsEncoder)) {
        int32_t flags = 0;
        (void) buffer->meta()->findInt32("flags", &flags);

        // some of these frames, we don't want to count
        // standalone EOS.... has an invalid timestamp
        if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
            mBytesEncoded += buffer->size();
            mFramesEncoded++;

            Mutex::Autolock al(mOutputStatsLock);
            int64_t timeUs = 0;
            if (buffer->meta()->findInt64("timeUs", &timeUs)) {
                if (timeUs > mLatestEncodedPtsUs) {
                    mLatestEncodedPtsUs = timeUs;
                }
                // can't chain as an else-if or this never triggers
                if (timeUs < mEarliestEncodedPtsUs) {
                    mEarliestEncodedPtsUs = timeUs;
                }
            }
        }
    }

    // mutex access to mBuffersInFlight and other stats
    Mutex::Autolock al(mLatencyLock);

    // how long this buffer took for the round trip through the codec
    // NB: pipelining can/will make these times larger. e.g., if each packet
    // is always 2 msec and we have 3 in flight at any given time, we're going to
    // see "6 msec" as an answer.

    // ignore stuff with no presentation time
    if (presentationUs <= 0) {
        ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
        mLatencyUnknown++;
        return;
    }

    if (mBatteryChecker != nullptr) {
        mBatteryChecker->onCodecActivity([this] () {
            mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
        });
    }

    BufferFlightTiming_t startdata;
    bool valid = false;
    while (mBuffersInFlight.size() > 0) {
        startdata = *mBuffersInFlight.begin();
        ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
              startdata.presentationUs, startdata.startedNs);
        if (startdata.presentationUs == presentationUs) {
            // a match
            ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            valid = true;
            break;
        } else if (startdata.presentationUs < presentationUs) {
            // we must have missed the match for this, drop it and keep looking
            ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
                  startdata.presentationUs, presentationUs);
            mBuffersInFlight.pop_front();
            continue;
        } else {
            // head is after, so we don't have a frame for ourselves
            ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
                  " we have nothing to pair with",
                  startdata.presentationUs, presentationUs);
            mLatencyUnknown++;
            return;
        }
    }
    if (!valid) {
        ALOGV("-- empty queue, so ignore that.");
        mLatencyUnknown++;
        return;
    }

    // now start our calculations
    const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
    int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;

    mLatencyHist.insert(latencyUs);

    // push into the recent samples
    {
        Mutex::Autolock al(mRecentLock);

        if (mRecentHead >= kRecentLatencyFrames) {
            mRecentHead = 0;
        }
        mRecentSamples[mRecentHead++] = latencyUs;
    }
}

// static
status_t MediaCodec::PostAndAwaitResponse(
        const sp<AMessage> &msg, sp<AMessage> *response) {
    status_t err = msg->postAndAwaitResponse(response);

    if (err != OK) {
        return err;
    }

    if (!(*response)->findInt32("err", &err)) {
        err = OK;
    }

    return err;
}

void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
    sp<AReplyToken> replyID;
    CHECK(msg->senderAwaitsResponse(&replyID));
    PostReplyWithError(replyID, err);
}

void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
    int32_t finalErr = err;
    if (mReleasedByResourceManager) {
        // override the err code if MediaCodec has been released by ResourceManager.
        finalErr = DEAD_OBJECT;
    }

    sp<AMessage> response = new AMessage;
    response->setInt32("err", finalErr);
    response->postReply(replyID);
}

static CodecBase *CreateCCodec() {
    return new CCodec;
}

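// GetCodecBase routes by owner and component-name prefix: "codec2"/"c2." maps
// to the Codec2 implementation (CCodec), "default"/"omx." to ACodec, and
// "android.filter." to MediaFilter; anything else returns NULL.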
//static
sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
    if (owner) {
        if (strcmp(owner, "default") == 0) {
            return new ACodec;
        } else if (strncmp(owner, "codec2", 6) == 0) {
            return CreateCCodec();
        }
    }

    if (name.startsWithIgnoreCase("c2.")) {
        return CreateCCodec();
    } else if (name.startsWithIgnoreCase("omx.")) {
        // at this time only ACodec specifies a mime type.
        return new ACodec;
    } else if (name.startsWithIgnoreCase("android.filter.")) {
        return new MediaFilter;
    } else {
        return NULL;
    }
}

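// CodecListCache snapshots the MediaCodecList into a map keyed by codec name
// the first time GetCodecListCache() is called (a function-local static), so
// later lookups by name do not have to walk the full list again.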
struct CodecListCache {
    CodecListCache()
        : mCodecInfoMap{[] {
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            size_t count = mcl->countCodecs();
            std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
            for (size_t i = 0; i < count; ++i) {
                sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
                codecInfoMap.emplace(info->getCodecName(), info);
            }
            return codecInfoMap;
        }()} {
    }

    const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
};

static const CodecListCache &GetCodecListCache() {
    static CodecListCache sCache{};
    return sCache;
}

status_t MediaCodec::init(const AString &name) {
    mResourceManagerProxy->init();

    // save init parameters for reset
    mInitName = name;

    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs; until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.

    mCodecInfo.clear();

    bool secureCodec = false;
    const char *owner = "";
    if (!name.startsWith("android.filter.")) {
        status_t err = mGetCodecInfo(name, &mCodecInfo);
        if (err != OK) {
            mCodec = NULL;  // remove the codec.
            return err;
        }
        if (mCodecInfo == nullptr) {
            ALOGE("Getting codec info with name '%s' failed", name.c_str());
            return NAME_NOT_FOUND;
        }
        secureCodec = name.endsWith(".secure");
        Vector<AString> mediaTypes;
        mCodecInfo->getSupportedMediaTypes(&mediaTypes);
        for (size_t i = 0; i < mediaTypes.size(); ++i) {
            if (mediaTypes[i].startsWith("video/")) {
                mIsVideo = true;
                break;
            }
        }
        owner = mCodecInfo->getOwnerName();
    }

    mCodec = mGetCodecBase(name, owner);
    if (mCodec == NULL) {
        ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
        return NAME_NOT_FOUND;
    }

    if (mIsVideo) {
        // video codec needs dedicated looper
        if (mCodecLooper == NULL) {
            mCodecLooper = new ALooper;
            mCodecLooper->setName("CodecLooper");
            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
        }

        mCodecLooper->registerHandler(mCodec);
    } else {
        mLooper->registerHandler(mCodec);
    }

    mLooper->registerHandler(this);

    mCodec->setCallback(
            std::unique_ptr<CodecBase::CodecCallback>(
                    new CodecCallback(new AMessage(kWhatCodecNotify, this))));
    mBufferChannel = mCodec->getBufferChannel();
    mBufferChannel->setCallback(
            std::unique_ptr<CodecBase::BufferCallback>(
                    new BufferCallback(new AMessage(kWhatCodecNotify, this))));

    sp<AMessage> msg = new AMessage(kWhatInit, this);
    if (mCodecInfo) {
        msg->setObject("codecInfo", mCodecInfo);
        // name may be different from mCodecInfo->getCodecName() if we stripped
        // ".secure"
    }
    msg->setString("name", name);

    if (mMetricsHandle != 0) {
        mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
        mediametrics_setCString(mMetricsHandle, kCodecMode,
                                mIsVideo ? kCodecModeVideo : kCodecModeAudio);
    }

    if (mIsVideo) {
        mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
    }

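    // Codec allocation can fail with a resource error when the device is out
    // of codec capacity. In that case ask the ResourceManagerService to
    // reclaim resources from lower-priority clients and retry, up to kMaxRetry
    // additional attempts.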
    status_t err;
    std::vector<MediaResourceParcel> resources;
    resources.push_back(MediaResource::CodecResource(secureCodec, mIsVideo));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resources on the first attempt.
            if (!mResourceManagerProxy->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}

status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
    sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
    msg->setMessage("callback", callback);

    sp<AMessage> response;
    return PostAndAwaitResponse(msg, &response);
}

status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
    sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
    msg->setMessage("on-frame-rendered", notify);
    return msg->post();
}

status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
    sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
    msg->setMessage("first-tunnel-frame-ready", notify);
    return msg->post();
}

/*
 * MediaFormat Shaping forward declarations
 * including the property name we use for control.
 */
static int enableMediaFormatShapingDefault = 1;
static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
                      bool reverse);

status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &nativeWindow,
        const sp<ICrypto> &crypto,
        uint32_t flags) {
    return configure(format, nativeWindow, crypto, NULL, flags);
}

status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        const sp<IDescrambler> &descrambler,
        uint32_t flags) {
    sp<AMessage> msg = new AMessage(kWhatConfigure, this);

    if (mMetricsHandle != 0) {
        int32_t profile = 0;
        if (format->findInt32("profile", &profile)) {
            mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
        }
        int32_t level = 0;
        if (format->findInt32("level", &level)) {
            mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
        }
        mediametrics_setInt32(mMetricsHandle, kCodecEncoder,
                              (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
    }

    if (mIsVideo) {
        // TODO: validity check log-session-id: it should be a 32-hex-digit string.
        format->findString("log-session-id", &mLogSessionId);
        format->findInt32("width", &mVideoWidth);
        format->findInt32("height", &mVideoHeight);
        if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
            mRotationDegrees = 0;
        }

        if (mMetricsHandle != 0) {
            mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mVideoWidth);
            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mVideoHeight);
            mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
            int32_t maxWidth = 0;
            if (format->findInt32("max-width", &maxWidth)) {
                mediametrics_setInt32(mMetricsHandle, kCodecMaxWidth, maxWidth);
            }
            int32_t maxHeight = 0;
            if (format->findInt32("max-height", &maxHeight)) {
                mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
            }
            int32_t colorFormat = -1;
            if (format->findInt32("color-format", &colorFormat)) {
                mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
            }
            float frameRate = -1.0;
            if (format->findFloat("frame-rate", &frameRate)) {
                mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
            }
            float captureRate = -1.0;
            if (format->findFloat("capture-rate", &captureRate)) {
                mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
            }
            float operatingRate = -1.0;
            if (format->findFloat("operating-rate", &operatingRate)) {
                mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
            }
            int32_t priority = -1;
            if (format->findInt32("priority", &priority)) {
                mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
            }
        }

        // Prevent possible integer overflow in downstream code.
        if (mVideoWidth < 0 || mVideoHeight < 0 ||
               (uint64_t)mVideoWidth * mVideoHeight > (uint64_t)INT32_MAX / 4) {
            ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
            return BAD_VALUE;
        }

    } else {
        if (mMetricsHandle != 0) {
            int32_t channelCount;
            if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
                mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
            }
            int32_t sampleRate;
            if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
                mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
            }
        }
    }

    if (flags & CONFIGURE_FLAG_ENCODE) {
        int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
                                                 enableMediaFormatShapingDefault);
        if (!enableShaping) {
            ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
            if (mMetricsHandle != 0) {
                mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
            }
        } else {
            (void) shapeMediaFormat(format, flags);
            // XXX: do we want to do this regardless of shaping enablement?
            mapFormat(mComponentName, format, nullptr, false);
        }
    }

    // push min/max QP to MediaMetrics after shaping
    if (mIsVideo && mMetricsHandle != 0) {
        int32_t qpIMin = -1;
        if (format->findInt32("video-qp-i-min", &qpIMin)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
        }
        int32_t qpIMax = -1;
        if (format->findInt32("video-qp-i-max", &qpIMax)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
        }
        int32_t qpPMin = -1;
        if (format->findInt32("video-qp-p-min", &qpPMin)) {
            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
1619 }
1620 int32_t qpPMax = -1;
1621 if (format->findInt32("video-qp-p-max", &qpPMax)) {
1622 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
1623 }
1624 int32_t qpBMin = -1;
1625 if (format->findInt32("video-qp-b-min", &qpBMin)) {
1626 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
1627 }
1628 int32_t qpBMax = -1;
1629 if (format->findInt32("video-qp-b-max", &qpBMax)) {
1630 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
1631 }
1632 }
1633
1634 updateLowLatency(format);
1635
1636 msg->setMessage("format", format);
1637 msg->setInt32("flags", flags);
1638 msg->setObject("surface", surface);
1639
1640 if (crypto != NULL || descrambler != NULL) {
1641 if (crypto != NULL) {
1642 msg->setPointer("crypto", crypto.get());
1643 } else {
1644 msg->setPointer("descrambler", descrambler.get());
1645 }
1646 if (mMetricsHandle != 0) {
1647 mediametrics_setInt32(mMetricsHandle, kCodecCrypto, 1);
1648 }
1649 } else if (mFlags & kFlagIsSecure) {
1650 ALOGW("Crypto or descrambler should be given for secure codec");
1651 }
1652
1653 // save msg for reset
1654 mConfigureMsg = msg;
1655
1656 sp<AMessage> callback = mCallback;
1657
1658 status_t err;
1659 std::vector<MediaResourceParcel> resources;
1660 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
1661 // Don't know the buffer size at this point, but it's fine to use 1 because
1662 // the reclaimResource call doesn't consider the requester's buffer size for now.
1663 resources.push_back(MediaResource::GraphicMemoryResource(1));
1664 for (int i = 0; i <= kMaxRetry; ++i) {
1665 sp<AMessage> response;
1666 err = PostAndAwaitResponse(msg, &response);
1667 if (err != OK && err != INVALID_OPERATION) {
1668 if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
1669 break;
1670 }
1671 // MediaCodec now sets its state to UNINITIALIZED upon any fatal error.
1672 // To maintain backward-compatibility, do a reset() to put codec
1673 // back into INITIALIZED state.
1674 // But don't reset if the err is INVALID_OPERATION, which means
1675 // the configure failure is due to wrong state.
1676
1677 ALOGE("configure failed with err 0x%08x, resetting...", err);
1678 status_t err2 = reset();
1679 if (err2 != OK) {
1680 ALOGE("retrying configure: failed to reset codec (%08x)", err2);
1681 break;
1682 }
1683 if (callback != nullptr) {
1684 err2 = setCallback(callback);
1685 if (err2 != OK) {
1686 ALOGE("retrying configure: failed to set callback (%08x)", err2);
1687 break;
1688 }
1689 }
1690 }
1691 if (!isResourceError(err)) {
1692 break;
1693 }
1694 }
1695
1696 return err;
1697 }
1698
1699 // Media Format Shaping support
1700 //
1701
1702 static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
1703 static bool sIsHandheld = true;
1704
1705 static bool connectFormatShaper() {
1706 static std::once_flag sCheckOnce;
1707
1708 ALOGV("connectFormatShaper...");
1709
1710 std::call_once(sCheckOnce, [&](){
1711
1712 void *libHandle = NULL;
1713 nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
1714
1715 // prefer any copy in the mainline module
1716 //
1717 android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
1718 AString libraryName = "libmediaformatshaper.so";
1719
1720 if (mediaNs != NULL) {
1721 static const android_dlextinfo dlextinfo = {
1722 .flags = ANDROID_DLEXT_USE_NAMESPACE,
1723 .library_namespace = mediaNs,
1724 };
1725
1726 AString libraryMainline = "/apex/com.android.media/";
1727 #if __LP64__
1728 libraryMainline.append("lib64/");
1729 #else
1730 libraryMainline.append("lib/");
1731 #endif
1732 libraryMainline.append(libraryName);
1733
1734 libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
1735 &dlextinfo);
1736
1737 if (libHandle != NULL) {
1738 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
1739 dlsym(libHandle, "shaper_ops");
1740 } else {
1741 ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
1742 libraryMainline.c_str());
1743 }
1744 } else {
1745 ALOGV("connectFormatShaper: couldn't find media namespace.");
1746 }
1747
1748 // fall back to the system partition, if present.
1749 //
1750 if (sShaperOps == NULL) {
1751
1752 libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
1753
1754 if (libHandle != NULL) {
1755 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
1756 dlsym(libHandle, "shaper_ops");
1757 } else {
1758 ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
1759 }
1760 }
1761
1762 if (sShaperOps != nullptr
1763 && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
1764 ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
1765 sShaperOps->version);
1766 sShaperOps = nullptr;
1767 }
1768
1769 if (sShaperOps != nullptr) {
1770 ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
1771 }
1772
1773 nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
1774 ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
1775 (loading_finished - loading_started)/1000);
1776
1777
1778 // we also want to know whether this is a handheld device
1779 // start with assumption that the device is handheld.
1780 sIsHandheld = true;
1781 sp<IServiceManager> serviceMgr = defaultServiceManager();
1782 sp<content::pm::IPackageManagerNative> packageMgr;
1783 if (serviceMgr.get() != nullptr) {
1784 sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
1785 packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
1786 }
1787 // if we didn't get serviceMgr, we'll leave packageMgr as default null
1788 if (packageMgr != nullptr) {
1789
1790 // MUST have these
1791 static const String16 featuresNeeded[] = {
1792 String16("android.hardware.touchscreen")
1793 };
1794 // these must be present to be a handheld
1795 for (::android::String16 required : featuresNeeded) {
1796 bool hasFeature = false;
1797 binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
1798 if (!status.isOk()) {
1799 ALOGE("%s: hasSystemFeature failed: %s",
1800 __func__, status.exceptionMessage().c_str());
1801 continue;
1802 }
1803 ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
1804 if (!hasFeature) {
1805 ALOGV("... which means we are not handheld");
1806 sIsHandheld = false;
1807 break;
1808 }
1809 }
1810
1811 // MUST NOT have these
1812 static const String16 featuresDisallowed[] = {
1813 String16("android.hardware.type.automotive"),
1814 String16("android.hardware.type.television"),
1815 String16("android.hardware.type.watch")
1816 };
1817 // any of these present -- we aren't a handheld
1818 for (::android::String16 forbidden : featuresDisallowed) {
1819 bool hasFeature = false;
1820 binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
1821 if (!status.isOk()) {
1822 ALOGE("%s: hasSystemFeature failed: %s",
1823 __func__, status.exceptionMessage().c_str());
1824 continue;
1825 }
1826 ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
1827 if (hasFeature) {
1828 ALOGV("... which means we are not handheld");
1829 sIsHandheld = false;
1830 break;
1831 }
1832 }
1833 }
1834
1835 });
1836
1837 return true;
1838 }
1839
1840
1841 #if 0
1842 // a construct to force the above dlopen() to run very early.
1843 // goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
1844 // failure of this means that cold start of those apps is slower by the time to dlopen()
1845 // TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
1846 //
1847 static bool forceEarlyLoadingShaper = connectFormatShaper();
1848 #endif
1849
1850 // parse the codec's properties: mapping, whether it meets min quality, etc
1851 // and pass them into the video quality code
1852 //
1853 static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
1854 sp<MediaCodecInfo> codecInfo, AString mediaType) {
1855
1856 sp<MediaCodecInfo::Capabilities> capabilities =
1857 codecInfo->getCapabilitiesFor(mediaType.c_str());
1858 if (capabilities == nullptr) {
1859 ALOGI("no capabilities as part of the codec?");
1860 } else {
1861 const sp<AMessage> &details = capabilities->getDetails();
1862 AString mapTarget;
1863 int count = details->countEntries();
1864 for(int ix = 0; ix < count; ix++) {
1865 AMessage::Type entryType;
1866 const char *mapSrc = details->getEntryNameAt(ix, &entryType);
1867 // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
1868 //
1869 static const char *featurePrefix = "feature-";
1870 static const int featurePrefixLen = strlen(featurePrefix);
1871 static const char *tuningPrefix = "tuning-";
1872 static const int tuningPrefixLen = strlen(tuningPrefix);
1873 static const char *mappingPrefix = "mapping-";
1874 static const int mappingPrefixLen = strlen(mappingPrefix);
1875
1876 if (mapSrc == NULL) {
1877 continue;
1878 } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
1879 int32_t intValue;
1880 if (details->findInt32(mapSrc, &intValue)) {
1881 ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
1882 (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
1883 intValue);
1884 }
1885 continue;
1886 } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
1887 AString value;
1888 if (details->findString(mapSrc, &value)) {
1889 ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
1890 (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
1891 value.c_str());
1892 }
1893 continue;
1894 } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
1895 AString target;
1896 if (details->findString(mapSrc, &target)) {
1897 ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
1898 target.c_str());
1899 // key is really "kind-key"
1900 // separate that, so setMap() sees the triple kind, key, value
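// illustrative (hypothetical) example: a details key "mapping-tuning-max-qp"
// yields kind "tuning" and key "max-qp", mapped to the value found in 'target'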
1901 const char *kind = &mapSrc[mappingPrefixLen];
1902 const char *sep = strchr(kind, '-');
1903 if (sep != NULL) {
1904 const char *key = sep + 1;
1905 std::string xkind = std::string(kind, sep - kind);
1906 (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
1907 key, target.c_str());
1908 }
1909 }
1910 }
1911 }
1912 }
1913
1914 // we also carry in the codec description whether we are on a handheld device.
1915 // this info is eventually used by both the Codec and the C2 machinery to inform
1916 // the underlying codec whether to do any shaping.
1917 //
1918 if (sIsHandheld) {
1919 // set if we are indeed a handheld device (or, in the future, 'any eligible device');
1920 // left unset on devices that aren't eligible for minimum quality enforcement.
1921 (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
1922 // strictly speaking, it's a tuning, but tunings are strings while features store ints
1923 (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
1924 }
1925 }
1926
1927 status_t MediaCodec::setupFormatShaper(AString mediaType) {
1928 ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
1929 mComponentName.c_str(), mediaType.c_str());
1930
1931 nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
1932
1933 // someone might have beaten us to it.
1934 mediaformatshaper::shaperHandle_t shaperHandle;
1935 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
1936 if (shaperHandle != nullptr) {
1937 ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
1938 return OK;
1939 }
1940
1941 // we get to build & register one
1942 shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
1943 if (shaperHandle == nullptr) {
1944 ALOGW("unable to create a shaper for cocodec %s mediaType %s",
1945 mComponentName.c_str(), mediaType.c_str());
1946 return OK;
1947 }
1948
1949 (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
1950
1951 shaperHandle = sShaperOps->registerShaper(shaperHandle,
1952 mComponentName.c_str(), mediaType.c_str());
1953
1954 nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
1955 ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
1956 mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
1957
1958 return OK;
1959 }
1960
1961
1962 // Format Shaping
1963 // Mapping and Manipulation of encoding parameters
1964 //
1965 // All of these decisions are pushed into the shaper instead of here within MediaCodec.
1966 // this includes decisions based on whether the codec implements minimum quality bars
1967 // itself or needs to be shaped outside of the codec.
1968 // This keeps all those decisions in one place.
1969 // It also means that we push some extra decision information (is this a handheld device
1970 // or one that is otherwise eligible for minimum quality manipulation, which generational
1971 // quality target is in force, etc). This allows those values to be cached in the
1972 // per-codec structures that are done 1 time within a process instead of for each
1973 // codec instantiation.
1974 //
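// Rough call flow through the hooks below:
//   configure() -> shapeMediaFormat()
//     -> connectFormatShaper()  (dlopen + resolve "shaper_ops", once per process)
//     -> setupFormatShaper()    (create/register a shaper node per codec+mediaType)
//     -> sShaperOps->shapeFormat(), then fold the returned deltas into the format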
1975
1976 status_t MediaCodec::shapeMediaFormat(
1977 const sp<AMessage> &format,
1978 uint32_t flags) {
1979 ALOGV("shapeMediaFormat entry");
1980
1981 if (!(flags & CONFIGURE_FLAG_ENCODE)) {
1982 ALOGW("shapeMediaFormat: not encoder");
1983 return OK;
1984 }
1985 if (mCodecInfo == NULL) {
1986 ALOGW("shapeMediaFormat: no codecinfo");
1987 return OK;
1988 }
1989
1990 AString mediaType;
1991 if (!format->findString("mime", &mediaType)) {
1992 ALOGW("shapeMediaFormat: no mediaType information");
1993 return OK;
1994 }
1995
1996 // make sure we have the function entry points for the shaper library
1997 //
1998
1999 connectFormatShaper();
2000 if (sShaperOps == nullptr) {
2001 ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
2002 return OK;
2003 }
2004
2005 // find the shaper information for this codec+mediaType pair
2006 //
2007 mediaformatshaper::shaperHandle_t shaperHandle;
2008 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2009 if (shaperHandle == nullptr) {
2010 setupFormatShaper(mediaType);
2011 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2012 }
2013 if (shaperHandle == nullptr) {
2014 ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
2015 mComponentName.c_str(), mediaType.c_str());
2016 return OK;
2017 }
2018
2019 // run the shaper
2020 //
2021
2022 ALOGV("Shaping input: %s", format->debugString(0).c_str());
2023
2024 sp<AMessage> updatedFormat = format->dup();
2025 AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
2026
2027 int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
2028 if (result == 0) {
2029 AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
2030
2031 sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
2032 size_t changeCount = deltas->countEntries();
2033 ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
2034 if (mMetricsHandle != 0) {
2035 mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
2036 }
2037 if (changeCount > 0) {
2038 if (mMetricsHandle != 0) {
2039 // save some old properties before we fold in the new ones
2040 int32_t bitrate;
2041 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
2042 mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
2043 }
2044 int32_t qpIMin = -1;
2045 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
2046 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
2047 }
2048 int32_t qpIMax = -1;
2049 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
2050 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
2051 }
2052 int32_t qpPMin = -1;
2053 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
2054 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
2055 }
2056 int32_t qpPMax = -1;
2057 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
2058 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
2059 }
2060 int32_t qpBMin = -1;
2061 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
2062 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
2063 }
2064 int32_t qpBMax = -1;
2065 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
2066 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
2067 }
2068 }
2069 // NB: for any field in both format and deltas, the deltas copy wins
2070 format->extend(deltas);
2071 }
2072 }
2073
2074 AMediaFormat_delete(updatedNdkFormat);
2075 return OK;
2076 }
2077
2078 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2079 bool reverse) {
2080 AString mediaType;
2081 if (!format->findString("mime", &mediaType)) {
2082 ALOGW("mapFormat: no mediaType information");
2083 return;
2084 }
2085 ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
2086 mediaType.c_str(), kind ? kind : "<all>", reverse);
2087
2088 // make sure we have the function entry points for the shaper library
2089 //
2090
2091 #if 0
2092 // play it faster: only do the mapping if the shaper library has already been loaded
2093 connectFormatShaper();
2094 #endif
2095 if (sShaperOps == nullptr) {
2096 ALOGV("mapFormat: no MediaFormatShaper hooks available");
2097 return;
2098 }
2099
2100 // find the shaper information for this codec+mediaType pair
2101 //
2102 mediaformatshaper::shaperHandle_t shaperHandle;
2103 shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
2104 if (shaperHandle == nullptr) {
2105 ALOGV("mapFormat: no shaper handle");
2106 return;
2107 }
2108
2109 const char **mappings;
2110 if (reverse)
2111 mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
2112 else
2113 mappings = sShaperOps->getMappings(shaperHandle, kind);
2114
2115 if (mappings == nullptr) {
2116 ALOGV("no mappings returned");
2117 return;
2118 }
2119
2120 ALOGV("Pre-mapping: %s", format->debugString(2).c_str());
2121 // do the mapping
2122 //
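// 'mappings' is a flat array of (from, to) name pairs terminated by a nullptr
// in the 'from' slot; each matching entry in 'format' is renamed accordingly.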
2123 int entries = format->countEntries();
2124 for (int i = 0; ; i += 2) {
2125 if (mappings[i] == nullptr) {
2126 break;
2127 }
2128
2129 size_t ix = format->findEntryByName(mappings[i]);
2130 if (ix < entries) {
2131 ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
2132 status_t status = format->setEntryNameAt(ix, mappings[i+1]);
2133 if (status != OK) {
2134 ALOGW("Unable to map from '%s' to '%s': status %d",
2135 mappings[i], mappings[i+1], status);
2136 }
2137 }
2138 }
2139 ALOGV("Post-mapping: %s", format->debugString(2).c_str());
2140
2141
2142 // reclaim the mapping memory
2143 for (int i = 0; ; i += 2) {
2144 if (mappings[i] == nullptr) {
2145 break;
2146 }
2147 free((void*)mappings[i]);
2148 free((void*)mappings[i + 1]);
2149 }
2150 free(mappings);
2151 mappings = nullptr;
2152 }
2153
2154 //
2155 // end of Format Shaping hooks within MediaCodec
2156 //
2157
2158 status_t MediaCodec::releaseCrypto()
2159 {
2160 ALOGV("releaseCrypto");
2161
2162 sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
2163
2164 sp<AMessage> response;
2165 status_t status = msg->postAndAwaitResponse(&response);
2166
2167 if (status == OK && response != NULL) {
2168 CHECK(response->findInt32("status", &status));
2169 ALOGV("releaseCrypto ret: %d ", status);
2170 }
2171 else {
2172 ALOGE("releaseCrypto err: %d", status);
2173 }
2174
2175 return status;
2176 }
2177
2178 void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
2179 {
2180 status_t status = INVALID_OPERATION;
2181 if (mCrypto != NULL) {
2182 ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
2183 mBufferChannel->setCrypto(NULL);
2184 // TODO change to ALOGV
2185 ALOGD("onReleaseCrypto: [before clear] mCrypto: %p (%d)",
2186 mCrypto.get(), mCrypto->getStrongCount());
2187 mCrypto.clear();
2188
2189 status = OK;
2190 }
2191 else {
2192 ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
2193 }
2194
2195 sp<AMessage> response = new AMessage;
2196 response->setInt32("status", status);
2197
2198 sp<AReplyToken> replyID;
2199 CHECK(msg->senderAwaitsResponse(&replyID));
2200 response->postReply(replyID);
2201 }
2202
2203 status_t MediaCodec::setInputSurface(
2204 const sp<PersistentSurface> &surface) {
2205 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
2206 msg->setObject("input-surface", surface.get());
2207
2208 sp<AMessage> response;
2209 return PostAndAwaitResponse(msg, &response);
2210 }
2211
2212 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
2213 sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
2214 msg->setObject("surface", surface);
2215
2216 sp<AMessage> response;
2217 return PostAndAwaitResponse(msg, &response);
2218 }
2219
2220 status_t MediaCodec::createInputSurface(
2221 sp<IGraphicBufferProducer>* bufferProducer) {
2222 sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
2223
2224 sp<AMessage> response;
2225 status_t err = PostAndAwaitResponse(msg, &response);
2226 if (err == NO_ERROR) {
2227 // unwrap the sp<IGraphicBufferProducer>
2228 sp<RefBase> obj;
2229 bool found = response->findObject("input-surface", &obj);
2230 CHECK(found);
2231 sp<BufferProducerWrapper> wrapper(
2232 static_cast<BufferProducerWrapper*>(obj.get()));
2233 *bufferProducer = wrapper->getBufferProducer();
2234 } else {
2235 ALOGW("createInputSurface failed, err=%d", err);
2236 }
2237 return err;
2238 }
2239
2240 uint64_t MediaCodec::getGraphicBufferSize() {
2241 if (!mIsVideo) {
2242 return 0;
2243 }
2244
2245 uint64_t size = 0;
2246 size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
2247 for (size_t i = 0; i < portNum; ++i) {
2248 // TODO: this is just an estimation, we should get the real buffer size from ACodec.
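// rough worst-case estimate: ~1.5 bytes per pixel per buffer (as for 8-bit YUV 4:2:0)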
2249 size += mPortBuffers[i].size() * mVideoWidth * mVideoHeight * 3 / 2;
2250 }
2251 return size;
2252 }
2253
2254 status_t MediaCodec::start() {
2255 sp<AMessage> msg = new AMessage(kWhatStart, this);
2256
2257 sp<AMessage> callback;
2258
2259 status_t err;
2260 std::vector<MediaResourceParcel> resources;
2261 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
2262 // Don't know the buffer size at this point, but it's fine to use 1 because
2263 // the reclaimResource call doesn't consider the requester's buffer size for now.
2264 resources.push_back(MediaResource::GraphicMemoryResource(1));
2265 for (int i = 0; i <= kMaxRetry; ++i) {
2266 if (i > 0) {
2267 // Don't try to reclaim resources on the first attempt.
2268 if (!mResourceManagerProxy->reclaimResource(resources)) {
2269 break;
2270 }
2271 // Recover codec from previous error before retry start.
2272 err = reset();
2273 if (err != OK) {
2274 ALOGE("retrying start: failed to reset codec");
2275 break;
2276 }
2277 sp<AMessage> response;
2278 err = PostAndAwaitResponse(mConfigureMsg, &response);
2279 if (err != OK) {
2280 ALOGE("retrying start: failed to configure codec");
2281 break;
2282 }
2283 if (callback != nullptr) {
2284 err = setCallback(callback);
2285 if (err != OK) {
2286 ALOGE("retrying start: failed to set callback");
2287 break;
2288 }
2289 ALOGD("succeed to set callback for reclaim");
2290 }
2291 }
2292
2293 // Keep callback message after the first iteration if necessary.
2294 if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
2295 callback = mCallback;
2296 ALOGD("keep callback message for reclaim");
2297 }
2298
2299 sp<AMessage> response;
2300 err = PostAndAwaitResponse(msg, &response);
2301 if (!isResourceError(err)) {
2302 break;
2303 }
2304 }
2305 return err;
2306 }
2307
2308 status_t MediaCodec::stop() {
2309 sp<AMessage> msg = new AMessage(kWhatStop, this);
2310
2311 sp<AMessage> response;
2312 return PostAndAwaitResponse(msg, &response);
2313 }
2314
2315 bool MediaCodec::hasPendingBuffer(int portIndex) {
2316 return std::any_of(
2317 mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
2318 [](const BufferInfo &info) { return info.mOwnedByClient; });
2319 }
2320
2321 bool MediaCodec::hasPendingBuffer() {
2322 return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
2323 }
2324
2325 status_t MediaCodec::reclaim(bool force) {
2326 ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
2327 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2328 msg->setInt32("reclaimed", 1);
2329 msg->setInt32("force", force ? 1 : 0);
2330
2331 sp<AMessage> response;
2332 status_t ret = PostAndAwaitResponse(msg, &response);
2333 if (ret == -ENOENT) {
2334 ALOGD("MediaCodec looper is gone, skip reclaim");
2335 ret = OK;
2336 }
2337 return ret;
2338 }
2339
2340 status_t MediaCodec::release() {
2341 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2342 sp<AMessage> response;
2343 return PostAndAwaitResponse(msg, &response);
2344 }
2345
2346 status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
2347 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2348 msg->setMessage("async", notify);
2349 sp<AMessage> response;
2350 return PostAndAwaitResponse(msg, &response);
2351 }
2352
2353 status_t MediaCodec::reset() {
2354 /* When external-facing MediaCodec object is created,
2355 it is already initialized. Thus, reset is essentially
2356 release() followed by init(), plus clearing the state */
2357
2358 status_t err = release();
2359
2360 // unregister handlers
2361 if (mCodec != NULL) {
2362 if (mCodecLooper != NULL) {
2363 mCodecLooper->unregisterHandler(mCodec->id());
2364 } else {
2365 mLooper->unregisterHandler(mCodec->id());
2366 }
2367 mCodec = NULL;
2368 }
2369 mLooper->unregisterHandler(id());
2370
2371 mFlags = 0; // clear all flags
2372 mStickyError = OK;
2373
2374 // reset state not reset by setState(UNINITIALIZED)
2375 mDequeueInputReplyID = 0;
2376 mDequeueOutputReplyID = 0;
2377 mDequeueInputTimeoutGeneration = 0;
2378 mDequeueOutputTimeoutGeneration = 0;
2379 mHaveInputSurface = false;
2380
2381 if (err == OK) {
2382 err = init(mInitName);
2383 }
2384 return err;
2385 }
2386
2387 status_t MediaCodec::queueInputBuffer(
2388 size_t index,
2389 size_t offset,
2390 size_t size,
2391 int64_t presentationTimeUs,
2392 uint32_t flags,
2393 AString *errorDetailMsg) {
2394 if (errorDetailMsg != NULL) {
2395 errorDetailMsg->clear();
2396 }
2397
2398 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2399 msg->setSize("index", index);
2400 msg->setSize("offset", offset);
2401 msg->setSize("size", size);
2402 msg->setInt64("timeUs", presentationTimeUs);
2403 msg->setInt32("flags", flags);
2404 msg->setPointer("errorDetailMsg", errorDetailMsg);
2405
2406 sp<AMessage> response;
2407 return PostAndAwaitResponse(msg, &response);
2408 }
2409
2410 status_t MediaCodec::queueSecureInputBuffer(
2411 size_t index,
2412 size_t offset,
2413 const CryptoPlugin::SubSample *subSamples,
2414 size_t numSubSamples,
2415 const uint8_t key[16],
2416 const uint8_t iv[16],
2417 CryptoPlugin::Mode mode,
2418 const CryptoPlugin::Pattern &pattern,
2419 int64_t presentationTimeUs,
2420 uint32_t flags,
2421 AString *errorDetailMsg) {
2422 if (errorDetailMsg != NULL) {
2423 errorDetailMsg->clear();
2424 }
2425
2426 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2427 msg->setSize("index", index);
2428 msg->setSize("offset", offset);
2429 msg->setPointer("subSamples", (void *)subSamples);
2430 msg->setSize("numSubSamples", numSubSamples);
2431 msg->setPointer("key", (void *)key);
2432 msg->setPointer("iv", (void *)iv);
2433 msg->setInt32("mode", mode);
2434 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
2435 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
2436 msg->setInt64("timeUs", presentationTimeUs);
2437 msg->setInt32("flags", flags);
2438 msg->setPointer("errorDetailMsg", errorDetailMsg);
2439
2440 sp<AMessage> response;
2441 status_t err = PostAndAwaitResponse(msg, &response);
2442
2443 return err;
2444 }
2445
2446 status_t MediaCodec::queueBuffer(
2447 size_t index,
2448 const std::shared_ptr<C2Buffer> &buffer,
2449 int64_t presentationTimeUs,
2450 uint32_t flags,
2451 const sp<AMessage> &tunings,
2452 AString *errorDetailMsg) {
2453 if (errorDetailMsg != NULL) {
2454 errorDetailMsg->clear();
2455 }
2456
2457 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2458 msg->setSize("index", index);
2459 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
2460 new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
2461 msg->setObject("c2buffer", obj);
2462 msg->setInt64("timeUs", presentationTimeUs);
2463 msg->setInt32("flags", flags);
2464 msg->setMessage("tunings", tunings);
2465 msg->setPointer("errorDetailMsg", errorDetailMsg);
2466
2467 sp<AMessage> response;
2468 status_t err = PostAndAwaitResponse(msg, &response);
2469
2470 return err;
2471 }
2472
2473 status_t MediaCodec::queueEncryptedBuffer(
2474 size_t index,
2475 const sp<hardware::HidlMemory> &buffer,
2476 size_t offset,
2477 const CryptoPlugin::SubSample *subSamples,
2478 size_t numSubSamples,
2479 const uint8_t key[16],
2480 const uint8_t iv[16],
2481 CryptoPlugin::Mode mode,
2482 const CryptoPlugin::Pattern &pattern,
2483 int64_t presentationTimeUs,
2484 uint32_t flags,
2485 const sp<AMessage> &tunings,
2486 AString *errorDetailMsg) {
2487 if (errorDetailMsg != NULL) {
2488 errorDetailMsg->clear();
2489 }
2490
2491 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2492 msg->setSize("index", index);
2493 sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
2494 new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
2495 msg->setObject("memory", memory);
2496 msg->setSize("offset", offset);
2497 msg->setPointer("subSamples", (void *)subSamples);
2498 msg->setSize("numSubSamples", numSubSamples);
2499 msg->setPointer("key", (void *)key);
2500 msg->setPointer("iv", (void *)iv);
2501 msg->setInt32("mode", mode);
2502 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
2503 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
2504 msg->setInt64("timeUs", presentationTimeUs);
2505 msg->setInt32("flags", flags);
2506 msg->setMessage("tunings", tunings);
2507 msg->setPointer("errorDetailMsg", errorDetailMsg);
2508
2509 sp<AMessage> response;
2510 status_t err = PostAndAwaitResponse(msg, &response);
2511
2512 return err;
2513 }
2514
2515 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
2516 sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
2517 msg->setInt64("timeoutUs", timeoutUs);
2518
2519 sp<AMessage> response;
2520 status_t err;
2521 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2522 return err;
2523 }
2524
2525 CHECK(response->findSize("index", index));
2526
2527 return OK;
2528 }
2529
2530 status_t MediaCodec::dequeueOutputBuffer(
2531 size_t *index,
2532 size_t *offset,
2533 size_t *size,
2534 int64_t *presentationTimeUs,
2535 uint32_t *flags,
2536 int64_t timeoutUs) {
2537 sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
2538 msg->setInt64("timeoutUs", timeoutUs);
2539
2540 sp<AMessage> response;
2541 status_t err;
2542 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2543 return err;
2544 }
2545
2546 CHECK(response->findSize("index", index));
2547 CHECK(response->findSize("offset", offset));
2548 CHECK(response->findSize("size", size));
2549 CHECK(response->findInt64("timeUs", presentationTimeUs));
2550 CHECK(response->findInt32("flags", (int32_t *)flags));
2551
2552 return OK;
2553 }
2554
2555 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
2556 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2557 msg->setSize("index", index);
2558 msg->setInt32("render", true);
2559
2560 sp<AMessage> response;
2561 return PostAndAwaitResponse(msg, &response);
2562 }
2563
2564 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
2565 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2566 msg->setSize("index", index);
2567 msg->setInt32("render", true);
2568 msg->setInt64("timestampNs", timestampNs);
2569
2570 sp<AMessage> response;
2571 return PostAndAwaitResponse(msg, &response);
2572 }
2573
2574 status_t MediaCodec::releaseOutputBuffer(size_t index) {
2575 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2576 msg->setSize("index", index);
2577
2578 sp<AMessage> response;
2579 return PostAndAwaitResponse(msg, &response);
2580 }
2581
2582 status_t MediaCodec::signalEndOfInputStream() {
2583 sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
2584
2585 sp<AMessage> response;
2586 return PostAndAwaitResponse(msg, &response);
2587 }
2588
2589 status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
2590 sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
2591
2592 sp<AMessage> response;
2593 status_t err;
2594 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2595 return err;
2596 }
2597
2598 CHECK(response->findMessage("format", format));
2599
2600 return OK;
2601 }
2602
2603 status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
2604 sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
2605
2606 sp<AMessage> response;
2607 status_t err;
2608 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2609 return err;
2610 }
2611
2612 CHECK(response->findMessage("format", format));
2613
2614 return OK;
2615 }
2616
2617 status_t MediaCodec::getName(AString *name) const {
2618 sp<AMessage> msg = new AMessage(kWhatGetName, this);
2619
2620 sp<AMessage> response;
2621 status_t err;
2622 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2623 return err;
2624 }
2625
2626 CHECK(response->findString("name", name));
2627
2628 return OK;
2629 }
2630
2631 status_t MediaCodec::getCodecInfo(sp<MediaCodecInfo> *codecInfo) const {
2632 sp<AMessage> msg = new AMessage(kWhatGetCodecInfo, this);
2633
2634 sp<AMessage> response;
2635 status_t err;
2636 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2637 return err;
2638 }
2639
2640 sp<RefBase> obj;
2641 CHECK(response->findObject("codecInfo", &obj));
2642 *codecInfo = static_cast<MediaCodecInfo *>(obj.get());
2643
2644 return OK;
2645 }
2646
2647 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
2648
2649 reply = 0;
2650
2651 // shouldn't happen, but be safe
2652 if (mMetricsHandle == 0) {
2653 return UNKNOWN_ERROR;
2654 }
2655
2656 // update any in-flight data that's not carried within the record
2657 updateMediametrics();
2658
2659 // send it back to the caller.
2660 reply = mediametrics_dup(mMetricsHandle);
2661
2662 updateEphemeralMediametrics(reply);
2663
2664 return OK;
2665 }
2666
2667 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
2668 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
2669 msg->setInt32("portIndex", kPortIndexInput);
2670 msg->setPointer("buffers", buffers);
2671
2672 sp<AMessage> response;
2673 return PostAndAwaitResponse(msg, &response);
2674 }
2675
2676 status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
2677 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
2678 msg->setInt32("portIndex", kPortIndexOutput);
2679 msg->setPointer("buffers", buffers);
2680
2681 sp<AMessage> response;
2682 return PostAndAwaitResponse(msg, &response);
2683 }
2684
2685 status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
2686 sp<AMessage> format;
2687 return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
2688 }
2689
2690 status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
2691 sp<MediaCodecBuffer> buffer;
2692 return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
2693 }
2694
2695 status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
2696 sp<AMessage> format;
2697 return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
2698 }
2699
2700 bool MediaCodec::isExecuting() const {
2701 return mState == STARTED || mState == FLUSHED;
2702 }
2703
2704 status_t MediaCodec::getBufferAndFormat(
2705 size_t portIndex, size_t index,
2706 sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
2707 // handled directly here under mBufferLock instead of posting to the looper (avoids a context switch)
2708 if (mReleasedByResourceManager) {
2709 ALOGE("getBufferAndFormat - resource already released");
2710 return DEAD_OBJECT;
2711 }
2712
2713 if (buffer == NULL) {
2714 ALOGE("getBufferAndFormat - null MediaCodecBuffer");
2715 return INVALID_OPERATION;
2716 }
2717
2718 if (format == NULL) {
2719 ALOGE("getBufferAndFormat - null AMessage");
2720 return INVALID_OPERATION;
2721 }
2722
2723 buffer->clear();
2724 format->clear();
2725
2726 if (!isExecuting()) {
2727 ALOGE("getBufferAndFormat - not executing");
2728 return INVALID_OPERATION;
2729 }
2730
2731 // we do not want mPortBuffers to change during this section
2732 // we also don't want mOwnedByClient to change during this
2733 Mutex::Autolock al(mBufferLock);
2734
2735 std::vector<BufferInfo> &buffers = mPortBuffers[portIndex];
2736 if (index >= buffers.size()) {
2737 ALOGE("getBufferAndFormat - trying to get buffer with "
2738 "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
2739 return INVALID_OPERATION;
2740 }
2741
2742 const BufferInfo &info = buffers[index];
2743 if (!info.mOwnedByClient) {
2744 ALOGE("getBufferAndFormat - invalid operation "
2745 "(the index %zu is not owned by client)", index);
2746 return INVALID_OPERATION;
2747 }
2748
2749 *buffer = info.mData;
2750 *format = info.mData->format();
2751
2752 return OK;
2753 }
2754
2755 status_t MediaCodec::flush() {
2756 sp<AMessage> msg = new AMessage(kWhatFlush, this);
2757
2758 sp<AMessage> response;
2759 return PostAndAwaitResponse(msg, &response);
2760 }
2761
2762 status_t MediaCodec::requestIDRFrame() {
2763 (new AMessage(kWhatRequestIDRFrame, this))->post();
2764
2765 return OK;
2766 }
2767
2768 status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
2769 return mCodec->querySupportedParameters(names);
2770 }
2771
2772 status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
2773 return mCodec->describeParameter(name, desc);
2774 }
2775
2776 status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
2777 return mCodec->subscribeToParameters(names);
2778 }
2779
2780 status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
2781 return mCodec->unsubscribeFromParameters(names);
2782 }
2783
2784 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
2785 sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
2786 msg->setMessage("notify", notify);
2787 msg->post();
2788 }
2789
2790 void MediaCodec::requestCpuBoostIfNeeded() {
2791 if (mCpuBoostRequested) {
2792 return;
2793 }
2794 int32_t colorFormat;
2795 if (mOutputFormat->contains("hdr-static-info")
2796 && mOutputFormat->findInt32("color-format", &colorFormat)
2797 // check format for OMX only, for C2 the format is always opaque since the
2798 // software rendering doesn't go through client
2799 && ((mSoftRenderer != NULL && colorFormat == OMX_COLOR_FormatYUV420Planar16)
2800 || mOwnerName.equalsIgnoreCase("codec2::software"))) {
2801 int32_t left, top, right, bottom, width, height;
2802 int64_t totalPixel = 0;
2803 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
2804 totalPixel = (right - left + 1) * (bottom - top + 1);
2805 } else if (mOutputFormat->findInt32("width", &width)
2806 && mOutputFormat->findInt32("height", &height)) {
2807 totalPixel = width * height;
2808 }
2809 if (totalPixel >= 1920 * 1080) {
2810 mResourceManagerProxy->addResource(MediaResource::CpuBoostResource());
2811 mCpuBoostRequested = true;
2812 }
2813 }
2814 }
2815
2816 BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
2817 : mTimeoutUs(timeoutUs)
2818 , mLastActivityTimeUs(-1ll)
2819 , mBatteryStatNotified(false)
2820 , mBatteryCheckerGeneration(0)
2821 , mIsExecuting(false)
2822 , mBatteryCheckerMsg(msg) {}
2823
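// BatteryChecker marks the codec "on battery" at the first activity and posts a
// delayed check; if no further activity is seen before that check fires, the
// batteryOffCb runs and the cycle restarts on the next activity.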
2824 void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
2825 if (!isExecuting()) {
2826 // ignore if not executing
2827 return;
2828 }
2829 if (!mBatteryStatNotified) {
2830 batteryOnCb();
2831 mBatteryStatNotified = true;
2832 sp<AMessage> msg = mBatteryCheckerMsg->dup();
2833 msg->setInt32("generation", mBatteryCheckerGeneration);
2834
2835 // post checker and clear last activity time
2836 msg->post(mTimeoutUs);
2837 mLastActivityTimeUs = -1ll;
2838 } else {
2839 // update last activity time
2840 mLastActivityTimeUs = ALooper::GetNowUs();
2841 }
2842 }
2843
2844 void BatteryChecker::onCheckBatteryTimer(
2845 const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
2846 // ignore if this checker already expired because the client resource was removed
2847 int32_t generation;
2848 if (!msg->findInt32("generation", &generation)
2849 || generation != mBatteryCheckerGeneration) {
2850 return;
2851 }
2852
2853 if (mLastActivityTimeUs < 0ll) {
2854 // timed out inactive, do not repost checker
2855 batteryOffCb();
2856 mBatteryStatNotified = false;
2857 } else {
2858 // repost checker and clear last activity time
2859 msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
2860 mLastActivityTimeUs = -1ll;
2861 }
2862 }
2863
2864 void BatteryChecker::onClientRemoved() {
2865 mBatteryStatNotified = false;
2866 mBatteryCheckerGeneration++;
2867 }
2868
2869 ////////////////////////////////////////////////////////////////////////////////
2870
2871 void MediaCodec::cancelPendingDequeueOperations() {
2872 if (mFlags & kFlagDequeueInputPending) {
2873 PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
2874
2875 ++mDequeueInputTimeoutGeneration;
2876 mDequeueInputReplyID = 0;
2877 mFlags &= ~kFlagDequeueInputPending;
2878 }
2879
2880 if (mFlags & kFlagDequeueOutputPending) {
2881 PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
2882
2883 ++mDequeueOutputTimeoutGeneration;
2884 mDequeueOutputReplyID = 0;
2885 mFlags &= ~kFlagDequeueOutputPending;
2886 }
2887 }
2888
2889 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
2890 if (!isExecuting() || (mFlags & kFlagIsAsync)
2891 || (newRequest && (mFlags & kFlagDequeueInputPending))) {
2892 PostReplyWithError(replyID, INVALID_OPERATION);
2893 return true;
2894 } else if (mFlags & kFlagStickyError) {
2895 PostReplyWithError(replyID, getStickyError());
2896 return true;
2897 }
2898
2899 ssize_t index = dequeuePortBuffer(kPortIndexInput);
2900
2901 if (index < 0) {
2902 CHECK_EQ(index, -EAGAIN);
2903 return false;
2904 }
2905
2906 sp<AMessage> response = new AMessage;
2907 response->setSize("index", index);
2908 response->postReply(replyID);
2909
2910 return true;
2911 }
2912
2913 bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
2914 if (!isExecuting() || (mFlags & kFlagIsAsync)
2915 || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
2916 PostReplyWithError(replyID, INVALID_OPERATION);
2917 } else if (mFlags & kFlagStickyError) {
2918 PostReplyWithError(replyID, getStickyError());
2919 } else if (mFlags & kFlagOutputBuffersChanged) {
2920 PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
2921 mFlags &= ~kFlagOutputBuffersChanged;
2922 } else {
2923 sp<AMessage> response = new AMessage;
2924 BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
2925 if (!info) {
2926 return false;
2927 }
2928
2929 // In synchronous mode, an output format change should be handled
2930 // at dequeue time so that the event is delivered in the correct order.
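// (i.e. INFO_FORMAT_CHANGED is reported to the caller before the index of the
// buffer that carries the new format is returned)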
2931
2932 const sp<MediaCodecBuffer> &buffer = info->mData;
2933 handleOutputFormatChangeIfNeeded(buffer);
2934 if (mFlags & kFlagOutputFormatChanged) {
2935 PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
2936 mFlags &= ~kFlagOutputFormatChanged;
2937 return true;
2938 }
2939
2940 ssize_t index = dequeuePortBuffer(kPortIndexOutput);
2941
2942 response->setSize("index", index);
2943 response->setSize("offset", buffer->offset());
2944 response->setSize("size", buffer->size());
2945
2946 int64_t timeUs;
2947 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
2948
2949 response->setInt64("timeUs", timeUs);
2950
2951 int32_t flags;
2952 CHECK(buffer->meta()->findInt32("flags", &flags));
2953
2954 response->setInt32("flags", flags);
2955
2956 statsBufferReceived(timeUs, buffer);
2957
2958 response->postReply(replyID);
2959 }
2960
2961 return true;
2962 }
2963
2964 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
2965 switch (msg->what()) {
2966 case kWhatCodecNotify:
2967 {
2968 int32_t what;
2969 CHECK(msg->findInt32("what", &what));
2970
2971 switch (what) {
2972 case kWhatError:
2973 {
2974 int32_t err, actionCode;
2975 CHECK(msg->findInt32("err", &err));
2976 CHECK(msg->findInt32("actionCode", &actionCode));
2977
2978 ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
2979 err, actionCode, mState, stateString(mState).c_str());
2980 if (err == DEAD_OBJECT) {
2981 mFlags |= kFlagSawMediaServerDie;
2982 mFlags &= ~kFlagIsComponentAllocated;
2983 }
2984
2985 bool sendErrorResponse = true;
2986 std::string origin{"kWhatError:"};
2987 origin += stateString(mState);
2988
2989 switch (mState) {
2990 case INITIALIZING:
2991 {
2992 setState(UNINITIALIZED);
2993 break;
2994 }
2995
2996 case CONFIGURING:
2997 {
2998 if (actionCode == ACTION_CODE_FATAL) {
2999 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3000 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3001 stateString(mState).c_str());
3002 flushMediametrics();
3003 initMediametrics();
3004 }
3005 setState(actionCode == ACTION_CODE_FATAL ?
3006 UNINITIALIZED : INITIALIZED);
3007 break;
3008 }
3009
3010 case STARTING:
3011 {
3012 if (actionCode == ACTION_CODE_FATAL) {
3013 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3014 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3015 stateString(mState).c_str());
3016 flushMediametrics();
3017 initMediametrics();
3018 }
3019 setState(actionCode == ACTION_CODE_FATAL ?
3020 UNINITIALIZED : CONFIGURED);
3021 break;
3022 }
3023
3024 case RELEASING:
3025 {
3026 // Ignore the error, assuming we'll still get
3027 // the shutdown complete notification. If we
3028 // don't, we'll timeout and force release.
3029 sendErrorResponse = false;
3030 FALLTHROUGH_INTENDED;
3031 }
3032 case STOPPING:
3033 {
3034 if (mFlags & kFlagSawMediaServerDie) {
3035 bool postPendingReplies = true;
3036 if (mState == RELEASING && !mReplyID) {
3037 ALOGD("Releasing asynchronously, so nothing to reply here.");
3038 postPendingReplies = false;
3039 }
3040 // MediaServer died, there definitely won't
3041 // be a shutdown complete notification after
3042 // all.
3043
3044 // note that we may be directly going from
3045 // STOPPING->UNINITIALIZED, instead of the
3046 // usual STOPPING->INITIALIZED state.
3047 setState(UNINITIALIZED);
3048 if (mState == RELEASING) {
3049 mComponentName.clear();
3050 }
3051 if (postPendingReplies) {
3052 postPendingRepliesAndDeferredMessages(origin + ":dead");
3053 }
3054 sendErrorResponse = false;
3055 } else if (!mReplyID) {
3056 sendErrorResponse = false;
3057 }
3058 break;
3059 }
3060
3061 case FLUSHING:
3062 {
3063 if (actionCode == ACTION_CODE_FATAL) {
3064 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3065 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3066 stateString(mState).c_str());
3067 flushMediametrics();
3068 initMediametrics();
3069
3070 setState(UNINITIALIZED);
3071 } else {
3072 setState(
3073 (mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
3074 }
3075 break;
3076 }
3077
3078 case FLUSHED:
3079 case STARTED:
3080 {
3081 sendErrorResponse = (mReplyID != nullptr);
3082
3083 setStickyError(err);
3084 postActivityNotificationIfPossible();
3085
3086 cancelPendingDequeueOperations();
3087
3088 if (mFlags & kFlagIsAsync) {
3089 onError(err, actionCode);
3090 }
3091 switch (actionCode) {
3092 case ACTION_CODE_TRANSIENT:
3093 break;
3094 case ACTION_CODE_RECOVERABLE:
3095 setState(INITIALIZED);
3096 break;
3097 default:
3098 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3099 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3100 stateString(mState).c_str());
3101 flushMediametrics();
3102 initMediametrics();
3103 setState(UNINITIALIZED);
3104 break;
3105 }
3106 break;
3107 }
3108
3109 default:
3110 {
3111 sendErrorResponse = (mReplyID != nullptr);
3112
3113 setStickyError(err);
3114 postActivityNotificationIfPossible();
3115
3116 // actionCode in an uninitialized state is always fatal.
3117 if (mState == UNINITIALIZED) {
3118 actionCode = ACTION_CODE_FATAL;
3119 }
3120 if (mFlags & kFlagIsAsync) {
3121 onError(err, actionCode);
3122 }
3123 switch (actionCode) {
3124 case ACTION_CODE_TRANSIENT:
3125 break;
3126 case ACTION_CODE_RECOVERABLE:
3127 setState(INITIALIZED);
3128 break;
3129 default:
3130 setState(UNINITIALIZED);
3131 break;
3132 }
3133 break;
3134 }
3135 }
3136
3137 if (sendErrorResponse) {
3138 // TRICKY: replicate PostReplyWithError logic for
3139 // err code override
3140 int32_t finalErr = err;
3141 if (mReleasedByResourceManager) {
3142 // override the err code if MediaCodec has been
3143 // released by ResourceManager.
3144 finalErr = DEAD_OBJECT;
3145 }
3146 postPendingRepliesAndDeferredMessages(origin, finalErr);
3147 }
3148 break;
3149 }
3150
3151 case kWhatComponentAllocated:
3152 {
3153 if (mState == RELEASING || mState == UNINITIALIZED) {
3154 // In case a kWhatError or kWhatRelease message came in and replied,
3155 // we log a warning and ignore.
3156 ALOGW("allocate interrupted by error or release, current state %d/%s",
3157 mState, stateString(mState).c_str());
3158 break;
3159 }
3160 CHECK_EQ(mState, INITIALIZING);
3161 setState(INITIALIZED);
3162 mFlags |= kFlagIsComponentAllocated;
3163
3164 CHECK(msg->findString("componentName", &mComponentName));
3165
3166 if (mComponentName.c_str()) {
3167 mediametrics_setCString(mMetricsHandle, kCodecCodec,
3168 mComponentName.c_str());
3169 }
3170
3171 const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
3172 if (mComponentName.startsWith("OMX.google.")
3173 && strncmp(owner, "default", 8) == 0) {
3174 mFlags |= kFlagUsesSoftwareRenderer;
3175 } else {
3176 mFlags &= ~kFlagUsesSoftwareRenderer;
3177 }
3178 mOwnerName = owner;
3179
3180 if (mComponentName.endsWith(".secure")) {
3181 mFlags |= kFlagIsSecure;
3182 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 1);
3183 } else {
3184 mFlags &= ~kFlagIsSecure;
3185 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
3186 }
3187
3188 if (mIsVideo) {
3189                 // audio codecs are currently not reported to the resource manager.
3190 mResourceManagerProxy->addResource(
3191 MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
3192 }
3193
3194 postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
3195 break;
3196 }
3197
3198 case kWhatComponentConfigured:
3199 {
3200 if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
3201 // In case a kWhatError or kWhatRelease message came in and replied,
3202 // we log a warning and ignore.
3203 ALOGW("configure interrupted by error or release, current state %d/%s",
3204 mState, stateString(mState).c_str());
3205 break;
3206 }
3207 CHECK_EQ(mState, CONFIGURING);
3208
3209 // reset input surface flag
3210 mHaveInputSurface = false;
3211
3212 CHECK(msg->findMessage("input-format", &mInputFormat));
3213 CHECK(msg->findMessage("output-format", &mOutputFormat));
3214
3215 // limit to confirming the opt-in behavior to minimize any behavioral change
3216 if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
3217                 // signal frame dropping mode in the input format, as this may also be
3218                 // meaningful (though potentially confusing) for an encoder in a transcoder scenario
3219 mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
3220 }
3221 sp<AMessage> interestingFormat =
3222 (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
3223 ALOGV("[%s] configured as input format: %s, output format: %s",
3224 mComponentName.c_str(),
3225 mInputFormat->debugString(4).c_str(),
3226 mOutputFormat->debugString(4).c_str());
3227 int32_t usingSwRenderer;
3228 if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
3229 && usingSwRenderer) {
3230 mFlags |= kFlagUsesSoftwareRenderer;
3231 }
3232 setState(CONFIGURED);
3233 postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
3234
3235 // augment our media metrics info, now that we know more things
3236 // such as what the codec extracted from any CSD passed in.
3237 if (mMetricsHandle != 0) {
3238 sp<AMessage> format;
3239 if (mConfigureMsg != NULL &&
3240 mConfigureMsg->findMessage("format", &format)) {
3241 // format includes: mime
3242 AString mime;
3243 if (format->findString("mime", &mime)) {
3244 mediametrics_setCString(mMetricsHandle, kCodecMime,
3245 mime.c_str());
3246 }
3247 }
3248 // perhaps video only?
3249 int32_t profile = 0;
3250 if (interestingFormat->findInt32("profile", &profile)) {
3251 mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
3252 }
3253 int32_t level = 0;
3254 if (interestingFormat->findInt32("level", &level)) {
3255 mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
3256 }
3257 // bitrate and bitrate mode, encoder only
3258 if (mFlags & kFlagIsEncoder) {
3259 // encoder specific values
3260 int32_t bitrate_mode = -1;
3261 if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
3262 mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
3263 asString_BitrateMode(bitrate_mode));
3264 }
3265 int32_t bitrate = -1;
3266 if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
3267 mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
3268 }
3269 } else {
3270 // decoder specific values
3271 }
3272 }
3273 break;
3274 }
3275
3276 case kWhatInputSurfaceCreated:
3277 {
3278 if (mState != CONFIGURED) {
3279 // state transitioned unexpectedly; we should have replied already.
3280 ALOGD("received kWhatInputSurfaceCreated message in state %s",
3281 stateString(mState).c_str());
3282 break;
3283 }
3284 // response to initiateCreateInputSurface()
3285 status_t err = NO_ERROR;
3286 sp<AMessage> response = new AMessage;
3287 if (!msg->findInt32("err", &err)) {
3288 sp<RefBase> obj;
3289 msg->findObject("input-surface", &obj);
3290 CHECK(msg->findMessage("input-format", &mInputFormat));
3291 CHECK(msg->findMessage("output-format", &mOutputFormat));
3292 ALOGV("[%s] input surface created as input format: %s, output format: %s",
3293 mComponentName.c_str(),
3294 mInputFormat->debugString(4).c_str(),
3295 mOutputFormat->debugString(4).c_str());
3296 CHECK(obj != NULL);
3297 response->setObject("input-surface", obj);
3298 mHaveInputSurface = true;
3299 } else {
3300 response->setInt32("err", err);
3301 }
3302 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
3303 break;
3304 }
3305
3306 case kWhatInputSurfaceAccepted:
3307 {
3308 if (mState != CONFIGURED) {
3309 // state transitioned unexpectedly; we should have replied already.
3310 ALOGD("received kWhatInputSurfaceAccepted message in state %s",
3311 stateString(mState).c_str());
3312 break;
3313 }
3314 // response to initiateSetInputSurface()
3315 status_t err = NO_ERROR;
3316 sp<AMessage> response = new AMessage();
3317 if (!msg->findInt32("err", &err)) {
3318 CHECK(msg->findMessage("input-format", &mInputFormat));
3319 CHECK(msg->findMessage("output-format", &mOutputFormat));
3320 mHaveInputSurface = true;
3321 } else {
3322 response->setInt32("err", err);
3323 }
3324 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
3325 break;
3326 }
3327
3328 case kWhatSignaledInputEOS:
3329 {
3330 if (!isExecuting()) {
3331 // state transitioned unexpectedly; we should have replied already.
3332 ALOGD("received kWhatSignaledInputEOS message in state %s",
3333 stateString(mState).c_str());
3334 break;
3335 }
3336 // response to signalEndOfInputStream()
3337 sp<AMessage> response = new AMessage;
3338 status_t err;
3339 if (msg->findInt32("err", &err)) {
3340 response->setInt32("err", err);
3341 }
3342 postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
3343 break;
3344 }
3345
3346 case kWhatStartCompleted:
3347 {
3348 if (mState == RELEASING || mState == UNINITIALIZED) {
3349 // In case a kWhatRelease message came in and replied,
3350 // we log a warning and ignore.
3351 ALOGW("start interrupted by release, current state %d/%s",
3352 mState, stateString(mState).c_str());
3353 break;
3354 }
3355
3356 CHECK_EQ(mState, STARTING);
3357 if (mIsVideo) {
3358 mResourceManagerProxy->addResource(
3359 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
3360 }
3361 setState(STARTED);
3362 postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
3363 break;
3364 }
3365
3366 case kWhatOutputBuffersChanged:
3367 {
3368 mFlags |= kFlagOutputBuffersChanged;
3369 postActivityNotificationIfPossible();
3370 break;
3371 }
3372
3373 case kWhatOutputFramesRendered:
3374 {
3375 // ignore these in all states except running
3376 if (mState != STARTED) {
3377 break;
3378 }
3379 TunnelPeekState previousState = mTunnelPeekState;
3380 mTunnelPeekState = TunnelPeekState::kBufferRendered;
3381 ALOGV("TunnelPeekState: %s -> %s",
3382 asString(previousState),
3383 asString(TunnelPeekState::kBufferRendered));
3384 updatePlaybackDuration(msg);
3385 // check that we have a notification set
3386 if (mOnFrameRenderedNotification != NULL) {
3387 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
3388 notify->setMessage("data", msg);
3389 notify->post();
3390 }
3391 break;
3392 }
3393
3394 case kWhatFirstTunnelFrameReady:
3395 {
3396 if (mState != STARTED) {
3397 break;
3398 }
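            // In tunneled playback, "tunnel peek" controls whether the first decoded
            // frame may be shown before playback starts. If peek is enabled while we
            // were still waiting for a buffer, ask the codec to render that first frame
            // now via the "android._trigger-tunnel-peek" parameter; if peek is disabled,
            // only record that a frame has been decoded.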
3399 TunnelPeekState previousState = mTunnelPeekState;
3400 switch(mTunnelPeekState) {
3401 case TunnelPeekState::kDisabledNoBuffer:
3402 case TunnelPeekState::kDisabledQueued:
3403 mTunnelPeekState = TunnelPeekState::kBufferDecoded;
3404 ALOGV("First tunnel frame ready");
3405 ALOGV("TunnelPeekState: %s -> %s",
3406 asString(previousState),
3407 asString(mTunnelPeekState));
3408 break;
3409 case TunnelPeekState::kEnabledNoBuffer:
3410 case TunnelPeekState::kEnabledQueued:
3411 {
3412 sp<AMessage> parameters = new AMessage();
3413 parameters->setInt32("android._trigger-tunnel-peek", 1);
3414 mCodec->signalSetParameters(parameters);
3415 }
3416 mTunnelPeekState = TunnelPeekState::kBufferRendered;
3417 ALOGV("First tunnel frame ready");
3418 ALOGV("TunnelPeekState: %s -> %s",
3419 asString(previousState),
3420 asString(mTunnelPeekState));
3421 break;
3422 default:
3423 ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
3424 asString(mTunnelPeekState));
3425 break;
3426 }
3427
3428 if (mOnFirstTunnelFrameReadyNotification != nullptr) {
3429 sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
3430 notify->setMessage("data", msg);
3431 notify->post();
3432 }
3433 break;
3434 }
3435
3436 case kWhatFillThisBuffer:
3437 {
3438 /* size_t index = */updateBuffers(kPortIndexInput, msg);
3439
3440 if (mState == FLUSHING
3441 || mState == STOPPING
3442 || mState == RELEASING) {
3443 returnBuffersToCodecOnPort(kPortIndexInput);
3444 break;
3445 }
3446
3447 if (!mCSD.empty()) {
3448 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3449 CHECK_GE(index, 0);
3450
3451                 // If codec-specific data had been specified as
3452                 // part of the format in the call to configure, and
3453                 // there is more CSD left, we submit it here;
3454                 // clients only get access to input buffers once
3455                 // this data has been exhausted.
3456
3457 status_t err = queueCSDInputBuffer(index);
3458
3459 if (err != OK) {
3460 ALOGE("queueCSDInputBuffer failed w/ error %d",
3461 err);
3462
3463 setStickyError(err);
3464 postActivityNotificationIfPossible();
3465
3466 cancelPendingDequeueOperations();
3467 }
3468 break;
3469 }
3470 if (!mLeftover.empty()) {
3471 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3472 CHECK_GE(index, 0);
3473
3474 status_t err = handleLeftover(index);
3475 if (err != OK) {
3476 setStickyError(err);
3477 postActivityNotificationIfPossible();
3478 cancelPendingDequeueOperations();
3479 }
3480 break;
3481 }
3482
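            // Dispatch input availability: async clients get an onInputBufferAvailable
            // callback (deferred while FLUSHED), a pending synchronous dequeueInputBuffer
            // is answered right away, and otherwise we just raise the activity
            // notification for polling clients.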
3483 if (mFlags & kFlagIsAsync) {
3484 if (!mHaveInputSurface) {
3485 if (mState == FLUSHED) {
3486 mHavePendingInputBuffers = true;
3487 } else {
3488 onInputBufferAvailable();
3489 }
3490 }
3491 } else if (mFlags & kFlagDequeueInputPending) {
3492 CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
3493
3494 ++mDequeueInputTimeoutGeneration;
3495 mFlags &= ~kFlagDequeueInputPending;
3496 mDequeueInputReplyID = 0;
3497 } else {
3498 postActivityNotificationIfPossible();
3499 }
3500 break;
3501 }
3502
3503 case kWhatDrainThisBuffer:
3504 {
3505 /* size_t index = */updateBuffers(kPortIndexOutput, msg);
3506
3507 if (mState == FLUSHING
3508 || mState == STOPPING
3509 || mState == RELEASING) {
3510 returnBuffersToCodecOnPort(kPortIndexOutput);
3511 break;
3512 }
3513
3514 if (mFlags & kFlagIsAsync) {
3515 sp<RefBase> obj;
3516 CHECK(msg->findObject("buffer", &obj));
3517 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
3518
3519 // In asynchronous mode, output format change is processed immediately.
3520 handleOutputFormatChangeIfNeeded(buffer);
3521 onOutputBufferAvailable();
3522 } else if (mFlags & kFlagDequeueOutputPending) {
3523 CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
3524
3525 ++mDequeueOutputTimeoutGeneration;
3526 mFlags &= ~kFlagDequeueOutputPending;
3527 mDequeueOutputReplyID = 0;
3528 } else {
3529 postActivityNotificationIfPossible();
3530 }
3531
3532 break;
3533 }
3534
3535 case kWhatEOS:
3536 {
3537             // We already notify the client of this by using the
3538             // corresponding flag in "onOutputBufferAvailable".
3539 break;
3540 }
3541
3542 case kWhatStopCompleted:
3543 {
3544 if (mState != STOPPING) {
3545 ALOGW("Received kWhatStopCompleted in state %d/%s",
3546 mState, stateString(mState).c_str());
3547 break;
3548 }
3549 setState(INITIALIZED);
3550 if (mReplyID) {
3551 postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
3552 } else {
3553 ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
3554 "but the operation completed anyway. (last reply origin=%s)",
3555 mLastReplyOrigin.c_str());
3556 }
3557 break;
3558 }
3559
3560 case kWhatReleaseCompleted:
3561 {
3562 if (mState != RELEASING) {
3563 ALOGW("Received kWhatReleaseCompleted in state %d/%s",
3564 mState, stateString(mState).c_str());
3565 break;
3566 }
3567 setState(UNINITIALIZED);
3568 mComponentName.clear();
3569
3570 mFlags &= ~kFlagIsComponentAllocated;
3571
3572             // turn battery tracking off, since we're removing all resources, including the battery-on resource
3573 if (mBatteryChecker != nullptr) {
3574 mBatteryChecker->onClientRemoved();
3575 }
3576
3577 mResourceManagerProxy->removeClient();
3578 mReleaseSurface.reset();
3579
3580 if (mReplyID != nullptr) {
3581 postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
3582 }
3583 if (mAsyncReleaseCompleteNotification != nullptr) {
3584 flushMediametrics();
3585 mAsyncReleaseCompleteNotification->post();
3586 mAsyncReleaseCompleteNotification.clear();
3587 }
3588 break;
3589 }
3590
3591 case kWhatFlushCompleted:
3592 {
3593 if (mState != FLUSHING) {
3594 ALOGW("received FlushCompleted message in state %d/%s",
3595 mState, stateString(mState).c_str());
3596 break;
3597 }
3598
3599 if (mFlags & kFlagIsAsync) {
3600 setState(FLUSHED);
3601 } else {
3602 setState(STARTED);
3603 mCodec->signalResume();
3604 }
3605
3606 postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
3607 break;
3608 }
3609
3610 default:
3611 TRESPASS();
3612 }
3613 break;
3614 }
3615
3616 case kWhatInit:
3617 {
3618 if (mState != UNINITIALIZED) {
3619 PostReplyWithError(msg, INVALID_OPERATION);
3620 break;
3621 }
3622
3623 if (mReplyID) {
3624 mDeferredMessages.push_back(msg);
3625 break;
3626 }
3627 sp<AReplyToken> replyID;
3628 CHECK(msg->senderAwaitsResponse(&replyID));
3629
3630 mReplyID = replyID;
3631 setState(INITIALIZING);
3632
3633 sp<RefBase> codecInfo;
3634 (void)msg->findObject("codecInfo", &codecInfo);
3635 AString name;
3636 CHECK(msg->findString("name", &name));
3637
3638 sp<AMessage> format = new AMessage;
3639 if (codecInfo) {
3640 format->setObject("codecInfo", codecInfo);
3641 }
3642 format->setString("componentName", name);
3643
3644 mCodec->initiateAllocateComponent(format);
3645 break;
3646 }
3647
3648 case kWhatSetNotification:
3649 {
3650 sp<AMessage> notify;
3651             if (msg->findMessage("on-frame-rendered", &notify)) {
3652 mOnFrameRenderedNotification = notify;
3653 }
3654             if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
3655 mOnFirstTunnelFrameReadyNotification = notify;
3656 }
3657 break;
3658 }
3659
3660 case kWhatSetCallback:
3661 {
3662 sp<AReplyToken> replyID;
3663 CHECK(msg->senderAwaitsResponse(&replyID));
3664
3665 if (mState == UNINITIALIZED
3666 || mState == INITIALIZING
3667 || isExecuting()) {
3668 // callback can't be set after codec is executing,
3669 // or before it's initialized (as the callback
3670 // will be cleared when it goes to INITIALIZED)
3671 PostReplyWithError(replyID, INVALID_OPERATION);
3672 break;
3673 }
3674
3675 sp<AMessage> callback;
3676 CHECK(msg->findMessage("callback", &callback));
3677
3678 mCallback = callback;
3679
3680 if (mCallback != NULL) {
3681 ALOGI("MediaCodec will operate in async mode");
3682 mFlags |= kFlagIsAsync;
3683 } else {
3684 mFlags &= ~kFlagIsAsync;
3685 }
3686
3687 sp<AMessage> response = new AMessage;
3688 response->postReply(replyID);
3689 break;
3690 }
3691
3692 case kWhatConfigure:
3693 {
3694 if (mState != INITIALIZED) {
3695 PostReplyWithError(msg, INVALID_OPERATION);
3696 break;
3697 }
3698
3699 if (mReplyID) {
3700 mDeferredMessages.push_back(msg);
3701 break;
3702 }
3703 sp<AReplyToken> replyID;
3704 CHECK(msg->senderAwaitsResponse(&replyID));
3705
3706 sp<RefBase> obj;
3707 CHECK(msg->findObject("surface", &obj));
3708
3709 sp<AMessage> format;
3710 CHECK(msg->findMessage("format", &format));
3711
3712 int32_t push;
3713 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
3714 mFlags |= kFlagPushBlankBuffersOnShutdown;
3715 }
3716
3717 if (obj != NULL) {
3718 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
3719 // allow frame dropping by surface by default
3720 mAllowFrameDroppingBySurface = true;
3721 }
3722
3723 format->setObject("native-window", obj);
3724 status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
3725 if (err != OK) {
3726 PostReplyWithError(replyID, err);
3727 break;
3728 }
3729 } else {
3730                 // we are not using a surface, so this variable is unused; initialize it sensibly anyway
3731 mAllowFrameDroppingBySurface = false;
3732
3733 handleSetSurface(NULL);
3734 }
3735
3736 uint32_t flags;
3737 CHECK(msg->findInt32("flags", (int32_t *)&flags));
3738 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
3739 if (!(mFlags & kFlagIsAsync)) {
3740 PostReplyWithError(replyID, INVALID_OPERATION);
3741 break;
3742 }
3743 mFlags |= kFlagUseBlockModel;
3744 }
3745 mReplyID = replyID;
3746 setState(CONFIGURING);
3747
3748 void *crypto;
3749 if (!msg->findPointer("crypto", &crypto)) {
3750 crypto = NULL;
3751 }
3752
3753 ALOGV("kWhatConfigure: Old mCrypto: %p (%d)",
3754 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
3755
3756 mCrypto = static_cast<ICrypto *>(crypto);
3757 mBufferChannel->setCrypto(mCrypto);
3758
3759 ALOGV("kWhatConfigure: New mCrypto: %p (%d)",
3760 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
3761
3762 void *descrambler;
3763 if (!msg->findPointer("descrambler", &descrambler)) {
3764 descrambler = NULL;
3765 }
3766
3767 mDescrambler = static_cast<IDescrambler *>(descrambler);
3768 mBufferChannel->setDescrambler(mDescrambler);
3769
3770 format->setInt32("flags", flags);
3771 if (flags & CONFIGURE_FLAG_ENCODE) {
3772 format->setInt32("encoder", true);
3773 mFlags |= kFlagIsEncoder;
3774 }
3775
3776 extractCSD(format);
3777
3778 int32_t tunneled;
3779 if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
3780 ALOGI("Configuring TUNNELED video playback.");
3781 mTunneled = true;
3782 } else {
3783 mTunneled = false;
3784 }
3785
3786 int32_t background = 0;
3787 if (format->findInt32("android._background-mode", &background) && background) {
3788 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
3789 }
3790
3791 mCodec->initiateConfigureComponent(format);
3792 break;
3793 }
3794
3795 case kWhatSetSurface:
3796 {
3797 sp<AReplyToken> replyID;
3798 CHECK(msg->senderAwaitsResponse(&replyID));
3799
3800 status_t err = OK;
3801
3802 switch (mState) {
3803 case CONFIGURED:
3804 case STARTED:
3805 case FLUSHED:
3806 {
3807 sp<RefBase> obj;
3808 (void)msg->findObject("surface", &obj);
3809 sp<Surface> surface = static_cast<Surface *>(obj.get());
3810 if (mSurface == NULL) {
3811 // do not support setting surface if it was not set
3812 err = INVALID_OPERATION;
3813 } else if (obj == NULL) {
3814 // do not support unsetting surface
3815 err = BAD_VALUE;
3816 } else {
3817 err = connectToSurface(surface);
3818 if (err == ALREADY_EXISTS) {
3819 // reconnecting to same surface
3820 err = OK;
3821 } else {
3822 if (err == OK) {
3823 if (mFlags & kFlagUsesSoftwareRenderer) {
3824 if (mSoftRenderer != NULL
3825 && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
3826 pushBlankBuffersToNativeWindow(mSurface.get());
3827 }
3828 surface->setDequeueTimeout(-1);
3829 mSoftRenderer = new SoftwareRenderer(surface);
3830 // TODO: check if this was successful
3831 } else {
3832 err = mCodec->setSurface(surface);
3833 }
3834 }
3835 if (err == OK) {
3836 (void)disconnectFromSurface();
3837 mSurface = surface;
3838 }
3839 }
3840 }
3841 break;
3842 }
3843
3844 default:
3845 err = INVALID_OPERATION;
3846 break;
3847 }
3848
3849 PostReplyWithError(replyID, err);
3850 break;
3851 }
3852
3853 case kWhatCreateInputSurface:
3854 case kWhatSetInputSurface:
3855 {
3856 // Must be configured, but can't have been started yet.
3857 if (mState != CONFIGURED) {
3858 PostReplyWithError(msg, INVALID_OPERATION);
3859 break;
3860 }
3861
3862 if (mReplyID) {
3863 mDeferredMessages.push_back(msg);
3864 break;
3865 }
3866 sp<AReplyToken> replyID;
3867 CHECK(msg->senderAwaitsResponse(&replyID));
3868
3869 mReplyID = replyID;
3870 if (msg->what() == kWhatCreateInputSurface) {
3871 mCodec->initiateCreateInputSurface();
3872 } else {
3873 sp<RefBase> obj;
3874 CHECK(msg->findObject("input-surface", &obj));
3875
3876 mCodec->initiateSetInputSurface(
3877 static_cast<PersistentSurface *>(obj.get()));
3878 }
3879 break;
3880 }
3881 case kWhatStart:
3882 {
3883 if (mState == FLUSHED) {
3884 setState(STARTED);
3885 if (mHavePendingInputBuffers) {
3886 onInputBufferAvailable();
3887 mHavePendingInputBuffers = false;
3888 }
3889 mCodec->signalResume();
3890 PostReplyWithError(msg, OK);
3891 break;
3892 } else if (mState != CONFIGURED) {
3893 PostReplyWithError(msg, INVALID_OPERATION);
3894 break;
3895 }
3896
3897 if (mReplyID) {
3898 mDeferredMessages.push_back(msg);
3899 break;
3900 }
3901 sp<AReplyToken> replyID;
3902 CHECK(msg->senderAwaitsResponse(&replyID));
3903 TunnelPeekState previousState = mTunnelPeekState;
3904 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
3905 ALOGV("TunnelPeekState: %s -> %s",
3906 asString(previousState),
3907 asString(TunnelPeekState::kEnabledNoBuffer));
3908
3909 mReplyID = replyID;
3910 setState(STARTING);
3911
3912 mCodec->initiateStart();
3913 break;
3914 }
3915
3916 case kWhatStop: {
3917 if (mReplyID) {
3918 mDeferredMessages.push_back(msg);
3919 break;
3920 }
3921 [[fallthrough]];
3922 }
3923 case kWhatRelease:
3924 {
3925 State targetState =
3926 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
3927
3928 if ((mState == RELEASING && targetState == UNINITIALIZED)
3929 || (mState == STOPPING && targetState == INITIALIZED)) {
3930 mDeferredMessages.push_back(msg);
3931 break;
3932 }
3933
3934 sp<AReplyToken> replyID;
3935 CHECK(msg->senderAwaitsResponse(&replyID));
3936
3937 sp<AMessage> asyncNotify;
3938 (void)msg->findMessage("async", &asyncNotify);
3939 // post asyncNotify if going out of scope.
3940 struct AsyncNotifyPost {
3941 AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
3942 ~AsyncNotifyPost() {
3943 if (mAsyncNotify) {
3944 mAsyncNotify->post();
3945 }
3946 }
3947 void clear() { mAsyncNotify.clear(); }
3948 private:
3949 sp<AMessage> mAsyncNotify;
3950 } asyncNotifyPost{asyncNotify};
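            // asyncNotifyPost is an RAII guard: unless clear() is called on the
            // successful async-release path below, asyncNotify is posted when this
            // scope exits so the caller is never left waiting for a completion signal.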
3951
3952 // already stopped/released
3953 if (mState == UNINITIALIZED && mReleasedByResourceManager) {
3954 sp<AMessage> response = new AMessage;
3955 response->setInt32("err", OK);
3956 response->postReply(replyID);
3957 break;
3958 }
3959
3960 int32_t reclaimed = 0;
3961 msg->findInt32("reclaimed", &reclaimed);
3962 if (reclaimed) {
3963 if (!mReleasedByResourceManager) {
3964 // notify the async client
3965 if (mFlags & kFlagIsAsync) {
3966 onError(DEAD_OBJECT, ACTION_CODE_FATAL);
3967 }
3968 mReleasedByResourceManager = true;
3969 }
3970
3971 int32_t force = 0;
3972 msg->findInt32("force", &force);
3973 if (!force && hasPendingBuffer()) {
3974 ALOGW("Can't reclaim codec right now due to pending buffers.");
3975
3976 // return WOULD_BLOCK to ask resource manager to retry later.
3977 sp<AMessage> response = new AMessage;
3978 response->setInt32("err", WOULD_BLOCK);
3979 response->postReply(replyID);
3980
3981 break;
3982 }
3983 }
3984
3985 bool isReleasingAllocatedComponent =
3986 (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
3987 if (!isReleasingAllocatedComponent // See 1
3988 && mState != INITIALIZED
3989 && mState != CONFIGURED && !isExecuting()) {
3990 // 1) Permit release to shut down the component if allocated.
3991 //
3992 // 2) We may be in "UNINITIALIZED" state already and
3993 // also shutdown the encoder/decoder without the
3994 // client being aware of this if media server died while
3995 // we were being stopped. The client would assume that
3996 // after stop() returned, it would be safe to call release()
3997 // and it should be in this case, no harm to allow a release()
3998 // if we're already uninitialized.
3999 sp<AMessage> response = new AMessage;
4000 // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
4001 // the previous stop/release completes and then reply with OK.
4002 status_t err = mState == targetState ? OK : INVALID_OPERATION;
4003 response->setInt32("err", err);
4004 if (err == OK && targetState == UNINITIALIZED) {
4005 mComponentName.clear();
4006 }
4007 response->postReply(replyID);
4008 break;
4009 }
4010
4011 // If we're flushing, configuring or starting but
4012 // received a release request, post the reply for the pending call
4013 // first, and consider it done. The reply token will be replaced
4014 // after this, and we'll no longer be able to reply.
4015 if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
4016                 // mReplyID is always set if in these states.
4017 postPendingRepliesAndDeferredMessages(
4018 std::string("kWhatRelease:") + stateString(mState));
4019 }
4020 // If we're stopping but received a release request, post the reply
4021 // for the pending call if necessary. Note that the reply may have been
4022 // already posted due to an error.
4023 if (mState == STOPPING && mReplyID) {
4024 postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
4025 }
4026
4027 if (mFlags & kFlagSawMediaServerDie) {
4028 // It's dead, Jim. Don't expect initiateShutdown to yield
4029 // any useful results now...
4030 // Any pending reply would have been handled at kWhatError.
4031 setState(UNINITIALIZED);
4032 if (targetState == UNINITIALIZED) {
4033 mComponentName.clear();
4034 }
4035 (new AMessage)->postReply(replyID);
4036 break;
4037 }
4038
4039             // If we already have an error, the component may not be able to
4040             // complete the shutdown properly. If we're stopping, post the
4041             // reply now with an error to unblock the client; the client can
4042             // then release after the failure (instead of hitting an ANR).
4043 if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
4044 // Any pending reply would have been handled at kWhatError.
4045 PostReplyWithError(replyID, getStickyError());
4046 break;
4047 }
4048
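            // For an asynchronous release, switch the codec to an internal placeholder
            // ReleaseSurface so the client-provided surface is no longer in use while
            // the shutdown completes.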
4049 if (asyncNotify != nullptr) {
4050 if (mSurface != NULL) {
4051 if (!mReleaseSurface) {
4052 uint64_t usage = 0;
4053 if (mSurface->getConsumerUsage(&usage) != OK) {
4054 usage = 0;
4055 }
4056 mReleaseSurface.reset(new ReleaseSurface(usage));
4057 }
4058 if (mSurface != mReleaseSurface->getSurface()) {
4059 status_t err = connectToSurface(mReleaseSurface->getSurface());
4060 ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
4061 if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
4062 err = mCodec->setSurface(mReleaseSurface->getSurface());
4063 ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
4064 }
4065 if (err == OK) {
4066 (void)disconnectFromSurface();
4067 mSurface = mReleaseSurface->getSurface();
4068 }
4069 }
4070 }
4071 }
4072
4073 if (mReplyID) {
4074 // State transition replies are handled above, so this reply
4075 // would not be related to state transition. As we are
4076 // shutting down the component, just fail the operation.
4077 postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
4078 }
4079 mReplyID = replyID;
4080 setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
4081
4082 mCodec->initiateShutdown(
4083 msg->what() == kWhatStop /* keepComponentAllocated */);
4084
4085 returnBuffersToCodec(reclaimed);
4086
4087 if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
4088 pushBlankBuffersToNativeWindow(mSurface.get());
4089 }
4090
4091 if (asyncNotify != nullptr) {
4092 mResourceManagerProxy->markClientForPendingRemoval();
4093 postPendingRepliesAndDeferredMessages("kWhatRelease:async");
4094 asyncNotifyPost.clear();
4095 mAsyncReleaseCompleteNotification = asyncNotify;
4096 }
4097
4098 break;
4099 }
4100
4101 case kWhatDequeueInputBuffer:
4102 {
4103 sp<AReplyToken> replyID;
4104 CHECK(msg->senderAwaitsResponse(&replyID));
4105
4106 if (mFlags & kFlagIsAsync) {
4107 ALOGE("dequeueInputBuffer can't be used in async mode");
4108 PostReplyWithError(replyID, INVALID_OPERATION);
4109 break;
4110 }
4111
4112 if (mHaveInputSurface) {
4113 ALOGE("dequeueInputBuffer can't be used with input surface");
4114 PostReplyWithError(replyID, INVALID_OPERATION);
4115 break;
4116 }
4117
4118 if (handleDequeueInputBuffer(replyID, true /* new request */)) {
4119 break;
4120 }
4121
4122 int64_t timeoutUs;
4123 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
4124
4125 if (timeoutUs == 0LL) {
4126 PostReplyWithError(replyID, -EAGAIN);
4127 break;
4128 }
4129
4130 mFlags |= kFlagDequeueInputPending;
4131 mDequeueInputReplyID = replyID;
4132
4133 if (timeoutUs > 0LL) {
4134 sp<AMessage> timeoutMsg =
4135 new AMessage(kWhatDequeueInputTimedOut, this);
4136 timeoutMsg->setInt32(
4137 "generation", ++mDequeueInputTimeoutGeneration);
4138 timeoutMsg->post(timeoutUs);
4139 }
4140 break;
4141 }
4142
4143 case kWhatDequeueInputTimedOut:
4144 {
4145 int32_t generation;
4146 CHECK(msg->findInt32("generation", &generation));
4147
4148 if (generation != mDequeueInputTimeoutGeneration) {
4149 // Obsolete
4150 break;
4151 }
4152
4153 CHECK(mFlags & kFlagDequeueInputPending);
4154
4155 PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
4156
4157 mFlags &= ~kFlagDequeueInputPending;
4158 mDequeueInputReplyID = 0;
4159 break;
4160 }
4161
4162 case kWhatQueueInputBuffer:
4163 {
4164 sp<AReplyToken> replyID;
4165 CHECK(msg->senderAwaitsResponse(&replyID));
4166
4167 if (!isExecuting()) {
4168 PostReplyWithError(replyID, INVALID_OPERATION);
4169 break;
4170 } else if (mFlags & kFlagStickyError) {
4171 PostReplyWithError(replyID, getStickyError());
4172 break;
4173 }
4174
4175 status_t err = UNKNOWN_ERROR;
4176 if (!mLeftover.empty()) {
4177 mLeftover.push_back(msg);
4178 size_t index;
4179 msg->findSize("index", &index);
4180 err = handleLeftover(index);
4181 } else {
4182 err = onQueueInputBuffer(msg);
4183 }
4184
4185 PostReplyWithError(replyID, err);
4186 break;
4187 }
4188
4189 case kWhatDequeueOutputBuffer:
4190 {
4191 sp<AReplyToken> replyID;
4192 CHECK(msg->senderAwaitsResponse(&replyID));
4193
4194 if (mFlags & kFlagIsAsync) {
4195 ALOGE("dequeueOutputBuffer can't be used in async mode");
4196 PostReplyWithError(replyID, INVALID_OPERATION);
4197 break;
4198 }
4199
4200 if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
4201 break;
4202 }
4203
4204 int64_t timeoutUs;
4205 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
4206
4207 if (timeoutUs == 0LL) {
4208 PostReplyWithError(replyID, -EAGAIN);
4209 break;
4210 }
4211
4212 mFlags |= kFlagDequeueOutputPending;
4213 mDequeueOutputReplyID = replyID;
4214
4215 if (timeoutUs > 0LL) {
4216 sp<AMessage> timeoutMsg =
4217 new AMessage(kWhatDequeueOutputTimedOut, this);
4218 timeoutMsg->setInt32(
4219 "generation", ++mDequeueOutputTimeoutGeneration);
4220 timeoutMsg->post(timeoutUs);
4221 }
4222 break;
4223 }
4224
4225 case kWhatDequeueOutputTimedOut:
4226 {
4227 int32_t generation;
4228 CHECK(msg->findInt32("generation", &generation));
4229
4230 if (generation != mDequeueOutputTimeoutGeneration) {
4231 // Obsolete
4232 break;
4233 }
4234
4235 CHECK(mFlags & kFlagDequeueOutputPending);
4236
4237 PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
4238
4239 mFlags &= ~kFlagDequeueOutputPending;
4240 mDequeueOutputReplyID = 0;
4241 break;
4242 }
4243
4244 case kWhatReleaseOutputBuffer:
4245 {
4246 sp<AReplyToken> replyID;
4247 CHECK(msg->senderAwaitsResponse(&replyID));
4248
4249 if (!isExecuting()) {
4250 PostReplyWithError(replyID, INVALID_OPERATION);
4251 break;
4252 } else if (mFlags & kFlagStickyError) {
4253 PostReplyWithError(replyID, getStickyError());
4254 break;
4255 }
4256
4257 status_t err = onReleaseOutputBuffer(msg);
4258
4259 PostReplyWithError(replyID, err);
4260 break;
4261 }
4262
4263 case kWhatSignalEndOfInputStream:
4264 {
4265 if (!isExecuting() || !mHaveInputSurface) {
4266 PostReplyWithError(msg, INVALID_OPERATION);
4267 break;
4268 } else if (mFlags & kFlagStickyError) {
4269 PostReplyWithError(msg, getStickyError());
4270 break;
4271 }
4272
4273 if (mReplyID) {
4274 mDeferredMessages.push_back(msg);
4275 break;
4276 }
4277 sp<AReplyToken> replyID;
4278 CHECK(msg->senderAwaitsResponse(&replyID));
4279
4280 mReplyID = replyID;
4281 mCodec->signalEndOfInputStream();
4282 break;
4283 }
4284
4285 case kWhatGetBuffers:
4286 {
4287 sp<AReplyToken> replyID;
4288 CHECK(msg->senderAwaitsResponse(&replyID));
4289 if (!isExecuting() || (mFlags & kFlagIsAsync)) {
4290 PostReplyWithError(replyID, INVALID_OPERATION);
4291 break;
4292 } else if (mFlags & kFlagStickyError) {
4293 PostReplyWithError(replyID, getStickyError());
4294 break;
4295 }
4296
4297 int32_t portIndex;
4298 CHECK(msg->findInt32("portIndex", &portIndex));
4299
4300 Vector<sp<MediaCodecBuffer> > *dstBuffers;
4301 CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
4302
4303 dstBuffers->clear();
4304 // If we're using input surface (either non-persistent created by
4305 // createInputSurface(), or persistent set by setInputSurface()),
4306 // give the client an empty input buffers array.
4307 if (portIndex != kPortIndexInput || !mHaveInputSurface) {
4308 if (portIndex == kPortIndexInput) {
4309 mBufferChannel->getInputBufferArray(dstBuffers);
4310 } else {
4311 mBufferChannel->getOutputBufferArray(dstBuffers);
4312 }
4313 }
4314
4315 (new AMessage)->postReply(replyID);
4316 break;
4317 }
4318
4319 case kWhatFlush:
4320 {
4321 if (!isExecuting()) {
4322 PostReplyWithError(msg, INVALID_OPERATION);
4323 break;
4324 } else if (mFlags & kFlagStickyError) {
4325 PostReplyWithError(msg, getStickyError());
4326 break;
4327 }
4328
4329 if (mReplyID) {
4330 mDeferredMessages.push_back(msg);
4331 break;
4332 }
4333 sp<AReplyToken> replyID;
4334 CHECK(msg->senderAwaitsResponse(&replyID));
4335
4336 mReplyID = replyID;
4337 // TODO: skip flushing if already FLUSHED
4338 setState(FLUSHING);
4339
4340 mCodec->signalFlush();
4341 returnBuffersToCodec();
4342 TunnelPeekState previousState = mTunnelPeekState;
4343 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
4344 ALOGV("TunnelPeekState: %s -> %s",
4345 asString(previousState),
4346 asString(TunnelPeekState::kEnabledNoBuffer));
4347 break;
4348 }
4349
4350 case kWhatGetInputFormat:
4351 case kWhatGetOutputFormat:
4352 {
4353 sp<AMessage> format =
4354 (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
4355
4356 sp<AReplyToken> replyID;
4357 CHECK(msg->senderAwaitsResponse(&replyID));
4358
4359 if ((mState != CONFIGURED && mState != STARTING &&
4360 mState != STARTED && mState != FLUSHING &&
4361 mState != FLUSHED)
4362 || format == NULL) {
4363 PostReplyWithError(replyID, INVALID_OPERATION);
4364 break;
4365 } else if (mFlags & kFlagStickyError) {
4366 PostReplyWithError(replyID, getStickyError());
4367 break;
4368 }
4369
4370 sp<AMessage> response = new AMessage;
4371 response->setMessage("format", format);
4372 response->postReply(replyID);
4373 break;
4374 }
4375
4376 case kWhatRequestIDRFrame:
4377 {
4378 mCodec->signalRequestIDRFrame();
4379 break;
4380 }
4381
4382 case kWhatRequestActivityNotification:
4383 {
4384 CHECK(mActivityNotify == NULL);
4385 CHECK(msg->findMessage("notify", &mActivityNotify));
4386
4387 postActivityNotificationIfPossible();
4388 break;
4389 }
4390
4391 case kWhatGetName:
4392 {
4393 sp<AReplyToken> replyID;
4394 CHECK(msg->senderAwaitsResponse(&replyID));
4395
4396 if (mComponentName.empty()) {
4397 PostReplyWithError(replyID, INVALID_OPERATION);
4398 break;
4399 }
4400
4401 sp<AMessage> response = new AMessage;
4402 response->setString("name", mComponentName.c_str());
4403 response->postReply(replyID);
4404 break;
4405 }
4406
4407 case kWhatGetCodecInfo:
4408 {
4409 sp<AReplyToken> replyID;
4410 CHECK(msg->senderAwaitsResponse(&replyID));
4411
4412 sp<AMessage> response = new AMessage;
4413 response->setObject("codecInfo", mCodecInfo);
4414 response->postReply(replyID);
4415 break;
4416 }
4417
4418 case kWhatSetParameters:
4419 {
4420 sp<AReplyToken> replyID;
4421 CHECK(msg->senderAwaitsResponse(&replyID));
4422
4423 sp<AMessage> params;
4424             CHECK(msg->findMessage("params", &params));
4425
4426 status_t err = onSetParameters(params);
4427
4428 PostReplyWithError(replyID, err);
4429 break;
4430 }
4431
4432 case kWhatDrmReleaseCrypto:
4433 {
4434 onReleaseCrypto(msg);
4435 break;
4436 }
4437
4438 case kWhatCheckBatteryStats:
4439 {
4440 if (mBatteryChecker != nullptr) {
4441 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
4442 mResourceManagerProxy->removeResource(
4443 MediaResource::VideoBatteryResource());
4444 });
4445 }
4446 break;
4447 }
4448
4449 default:
4450 TRESPASS();
4451 }
4452 }
4453
4454 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
4455 sp<AMessage> format = buffer->format();
4456 if (mOutputFormat == format) {
4457 return;
4458 }
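    // In block model mode, compute the union of keys that differ between the old and
    // new output formats and attach it to the buffer's meta as "changedKeys" so the
    // receiver can tell which entries actually changed.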
4459 if (mFlags & kFlagUseBlockModel) {
4460 sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
4461 sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
4462 std::set<std::string> keys;
4463 size_t numEntries = diff1->countEntries();
4464 AMessage::Type type;
4465 for (size_t i = 0; i < numEntries; ++i) {
4466 keys.emplace(diff1->getEntryNameAt(i, &type));
4467 }
4468 numEntries = diff2->countEntries();
4469 for (size_t i = 0; i < numEntries; ++i) {
4470 keys.emplace(diff2->getEntryNameAt(i, &type));
4471 }
4472 sp<WrapperObject<std::set<std::string>>> changedKeys{
4473 new WrapperObject<std::set<std::string>>{std::move(keys)}};
4474 buffer->meta()->setObject("changedKeys", changedKeys);
4475 }
4476 mOutputFormat = format;
4477 mapFormat(mComponentName, format, nullptr, true);
4478 ALOGV("[%s] output format changed to: %s",
4479 mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
4480
4481 if (mSoftRenderer == NULL &&
4482 mSurface != NULL &&
4483 (mFlags & kFlagUsesSoftwareRenderer)) {
4484 AString mime;
4485 CHECK(mOutputFormat->findString("mime", &mime));
4486
4487 // TODO: propagate color aspects to software renderer to allow better
4488 // color conversion to RGB. For now, just mark dataspace for YUV
4489 // rendering.
4490 int32_t dataSpace;
4491 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
4492 ALOGD("[%s] setting dataspace on output surface to #%x",
4493 mComponentName.c_str(), dataSpace);
4494 int err = native_window_set_buffers_data_space(
4495 mSurface.get(), (android_dataspace)dataSpace);
4496 ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
4497 }
4498 if (mOutputFormat->contains("hdr-static-info")) {
4499 HDRStaticInfo info;
4500 if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
4501 setNativeWindowHdrMetadata(mSurface.get(), &info);
4502 }
4503 }
4504
4505 sp<ABuffer> hdr10PlusInfo;
4506 if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
4507 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
4508 native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
4509 hdr10PlusInfo->size(), hdr10PlusInfo->data());
4510 }
4511
4512 if (mime.startsWithIgnoreCase("video/")) {
4513 mSurface->setDequeueTimeout(-1);
4514 mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
4515 }
4516 }
4517
4518 requestCpuBoostIfNeeded();
4519
4520 if (mFlags & kFlagIsEncoder) {
4521 // Before we announce the format change we should
4522 // collect codec specific data and amend the output
4523 // format as necessary.
4524 int32_t flags = 0;
4525 (void) buffer->meta()->findInt32("flags", &flags);
4526 if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
4527 && !mOwnerName.startsWith("codec2::")) {
4528 status_t err =
4529 amendOutputFormatWithCodecSpecificData(buffer);
4530
4531 if (err != OK) {
4532 ALOGE("Codec spit out malformed codec "
4533 "specific data!");
4534 }
4535 }
4536 }
4537 if (mFlags & kFlagIsAsync) {
4538 onOutputFormatChanged();
4539 } else {
4540 mFlags |= kFlagOutputFormatChanged;
4541 postActivityNotificationIfPossible();
4542 }
4543
4544 // Notify mCrypto of video resolution changes
4545 if (mCrypto != NULL) {
4546 int32_t left, top, right, bottom, width, height;
4547 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
4548 mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
4549 } else if (mOutputFormat->findInt32("width", &width)
4550 && mOutputFormat->findInt32("height", &height)) {
4551 mCrypto->notifyResolution(width, height);
4552 }
4553 }
4554 }
4555
4556 void MediaCodec::extractCSD(const sp<AMessage> &format) {
4557 mCSD.clear();
4558
4559 size_t i = 0;
4560 for (;;) {
4561 sp<ABuffer> csd;
4562 if (!format->findBuffer(AStringPrintf("csd-%u", i).c_str(), &csd)) {
4563 break;
4564 }
4565 if (csd->size() == 0) {
4566 ALOGW("csd-%zu size is 0", i);
4567 }
4568
4569 mCSD.push_back(csd);
4570 ++i;
4571 }
4572
4573 ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
4574 }
4575
4576 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
4577 CHECK(!mCSD.empty());
4578
4579 sp<ABuffer> csd = *mCSD.begin();
4580 mCSD.erase(mCSD.begin());
4581 std::shared_ptr<C2Buffer> c2Buffer;
4582 sp<hardware::HidlMemory> memory;
4583 size_t offset = 0;
4584
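    // Three ways to hand the CSD to the codec: in block model with a crypto/descrambler
    // the bytes go into shared HidlMemory carved out of a MemoryDealer; in plain block
    // model they go into a freshly fetched C2 linear block; otherwise they are copied
    // into the codec-provided input buffer at bufferIndex.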
4585 if (mFlags & kFlagUseBlockModel) {
4586 if (hasCryptoOrDescrambler()) {
4587 constexpr size_t kInitialDealerCapacity = 1048576; // 1MB
4588 thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
4589 kInitialDealerCapacity, "CSD(1MB)");
4590 sp<IMemory> mem = sDealer->allocate(csd->size());
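            // If the thread-local dealer is too small for this CSD, grow it
            // geometrically (to at least twice the CSD size) and retry the allocation
            // from the new dealer.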
4591 if (mem == nullptr) {
4592 size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
4593 while (csd->size() * 2 > newDealerCapacity) {
4594 newDealerCapacity *= 2;
4595 }
4596 sDealer = new MemoryDealer(
4597 newDealerCapacity,
4598 AStringPrintf("CSD(%dMB)", newDealerCapacity / 1048576).c_str());
4599 mem = sDealer->allocate(csd->size());
4600 }
4601 memcpy(mem->unsecurePointer(), csd->data(), csd->size());
4602 ssize_t heapOffset;
4603 memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
4604 offset += heapOffset;
4605 } else {
4606 std::shared_ptr<C2LinearBlock> block =
4607 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
4608 C2WriteView view{block->map().get()};
4609 if (view.error() != C2_OK) {
4610 return -EINVAL;
4611 }
4612 if (csd->size() > view.capacity()) {
4613 return -EINVAL;
4614 }
4615 memcpy(view.base(), csd->data(), csd->size());
4616 c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
4617 }
4618 } else {
4619 const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
4620 const sp<MediaCodecBuffer> &codecInputData = info.mData;
4621
4622 if (csd->size() > codecInputData->capacity()) {
4623 return -EINVAL;
4624 }
4625 if (codecInputData->data() == NULL) {
4626 ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
4627 return -EINVAL;
4628 }
4629
4630 memcpy(codecInputData->data(), csd->data(), csd->size());
4631 }
4632
4633 AString errorDetailMsg;
4634
4635 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
4636 msg->setSize("index", bufferIndex);
4637 msg->setSize("offset", 0);
4638 msg->setSize("size", csd->size());
4639 msg->setInt64("timeUs", 0LL);
4640 msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
4641 msg->setPointer("errorDetailMsg", &errorDetailMsg);
4642 if (c2Buffer) {
4643 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
4644 new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
4645 msg->setObject("c2buffer", obj);
4646 msg->setMessage("tunings", new AMessage);
4647 } else if (memory) {
4648 sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
4649 new WrapperObject<sp<hardware::HidlMemory>>{memory}};
4650 msg->setObject("memory", obj);
4651 msg->setMessage("tunings", new AMessage);
4652 }
4653
4654 return onQueueInputBuffer(msg);
4655 }
4656
4657 void MediaCodec::setState(State newState) {
4658 if (newState == INITIALIZED || newState == UNINITIALIZED) {
4659 delete mSoftRenderer;
4660 mSoftRenderer = NULL;
4661
4662 if ( mCrypto != NULL ) {
4663 ALOGV("setState: ~mCrypto: %p (%d)",
4664 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4665 }
4666 mCrypto.clear();
4667 mDescrambler.clear();
4668 handleSetSurface(NULL);
4669
4670 mInputFormat.clear();
4671 mOutputFormat.clear();
4672 mFlags &= ~kFlagOutputFormatChanged;
4673 mFlags &= ~kFlagOutputBuffersChanged;
4674 mFlags &= ~kFlagStickyError;
4675 mFlags &= ~kFlagIsEncoder;
4676 mFlags &= ~kFlagIsAsync;
4677 mStickyError = OK;
4678
4679 mActivityNotify.clear();
4680 mCallback.clear();
4681 }
4682
4683 if (newState == UNINITIALIZED) {
4684 // return any straggling buffers, e.g. if we got here on an error
4685 returnBuffersToCodec();
4686
4687         // The component is gone; mediaserver is probably back up already,
4688         // but should definitely be back up should we try to instantiate
4689         // another component... and the cycle continues.
4690 mFlags &= ~kFlagSawMediaServerDie;
4691 }
4692
4693 mState = newState;
4694
4695 if (mBatteryChecker != nullptr) {
4696 mBatteryChecker->setExecuting(isExecuting());
4697 }
4698
4699 cancelPendingDequeueOperations();
4700 }
4701
4702 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
4703 returnBuffersToCodecOnPort(kPortIndexInput, isReclaim);
4704 returnBuffersToCodecOnPort(kPortIndexOutput, isReclaim);
4705 }
4706
4707 void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex, bool isReclaim) {
4708 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
4709 Mutex::Autolock al(mBufferLock);
4710
4711 if (portIndex == kPortIndexInput) {
4712 mLeftover.clear();
4713 }
4714 for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
4715 BufferInfo *info = &mPortBuffers[portIndex][i];
4716
4717 if (info->mData != nullptr) {
4718 sp<MediaCodecBuffer> buffer = info->mData;
4719 if (isReclaim && info->mOwnedByClient) {
4720 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
4721 portIndex, i);
4722 } else {
4723 info->mOwnedByClient = false;
4724 info->mData.clear();
4725 }
4726 mBufferChannel->discardBuffer(buffer);
4727 }
4728 }
4729
4730 mAvailPortBuffers[portIndex].clear();
4731 }
4732
4733 size_t MediaCodec::updateBuffers(
4734 int32_t portIndex, const sp<AMessage> &msg) {
4735 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
4736 size_t index;
4737 CHECK(msg->findSize("index", &index));
4738 sp<RefBase> obj;
4739 CHECK(msg->findObject("buffer", &obj));
4740 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4741
4742 {
4743 Mutex::Autolock al(mBufferLock);
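        // Grow the per-port buffer table in aligned chunks so it does not need to be
        // resized for every new index reported by the codec.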
4744 if (mPortBuffers[portIndex].size() <= index) {
4745 mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
4746 }
4747 mPortBuffers[portIndex][index].mData = buffer;
4748 }
4749 mAvailPortBuffers[portIndex].push_back(index);
4750
4751 return index;
4752 }
4753
4754 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
4755 size_t index;
4756 size_t offset;
4757 size_t size;
4758 int64_t timeUs;
4759 uint32_t flags;
4760 CHECK(msg->findSize("index", &index));
4761 CHECK(msg->findInt64("timeUs", &timeUs));
4762 CHECK(msg->findInt32("flags", (int32_t *)&flags));
4763 std::shared_ptr<C2Buffer> c2Buffer;
4764 sp<hardware::HidlMemory> memory;
4765 sp<RefBase> obj;
4766 if (msg->findObject("c2buffer", &obj)) {
4767 CHECK(obj);
4768 c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
4769 } else if (msg->findObject("memory", &obj)) {
4770 CHECK(obj);
4771 memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
4772 CHECK(msg->findSize("offset", &offset));
4773 } else {
4774 CHECK(msg->findSize("offset", &offset));
4775 }
4776 const CryptoPlugin::SubSample *subSamples;
4777 size_t numSubSamples;
4778 const uint8_t *key;
4779 const uint8_t *iv;
4780 CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
4781
4782 // We allow the simpler queueInputBuffer API to be used even in
4783 // secure mode, by fabricating a single unencrypted subSample.
4784 CryptoPlugin::SubSample ss;
4785 CryptoPlugin::Pattern pattern;
4786
4787 if (msg->findSize("size", &size)) {
4788 if (hasCryptoOrDescrambler()) {
4789 ss.mNumBytesOfClearData = size;
4790 ss.mNumBytesOfEncryptedData = 0;
4791
4792 subSamples = &ss;
4793 numSubSamples = 1;
4794 key = NULL;
4795 iv = NULL;
4796 pattern.mEncryptBlocks = 0;
4797 pattern.mSkipBlocks = 0;
4798 }
4799 } else if (!c2Buffer) {
4800 if (!hasCryptoOrDescrambler()) {
4801 ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
4802 mComponentName.c_str());
4803 return -EINVAL;
4804 }
4805
4806 CHECK(msg->findPointer("subSamples", (void **)&subSamples));
4807 CHECK(msg->findSize("numSubSamples", &numSubSamples));
4808 CHECK(msg->findPointer("key", (void **)&key));
4809 CHECK(msg->findPointer("iv", (void **)&iv));
4810 CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
4811 CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
4812
4813 int32_t tmp;
4814 CHECK(msg->findInt32("mode", &tmp));
4815
4816 mode = (CryptoPlugin::Mode)tmp;
4817
4818 size = 0;
4819 for (size_t i = 0; i < numSubSamples; ++i) {
4820 size += subSamples[i].mNumBytesOfClearData;
4821 size += subSamples[i].mNumBytesOfEncryptedData;
4822 }
4823 }
4824
4825 if (index >= mPortBuffers[kPortIndexInput].size()) {
4826 return -ERANGE;
4827 }
4828
4829 BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
4830 sp<MediaCodecBuffer> buffer = info->mData;
4831
4832 if (c2Buffer || memory) {
4833 sp<AMessage> tunings;
4834 CHECK(msg->findMessage("tunings", &tunings));
4835 onSetParameters(tunings);
4836
4837 status_t err = OK;
4838 if (c2Buffer) {
4839 err = mBufferChannel->attachBuffer(c2Buffer, buffer);
4840 } else if (memory) {
4841 err = mBufferChannel->attachEncryptedBuffer(
4842 memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
4843 offset, subSamples, numSubSamples, buffer);
4844 } else {
4845 err = UNKNOWN_ERROR;
4846 }
4847
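        // If the attached C2 linear block is larger than what fit into this codec
        // buffer, wrap the unconsumed remainder in a new C2Buffer and keep it as a
        // leftover to be submitted with a later input buffer; EOS is deferred until
        // the leftovers are queued.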
4848 if (err == OK && !buffer->asC2Buffer()
4849 && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
4850 C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
4851 if (block.size() > buffer->size()) {
4852 C2ConstLinearBlock leftover = block.subBlock(
4853 block.offset() + buffer->size(), block.size() - buffer->size());
4854 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
4855 new WrapperObject<std::shared_ptr<C2Buffer>>{
4856 C2Buffer::CreateLinearBuffer(leftover)}};
4857 msg->setObject("c2buffer", obj);
4858 mLeftover.push_front(msg);
4859 // Not sending EOS if we have leftovers
4860 flags &= ~BUFFER_FLAG_EOS;
4861 }
4862 }
4863
4864 offset = buffer->offset();
4865 size = buffer->size();
4866 if (err != OK) {
4867 return err;
4868 }
4869 }
4870
4871 if (buffer == nullptr || !info->mOwnedByClient) {
4872 return -EACCES;
4873 }
4874
4875 if (offset + size > buffer->capacity()) {
4876 return -EINVAL;
4877 }
4878
4879 buffer->setRange(offset, size);
4880 buffer->meta()->setInt64("timeUs", timeUs);
4881 if (flags & BUFFER_FLAG_EOS) {
4882 buffer->meta()->setInt32("eos", true);
4883 }
4884
4885 if (flags & BUFFER_FLAG_CODECCONFIG) {
4886 buffer->meta()->setInt32("csd", true);
4887 }
4888
4889 if (mTunneled) {
4890 TunnelPeekState previousState = mTunnelPeekState;
4891 switch(mTunnelPeekState){
4892 case TunnelPeekState::kEnabledNoBuffer:
4893 buffer->meta()->setInt32("tunnel-first-frame", 1);
4894 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
4895 ALOGV("TunnelPeekState: %s -> %s",
4896 asString(previousState),
4897 asString(mTunnelPeekState));
4898 break;
4899 case TunnelPeekState::kDisabledNoBuffer:
4900 buffer->meta()->setInt32("tunnel-first-frame", 1);
4901 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
4902 ALOGV("TunnelPeekState: %s -> %s",
4903 asString(previousState),
4904 asString(mTunnelPeekState));
4905 break;
4906 default:
4907 break;
4908 }
4909 }
4910
4911 status_t err = OK;
4912 if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
4913 AString *errorDetailMsg;
4914 CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
4915 // Notify mCrypto of video resolution changes
4916 if (mTunneled && mCrypto != NULL) {
4917 int32_t width, height;
4918 if (mInputFormat->findInt32("width", &width) &&
4919 mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
4920 if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
4921 mTunneledInputWidth = width;
4922 mTunneledInputHeight = height;
4923 mCrypto->notifyResolution(width, height);
4924 }
4925 }
4926 }
4927 err = mBufferChannel->queueSecureInputBuffer(
4928 buffer,
4929 (mFlags & kFlagIsSecure),
4930 key,
4931 iv,
4932 mode,
4933 pattern,
4934 subSamples,
4935 numSubSamples,
4936 errorDetailMsg);
4937 if (err != OK) {
4938 mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
4939 ALOGW("Log queueSecureInputBuffer error: %d", err);
4940 }
4941 } else {
4942 err = mBufferChannel->queueInputBuffer(buffer);
4943 if (err != OK) {
4944 mediametrics_setInt32(mMetricsHandle, kCodecQueueInputBufferError, err);
4945 ALOGW("Log queueInputBuffer error: %d", err);
4946 }
4947 }
4948
4949 if (err == OK) {
4950 // synchronization boundary for getBufferAndFormat
4951 Mutex::Autolock al(mBufferLock);
4952 info->mOwnedByClient = false;
4953 info->mData.clear();
4954
4955 statsBufferSent(timeUs, buffer);
4956 }
4957
4958 return err;
4959 }
4960
4961 status_t MediaCodec::handleLeftover(size_t index) {
4962 if (mLeftover.empty()) {
4963 return OK;
4964 }
4965 sp<AMessage> msg = mLeftover.front();
4966 mLeftover.pop_front();
4967 msg->setSize("index", index);
4968 return onQueueInputBuffer(msg);
4969 }
4970
4971 //static
4972 size_t MediaCodec::CreateFramesRenderedMessage(
4973 const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
4974 size_t index = 0;
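    // Each rendered frame becomes an indexed pair of entries in the message:
    // "<index>-media-time-us" and "<index>-system-nano". Frames dropped from
    // tracking (negative render time) are skipped.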
4975
4976 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
4977 it != done.cend(); ++it) {
4978 if (it->getRenderTimeNs() < 0) {
4979 continue; // dropped frame from tracking
4980 }
4981 msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
4982 msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
4983 ++index;
4984 }
4985 return index;
4986 }
4987
4988 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
4989 size_t index;
4990 CHECK(msg->findSize("index", &index));
4991
4992 int32_t render;
4993 if (!msg->findInt32("render", &render)) {
4994 render = 0;
4995 }
4996
4997 if (!isExecuting()) {
4998 return -EINVAL;
4999 }
5000
5001 if (index >= mPortBuffers[kPortIndexOutput].size()) {
5002 return -ERANGE;
5003 }
5004
5005 BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
5006
5007 if (info->mData == nullptr || !info->mOwnedByClient) {
5008 return -EACCES;
5009 }
5010
5011 // synchronization boundary for getBufferAndFormat
5012 sp<MediaCodecBuffer> buffer;
5013 {
5014 Mutex::Autolock al(mBufferLock);
5015 info->mOwnedByClient = false;
5016 buffer = info->mData;
5017 info->mData.clear();
5018 }
5019
5020 if (render && buffer->size() != 0) {
5021 int64_t mediaTimeUs = -1;
5022 buffer->meta()->findInt64("timeUs", &mediaTimeUs);
5023
5024 int64_t renderTimeNs = 0;
5025 if (!msg->findInt64("timestampNs", &renderTimeNs)) {
5026 // use media timestamp if client did not request a specific render timestamp
5027 ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
5028 renderTimeNs = mediaTimeUs * 1000;
5029 }
5030
5031 if (mSoftRenderer != NULL) {
5032 std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
5033 buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
5034 mPortBuffers[kPortIndexOutput].size(), buffer->format());
5035
5036 // if we are running, notify rendered frames
5037 if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
5038 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
5039 sp<AMessage> data = new AMessage;
5040 if (CreateFramesRenderedMessage(doneFrames, data)) {
5041 notify->setMessage("data", data);
5042 notify->post();
5043 }
5044 }
5045 }
5046 status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
5047
5048 if (err == NO_INIT) {
5049             ALOGE("rendering to non-initialized (obsolete) surface");
5050 return err;
5051 }
5052 if (err != OK) {
5053             ALOGI("rendering output error %d", err);
5054 }
5055 } else {
5056 mBufferChannel->discardBuffer(buffer);
5057 }
5058
5059 return OK;
5060 }
5061
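// Returns the next available buffer on the given port without removing it
// from the free list, or nullptr if no buffer is available.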
5062 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
5063 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5064
5065 List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
5066
5067 if (availBuffers->empty()) {
5068 return nullptr;
5069 }
5070
5071 return &mPortBuffers[portIndex][*availBuffers->begin()];
5072 }
5073
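// Pops the next available buffer on the given port, marks it as owned by the
// client, and copies format-derived metadata (image-data, crop rect) onto the
// buffer's meta. Returns the buffer index, or -EAGAIN if none is available.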
5074 ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
5075 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5076
5077 BufferInfo *info = peekNextPortBuffer(portIndex);
5078 if (!info) {
5079 return -EAGAIN;
5080 }
5081
5082 List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
5083 size_t index = *availBuffers->begin();
5084 CHECK_EQ(info, &mPortBuffers[portIndex][index]);
5085 availBuffers->erase(availBuffers->begin());
5086
5087 CHECK(!info->mOwnedByClient);
5088 {
5089 Mutex::Autolock al(mBufferLock);
5090 info->mOwnedByClient = true;
5091
5092 // set image-data
5093 if (info->mData->format() != NULL) {
5094 sp<ABuffer> imageData;
5095 if (info->mData->format()->findBuffer("image-data", &imageData)) {
5096 info->mData->meta()->setBuffer("image-data", imageData);
5097 }
5098 int32_t left, top, right, bottom;
5099 if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
5100 info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
5101 }
5102 }
5103 }
5104
5105 return index;
5106 }
5107
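// Connects the codec to a new output surface. Bumps the surface generation
// number so that stale buffers from a previous connection cannot be
// reattached, and records whether the surface feeds the window composer.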
5108 status_t MediaCodec::connectToSurface(const sp<Surface> &surface) {
5109 status_t err = OK;
5110 if (surface != NULL) {
5111 uint64_t oldId, newId;
5112 if (mSurface != NULL
5113 && surface->getUniqueId(&newId) == NO_ERROR
5114 && mSurface->getUniqueId(&oldId) == NO_ERROR
5115 && newId == oldId) {
5116 ALOGI("[%s] connecting to the same surface. Nothing to do.", mComponentName.c_str());
5117 return ALREADY_EXISTS;
5118 }
5119
5120 // in case we don't connect, ensure that we don't signal the surface is
5121 // connected to the screen
5122 mIsSurfaceToScreen = false;
5123
5124 err = nativeWindowConnect(surface.get(), "connectToSurface");
5125 if (err == OK) {
5126 // Require a fresh set of buffers after each connect by using a unique generation
5127 // number. Rely on the fact that max supported process id by Linux is 2^22.
5128 // PID is never 0 so we don't have to worry that we use the default generation of 0.
5129 // TODO: come up with a unique scheme if other producers also set the generation number.
5130 static uint32_t mSurfaceGeneration = 0;
5131 uint32_t generation = (getpid() << 10) | (++mSurfaceGeneration & ((1 << 10) - 1));
5132 surface->setGenerationNumber(generation);
5133 ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), generation);
5134
5135             // HACK: clear any free buffers. Remove this once connect does it automatically.
5136 // This is needed as the consumer may be holding onto stale frames that it can reattach
5137 // to this surface after disconnect/connect, and those free frames would inherit the new
5138 // generation number. Disconnecting after setting a unique generation prevents this.
5139 nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
5140 err = nativeWindowConnect(surface.get(), "connectToSurface(reconnect)");
5141 }
5142
5143 if (err != OK) {
5144 ALOGE("nativeWindowConnect returned an error: %s (%d)", strerror(-err), err);
5145 } else {
5146 if (!mAllowFrameDroppingBySurface) {
5147 disableLegacyBufferDropPostQ(surface);
5148 }
5149             // keep track of whether the buffers of the connected surface go to the screen
5150 int result = 0;
5151 surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
5152 mIsSurfaceToScreen = result != 0;
5153 }
5154 }
5155 // do not return ALREADY_EXISTS unless surfaces are the same
5156 return err == ALREADY_EXISTS ? BAD_VALUE : err;
5157 }
5158
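// Disconnects from the current output surface (if any) and clears the
// surface state even if the native window disconnect fails.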
5159 status_t MediaCodec::disconnectFromSurface() {
5160 status_t err = OK;
5161 if (mSurface != NULL) {
5162 // Resetting generation is not technically needed, but there is no need to keep it either
5163 mSurface->setGenerationNumber(0);
5164 err = nativeWindowDisconnect(mSurface.get(), "disconnectFromSurface");
5165 if (err != OK) {
5166 ALOGW("nativeWindowDisconnect returned an error: %s (%d)", strerror(-err), err);
5167 }
5168 // assume disconnected even on error
5169 mSurface.clear();
5170 mIsSurfaceToScreen = false;
5171 }
5172 return err;
5173 }
5174
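// Swaps the output surface: disconnects from the old surface, then connects
// to and adopts the new one on success.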
5175 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
5176 status_t err = OK;
5177 if (mSurface != NULL) {
5178 (void)disconnectFromSurface();
5179 }
5180 if (surface != NULL) {
5181 err = connectToSurface(surface);
5182 if (err == OK) {
5183 mSurface = surface;
5184 }
5185 }
5186 return err;
5187 }
5188
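// Posts a CB_INPUT_AVAILABLE callback for every input buffer that can be
// dequeued right now.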
5189 void MediaCodec::onInputBufferAvailable() {
5190 int32_t index;
5191 while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
5192 sp<AMessage> msg = mCallback->dup();
5193 msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
5194 msg->setInt32("index", index);
5195 msg->post();
5196 }
5197 }
5198
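// Posts a CB_OUTPUT_AVAILABLE callback for every dequeueable output buffer,
// carrying its offset, size, timestamp and flags.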
5199 void MediaCodec::onOutputBufferAvailable() {
5200 int32_t index;
5201 while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
5202 const sp<MediaCodecBuffer> &buffer =
5203 mPortBuffers[kPortIndexOutput][index].mData;
5204 sp<AMessage> msg = mCallback->dup();
5205 msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
5206 msg->setInt32("index", index);
5207 msg->setSize("offset", buffer->offset());
5208 msg->setSize("size", buffer->size());
5209
5210 int64_t timeUs;
5211 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
5212
5213 msg->setInt64("timeUs", timeUs);
5214
5215 int32_t flags;
5216 CHECK(buffer->meta()->findInt32("flags", &flags));
5217
5218 msg->setInt32("flags", flags);
5219
5220 statsBufferReceived(timeUs, buffer);
5221
5222 msg->post();
5223 }
5224 }
5225
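// Forwards an error to the client via CB_ERROR if an asynchronous callback
// is registered.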
5226 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
5227 if (mCallback != NULL) {
5228 sp<AMessage> msg = mCallback->dup();
5229 msg->setInt32("callbackID", CB_ERROR);
5230 msg->setInt32("err", err);
5231 msg->setInt32("actionCode", actionCode);
5232
5233 if (detail != NULL) {
5234 msg->setString("detail", detail);
5235 }
5236
5237 msg->post();
5238 }
5239 }
5240
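// Notifies the client of the new output format via CB_OUTPUT_FORMAT_CHANGED
// if an asynchronous callback is registered.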
5241 void MediaCodec::onOutputFormatChanged() {
5242 if (mCallback != NULL) {
5243 sp<AMessage> msg = mCallback->dup();
5244 msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
5245 msg->setMessage("format", mOutputFormat);
5246 msg->post();
5247 }
5248 }
5249
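// Fires the pending activity notification, if one was requested, when buffers
// become available or a sticky error / output change is pending; error and
// output-change cases advertise INT32_MAX output buffers so the consumer
// keeps dequeuing.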
5250 void MediaCodec::postActivityNotificationIfPossible() {
5251 if (mActivityNotify == NULL) {
5252 return;
5253 }
5254
5255 bool isErrorOrOutputChanged =
5256 (mFlags & (kFlagStickyError
5257 | kFlagOutputBuffersChanged
5258 | kFlagOutputFormatChanged));
5259
5260 if (isErrorOrOutputChanged
5261 || !mAvailPortBuffers[kPortIndexInput].empty()
5262 || !mAvailPortBuffers[kPortIndexOutput].empty()) {
5263 mActivityNotify->setInt32("input-buffers",
5264 mAvailPortBuffers[kPortIndexInput].size());
5265
5266 if (isErrorOrOutputChanged) {
5267             // we want the consumer to dequeue as many times as it can
5268 mActivityNotify->setInt32("output-buffers", INT32_MAX);
5269 } else {
5270 mActivityNotify->setInt32("output-buffers",
5271 mAvailPortBuffers[kPortIndexOutput].size());
5272 }
5273 mActivityNotify->post();
5274 mActivityNotify.clear();
5275 }
5276 }
5277
5278 status_t MediaCodec::setParameters(const sp<AMessage> &params) {
5279 sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
5280 msg->setMessage("params", params);
5281
5282 sp<AMessage> response;
5283 return PostAndAwaitResponse(msg, &response);
5284 }
5285
5286 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
5287 updateLowLatency(params);
5288 mapFormat(mComponentName, params, nullptr, false);
5289 updateTunnelPeek(params);
5290 mCodec->signalSetParameters(params);
5291
5292 return OK;
5293 }
5294
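// Copies codec-specific data from 'buffer' into mOutputFormat: for AVC the
// SPS/PPS NAL units become "csd-0"/"csd-1"; for every other mime type the
// whole buffer is stored as "csd-0".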
5295 status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
5296 const sp<MediaCodecBuffer> &buffer) {
5297 AString mime;
5298 CHECK(mOutputFormat->findString("mime", &mime));
5299
5300 if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
5301 // Codec specific data should be SPS and PPS in a single buffer,
5302 // each prefixed by a startcode (0x00 0x00 0x00 0x01).
5303 // We separate the two and put them into the output format
5304 // under the keys "csd-0" and "csd-1".
5305
5306 unsigned csdIndex = 0;
5307
5308 const uint8_t *data = buffer->data();
5309 size_t size = buffer->size();
5310
5311 const uint8_t *nalStart;
5312 size_t nalSize;
5313 while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
5314 sp<ABuffer> csd = new ABuffer(nalSize + 4);
5315 memcpy(csd->data(), "\x00\x00\x00\x01", 4);
5316 memcpy(csd->data() + 4, nalStart, nalSize);
5317
5318 mOutputFormat->setBuffer(
5319 AStringPrintf("csd-%u", csdIndex).c_str(), csd);
5320
5321 ++csdIndex;
5322 }
5323
5324 if (csdIndex != 2) {
5325 return ERROR_MALFORMED;
5326 }
5327 } else {
5328 // For everything else we just stash the codec specific data into
5329 // the output format as a single piece of csd under "csd-0".
5330 sp<ABuffer> csd = new ABuffer(buffer->size());
5331 memcpy(csd->data(), buffer->data(), buffer->size());
5332 csd->setRange(0, buffer->size());
5333 mOutputFormat->setBuffer("csd-0", csd);
5334 }
5335
5336 return OK;
5337 }
5338
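// Answers the pending synchronous request (propagating 'err' if non-OK) and
// then re-posts any messages that were deferred while the request was in
// flight. The status_t overload builds the reply and delegates to the
// response overload below.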
5339 void MediaCodec::postPendingRepliesAndDeferredMessages(
5340 std::string origin, status_t err /* = OK */) {
5341 sp<AMessage> response{new AMessage};
5342 if (err != OK) {
5343 response->setInt32("err", err);
5344 }
5345 postPendingRepliesAndDeferredMessages(origin, response);
5346 }
5347
5348 void MediaCodec::postPendingRepliesAndDeferredMessages(
5349 std::string origin, const sp<AMessage> &response) {
5350 LOG_ALWAYS_FATAL_IF(
5351 !mReplyID,
5352 "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
5353 origin.c_str(),
5354 mLastReplyOrigin.c_str());
5355 mLastReplyOrigin = origin;
5356 response->postReply(mReplyID);
5357 mReplyID.clear();
5358 ALOGV_IF(!mDeferredMessages.empty(),
5359 "posting %zu deferred messages", mDeferredMessages.size());
5360 for (sp<AMessage> msg : mDeferredMessages) {
5361 msg->post();
5362 }
5363 mDeferredMessages.clear();
5364 }
5365
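// Returns a human-readable name for the given state, or its numeric value if
// the state is not recognized.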
5366 std::string MediaCodec::stateString(State state) {
5367 const char *rval = NULL;
5368 char rawbuffer[16]; // room for "%d"
5369
5370 switch (state) {
5371 case UNINITIALIZED: rval = "UNINITIALIZED"; break;
5372 case INITIALIZING: rval = "INITIALIZING"; break;
5373 case INITIALIZED: rval = "INITIALIZED"; break;
5374 case CONFIGURING: rval = "CONFIGURING"; break;
5375 case CONFIGURED: rval = "CONFIGURED"; break;
5376 case STARTING: rval = "STARTING"; break;
5377 case STARTED: rval = "STARTED"; break;
5378 case FLUSHING: rval = "FLUSHING"; break;
5379 case FLUSHED: rval = "FLUSHED"; break;
5380 case STOPPING: rval = "STOPPING"; break;
5381 case RELEASING: rval = "RELEASING"; break;
5382 default:
5383 snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
5384 rval = rawbuffer;
5385 break;
5386 }
5387 return rval;
5388 }
5389
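// Checks whether a linear block can be fetched for all of the named
// components: "default"-owned components are reported as incompatible,
// owners other than "codec2::" yield NAME_NOT_FOUND, and otherwise the
// decision is delegated to CCodec.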
5390 // static
5391 status_t MediaCodec::CanFetchLinearBlock(
5392 const std::vector<std::string> &names, bool *isCompatible) {
5393 *isCompatible = false;
5394 if (names.size() == 0) {
5395 *isCompatible = true;
5396 return OK;
5397 }
5398 const CodecListCache &cache = GetCodecListCache();
5399 for (const std::string &name : names) {
5400 auto it = cache.mCodecInfoMap.find(name);
5401 if (it == cache.mCodecInfoMap.end()) {
5402 return NAME_NOT_FOUND;
5403 }
5404 const char *owner = it->second->getOwnerName();
5405 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
5406 *isCompatible = false;
5407 return OK;
5408 } else if (strncmp(owner, "codec2::", 8) != 0) {
5409 return NAME_NOT_FOUND;
5410 }
5411 }
5412 return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
5413 }
5414
5415 // static
5416 std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
5417 size_t capacity, const std::vector<std::string> &names) {
5418 return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
5419 }
5420
5421 // static
5422 status_t MediaCodec::CanFetchGraphicBlock(
5423 const std::vector<std::string> &names, bool *isCompatible) {
5424 *isCompatible = false;
5425 if (names.size() == 0) {
5426 *isCompatible = true;
5427 return OK;
5428 }
5429 const CodecListCache &cache = GetCodecListCache();
5430 for (const std::string &name : names) {
5431 auto it = cache.mCodecInfoMap.find(name);
5432 if (it == cache.mCodecInfoMap.end()) {
5433 return NAME_NOT_FOUND;
5434 }
5435 const char *owner = it->second->getOwnerName();
5436 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
5437 *isCompatible = false;
5438 return OK;
5439 } else if (strncmp(owner, "codec2.", 7) != 0) {
5440 return NAME_NOT_FOUND;
5441 }
5442 }
5443 return CCodec::CanFetchGraphicBlock(names, isCompatible);
5444 }
5445
5446 // static
5447 std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
5448 int32_t width,
5449 int32_t height,
5450 int32_t format,
5451 uint64_t usage,
5452 const std::vector<std::string> &names) {
5453 return CCodec::FetchGraphicBlock(width, height, format, usage, names);
5454 }
5455
5456 } // namespace android
5457