/*
 * Copyright 2012, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#include "hidl/HidlSupport.h"
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>

#include <set>
#include <stdlib.h>

#include <inttypes.h>
#include <dlfcn.h>

#include <C2Buffer.h>

#include "include/SoftwareRenderer.h"
#include "PlaybackDurationAccumulator.h"

#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>

#include <aidl/android/media/BnResourceManagerClient.h>
#include <aidl/android/media/IResourceManagerService.h>
#include <android/binder_ibinder.h>
#include <android/dlext.h>
#include <binder/IMemory.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
#include <gui/Surface.h>
#include <hidlmemory/FrameworkUtils.h>
#include <mediadrm/ICrypto.h>
#include <media/IOMX.h>
#include <media/MediaCodecBuffer.h>
#include <media/MediaCodecInfo.h>
#include <media/MediaMetricsItem.h>
#include <media/MediaResource.h>
#include <media/NdkMediaErrorPriv.h>
#include <media/NdkMediaFormat.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/formatshaper/FormatShaper.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/BatteryChecker.h>
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaFilter.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
#include <utils/Singleton.h>

namespace android {

using Status = ::ndk::ScopedAStatus;
using aidl::android::media::BnResourceManagerClient;
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;

// key for media statistics
static const char *kCodecKeyName = "codec";
// attrs for media statistics
// NB: these are matched with public Java API constants defined
// in frameworks/base/media/java/android/media/MediaCodec.java
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec";  /* e.g. OMX.google.aac.decoder */
static const char *kCodecMime = "android.media.mediacodec.mime";    /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
static const char *kCodecModeVideo = "video";    /* values returned for kCodecMode */
static const char *kCodecModeAudio = "audio";
static const char *kCodecModeImage = "image";
static const char *kCodecModeUnknown = "unknown";
static const char *kCodecEncoder = "android.media.mediacodec.encoder";  /* 0,1 */
static const char *kCodecSecure = "android.media.mediacodec.secure";    /* 0, 1 */
static const char *kCodecWidth = "android.media.mediacodec.width";      /* 0..n */
static const char *kCodecHeight = "android.media.mediacodec.height";    /* 0..n */
static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees";  /* 0/90/180/270 */
static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";
static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";

// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";

// Min/Max QP after shaping
static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";

// NB: These are not yet exposed as public Java API constants.
static const char *kCodecCrypto = "android.media.mediacodec.crypto";    /* 0,1 */
static const char *kCodecProfile = "android.media.mediacodec.profile";  /* 0..n */
static const char *kCodecLevel = "android.media.mediacodec.level";      /* 0..n */
static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode";  /* CQ/VBR/CBR */
static const char *kCodecBitrate = "android.media.mediacodec.bitrate";  /* 0..n */
static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate";  /* 0..n */
static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth";    /* 0..n */
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight";  /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs";  /* 0..n ms */
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max";  /* in us */
static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min";  /* in us */
static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg";  /* in us */
static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist";  /* in us */
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";

static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame";  /* 0..n */
static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";

// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";  /* in us */
static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min";  /* in us */
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";  /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";  /* in us */
static const char *kCodecPlaybackDurationSec =
        "android.media.mediacodec.playback-duration-sec";  /* in sec */

/* -1: shaper disabled
   >=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";

// XXX suppress until we get our representation right
static bool kEmitHistogram = false;

static int64_t getId(IResourceManagerClient const * client) {
    return (int64_t) client;
}

static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
    return getId(client.get());
}

static bool isResourceError(status_t err) {
    return (err == NO_MEMORY);
}

static const int kMaxRetry = 2;
static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
static const int kNumBuffersAlign = 16;

static const C2MemoryUsage kDefaultReadWriteUsage{
        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

////////////////////////////////////////////////////////////////////////////////

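// ResourceManagerClient is the callback object this codec registers with
// ResourceManagerService; the service calls back into it (reclaimResource,
// getName) when it wants this codec's resources released.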
struct ResourceManagerClient : public BnResourceManagerClient {
    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid) :
            mMediaCodec(codec), mPid(pid) {}

    Status reclaimResource(bool* _aidl_return) override {
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // Codec is already gone, so remove the resources as well
            ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
            std::shared_ptr<IResourceManagerService> service =
                    IResourceManagerService::fromBinder(binder);
            if (service == nullptr) {
                ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
                // Without the service we cannot remove the resource entry; return
                // instead of dereferencing a null service below.
                *_aidl_return = false;
                return Status::ok();
            }
            service->removeClient(mPid, getId(this));
            *_aidl_return = true;
            return Status::ok();
        }
        status_t err = codec->reclaim();
        if (err == WOULD_BLOCK) {
            ALOGD("Wait for the client to release codec.");
            usleep(kMaxReclaimWaitTimeInUs);
            ALOGD("Try to reclaim again.");
            err = codec->reclaim(true /* force */);
        }
        if (err != OK) {
            ALOGW("ResourceManagerClient failed to release codec with err %d", err);
        }
        *_aidl_return = (err == OK);
        return Status::ok();
    }

    Status getName(::std::string* _aidl_return) override {
        _aidl_return->clear();
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // codec is already gone.
            return Status::ok();
        }

        AString name;
        if (codec->getName(&name) == OK) {
            *_aidl_return = name.c_str();
        }
        return Status::ok();
    }

    virtual ~ResourceManagerClient() {}

private:
    wp<MediaCodec> mMediaCodec;
    int32_t mPid;

    DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};

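// ResourceManagerServiceProxy owns this codec's connection to the
// ResourceManagerService: it registers and removes resources, requests
// reclaims, and drops the connection when the service's binder dies.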
struct MediaCodec::ResourceManagerServiceProxy : public RefBase {
    ResourceManagerServiceProxy(pid_t pid, uid_t uid,
            const std::shared_ptr<IResourceManagerClient> &client);
    virtual ~ResourceManagerServiceProxy();

    status_t init();

    // implements DeathRecipient
    static void BinderDiedCallback(void* cookie);
    void binderDied();
    static Mutex sLockCookies;
    static std::set<void*> sCookies;
    static void addCookie(void* cookie);
    static void removeCookie(void* cookie);

    void addResource(const MediaResourceParcel &resource);
    void removeResource(const MediaResourceParcel &resource);
    void removeClient();
    void markClientForPendingRemoval();
    bool reclaimResource(const std::vector<MediaResourceParcel> &resources);

private:
    Mutex mLock;
    pid_t mPid;
    uid_t mUid;
    std::shared_ptr<IResourceManagerService> mService;
    std::shared_ptr<IResourceManagerClient> mClient;
    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
};

MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
        pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client)
        : mPid(pid), mUid(uid), mClient(client),
          mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
    if (mUid == MediaCodec::kNoUid) {
        mUid = AIBinder_getCallingUid();
    }
    if (mPid == MediaCodec::kNoPid) {
        mPid = AIBinder_getCallingPid();
    }
}

MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {

    // remove the cookie, so any in-flight death notification will get dropped
    // by our handler.
    removeCookie(this);

    Mutex::Autolock _l(mLock);
    if (mService != nullptr) {
        AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
        mService = nullptr;
    }
}

status_t MediaCodec::ResourceManagerServiceProxy::init() {
    ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
    mService = IResourceManagerService::fromBinder(binder);
    if (mService == nullptr) {
        ALOGE("Failed to get ResourceManagerService");
        return UNKNOWN_ERROR;
    }

    int callerPid = AIBinder_getCallingPid();
    int callerUid = AIBinder_getCallingUid();
    if (mPid != callerPid || mUid != callerUid) {
        // Media processes don't need special permissions to act on behalf of other processes.
        if (callerUid != AID_MEDIA) {
            char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
            if (!checkCallingPermission(String16(permission))) {
                ALOGW("%s is required to override the caller's PID for media resource management.",
                        permission);
                return PERMISSION_DENIED;
            }
        }
    }

    // Kill clients pending removal.
    mService->reclaimResourcesFromClientsPendingRemoval(mPid);

    // so our handler will process the death notifications
    addCookie(this);

    // after this, require mLock whenever using mService
    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
    return OK;
}

// static
// these are no_destroy to keep them from being destroyed at process exit
// where some thread calls exit() while other threads are still running.
// see b/194783918
[[clang::no_destroy]] Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
[[clang::no_destroy]] std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;

// static
void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    sCookies.insert(cookie);
}

// static
void MediaCodec::ResourceManagerServiceProxy::removeCookie(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    sCookies.erase(cookie);
}

// static
void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    if (sCookies.find(cookie) != sCookies.end()) {
        auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
        thiz->binderDied();
    }
}

void MediaCodec::ResourceManagerServiceProxy::binderDied() {
    ALOGW("ResourceManagerService died.");
    Mutex::Autolock _l(mLock);
    mService = nullptr;
}

void MediaCodec::ResourceManagerServiceProxy::addResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources;
    resources.push_back(resource);

    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->addResource(mPid, mUid, getId(mClient), mClient, resources);
}

void MediaCodec::ResourceManagerServiceProxy::removeResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources;
    resources.push_back(resource);

    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->removeResource(mPid, getId(mClient), resources);
}

void MediaCodec::ResourceManagerServiceProxy::removeClient() {
    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->removeClient(mPid, getId(mClient));
}

void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    mService->markClientForPendingRemoval(mPid, getId(mClient));
}

bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
        const std::vector<MediaResourceParcel> &resources) {
    Mutex::Autolock _l(mLock);
    if (mService == NULL) {
        return false;
    }
    bool success;
    Status status = mService->reclaimResource(mPid, resources, &success);
    return status.isOk() && success;
}

////////////////////////////////////////////////////////////////////////////////

MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}

////////////////////////////////////////////////////////////////////////////////

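// ReleaseSurface is a stand-in output surface backed by a local BufferQueue;
// its consumer listener immediately acquires and releases every queued buffer,
// so frames sent to it are effectively discarded.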
class MediaCodec::ReleaseSurface {
public:
    explicit ReleaseSurface(uint64_t usage) {
        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
        mSurface = new Surface(mProducer, false /* controlledByApp */);
        struct ConsumerListener : public BnConsumerListener {
            ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
                mConsumer = consumer;
            }
            void onFrameAvailable(const BufferItem&) override {
                BufferItem buffer;
                // consume buffer
                sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
                if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
                    consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
                                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
                }
            }

            wp<IGraphicBufferConsumer> mConsumer;
            void onBuffersReleased() override {}
            void onSidebandStreamChanged() override {}
        };
        sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
        mConsumer->consumerConnect(listener, false);
        mConsumer->setConsumerName(String8{"MediaCodec.release"});
        mConsumer->setConsumerUsageBits(usage);
    }

    const sp<Surface> &getSurface() {
        return mSurface;
    }

private:
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;
    sp<Surface> mSurface;
};

////////////////////////////////////////////////////////////////////////////////

namespace {

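// Message codes posted by the BufferCallback/CodecCallback below to
// MediaCodec's handler via the supplied notification message.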
enum {
    kWhatFillThisBuffer = 'fill',
    kWhatDrainThisBuffer = 'drai',
    kWhatEOS = 'eos ',
    kWhatStartCompleted = 'Scom',
    kWhatStopCompleted = 'scom',
    kWhatReleaseCompleted = 'rcom',
    kWhatFlushCompleted = 'fcom',
    kWhatError = 'erro',
    kWhatComponentAllocated = 'cAll',
    kWhatComponentConfigured = 'cCon',
    kWhatInputSurfaceCreated = 'isfc',
    kWhatInputSurfaceAccepted = 'isfa',
    kWhatSignaledInputEOS = 'seos',
    kWhatOutputFramesRendered = 'outR',
    kWhatOutputBuffersChanged = 'outC',
    kWhatFirstTunnelFrameReady = 'ftfR',
};

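// BufferCallback forwards input/output buffer availability from the CodecBase
// implementation to MediaCodec as kWhatFillThisBuffer/kWhatDrainThisBuffer messages.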
class BufferCallback : public CodecBase::BufferCallback {
public:
    explicit BufferCallback(const sp<AMessage> &notify);
    virtual ~BufferCallback() = default;

    virtual void onInputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
    virtual void onOutputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
private:
    const sp<AMessage> mNotify;
};

BufferCallback::BufferCallback(const sp<AMessage> &notify)
    : mNotify(notify) {}

void BufferCallback::onInputBufferAvailable(
        size_t index, const sp<MediaCodecBuffer> &buffer) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFillThisBuffer);
    notify->setSize("index", index);
    notify->setObject("buffer", buffer);
    notify->post();
}

void BufferCallback::onOutputBufferAvailable(
        size_t index, const sp<MediaCodecBuffer> &buffer) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatDrainThisBuffer);
    notify->setSize("index", index);
    notify->setObject("buffer", buffer);
    notify->post();
}

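// CodecCallback translates the remaining CodecBase notifications (EOS, start/stop/
// release/flush completion, errors, configuration, input-surface events, rendered
// frames) into the corresponding AMessages for MediaCodec's handler.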
class CodecCallback : public CodecBase::CodecCallback {
public:
    explicit CodecCallback(const sp<AMessage> &notify);
    virtual ~CodecCallback() = default;

    virtual void onEos(status_t err) override;
    virtual void onStartCompleted() override;
    virtual void onStopCompleted() override;
    virtual void onReleaseCompleted() override;
    virtual void onFlushCompleted() override;
    virtual void onError(status_t err, enum ActionCode actionCode) override;
    virtual void onComponentAllocated(const char *componentName) override;
    virtual void onComponentConfigured(
            const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceCreated(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat,
            const sp<BufferProducerWrapper> &inputSurface) override;
    virtual void onInputSurfaceCreationFailed(status_t err) override;
    virtual void onInputSurfaceAccepted(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceDeclined(status_t err) override;
    virtual void onSignaledInputEOS(status_t err) override;
    virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
    virtual void onOutputBuffersChanged() override;
    virtual void onFirstTunnelFrameReady() override;
private:
    const sp<AMessage> mNotify;
};

CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}

void CodecCallback::onEos(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onStartCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatStartCompleted);
    notify->post();
}

void CodecCallback::onStopCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatStopCompleted);
    notify->post();
}

void CodecCallback::onReleaseCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatReleaseCompleted);
    notify->post();
}

void CodecCallback::onFlushCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFlushCompleted);
    notify->post();
}

void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatError);
    notify->setInt32("err", err);
    notify->setInt32("actionCode", actionCode);
    notify->post();
}

void CodecCallback::onComponentAllocated(const char *componentName) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatComponentAllocated);
    notify->setString("componentName", componentName);
    notify->post();
}

void CodecCallback::onComponentConfigured(
        const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatComponentConfigured);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->post();
}

void CodecCallback::onInputSurfaceCreated(
        const sp<AMessage> &inputFormat,
        const sp<AMessage> &outputFormat,
        const sp<BufferProducerWrapper> &inputSurface) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceCreated);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->setObject("input-surface", inputSurface);
    notify->post();
}

void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceCreated);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onInputSurfaceAccepted(
        const sp<AMessage> &inputFormat,
        const sp<AMessage> &outputFormat) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceAccepted);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->post();
}

void CodecCallback::onInputSurfaceDeclined(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceAccepted);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onSignaledInputEOS(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatSignaledInputEOS);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

void CodecCallback::onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatOutputFramesRendered);
    if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
        notify->post();
    }
}

void CodecCallback::onOutputBuffersChanged() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatOutputBuffersChanged);
    notify->post();
}

void CodecCallback::onFirstTunnelFrameReady() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFirstTunnelFrameReady);
    notify->post();
}

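// Helpers mapping a codec domain to the resource-manager subtype and to the
// metrics "mode" string.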
static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
    switch (domain) {
        case MediaCodec::DOMAIN_VIDEO: return MediaResourceSubType::kVideoCodec;
        case MediaCodec::DOMAIN_AUDIO: return MediaResourceSubType::kAudioCodec;
        case MediaCodec::DOMAIN_IMAGE: return MediaResourceSubType::kImageCodec;
        default: return MediaResourceSubType::kUnspecifiedSubType;
    }
}

static const char * toCodecMode(MediaCodec::Domain domain) {
    switch (domain) {
        case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
        case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
        case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
        default: return kCodecModeUnknown;
    }
}

}  // namespace

////////////////////////////////////////////////////////////////////////////////

// static
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid) {
    sp<AMessage> format;
    return CreateByType(looper, mime, encoder, err, pid, uid, format);
}

sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid, sp<AMessage> format) {
    Vector<AString> matchingCodecs;

    MediaCodecList::findMatchingCodecs(
            mime.c_str(),
            encoder,
            0,
            format,
            &matchingCodecs);

    if (err != NULL) {
        *err = NAME_NOT_FOUND;
    }
    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
        AString componentName = matchingCodecs[i];
        status_t ret = codec->init(componentName);
        if (err != NULL) {
            *err = ret;
        }
        if (ret == OK) {
            return codec;
        }
        ALOGD("Allocating component '%s' failed (%d), try next one.",
                componentName.c_str(), ret);
    }
    return NULL;
}

// static
sp<MediaCodec> MediaCodec::CreateByComponentName(
        const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);

    const status_t ret = codec->init(name);
    if (err != NULL) {
        *err = ret;
    }
    return ret == OK ? codec : NULL;  // NULL deallocates codec.
}

// static
sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
    sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
    if (pluginSurface != nullptr) {
        return pluginSurface;
    }

    OMXClient client;
    if (client.connect() != OK) {
        ALOGE("Failed to connect to OMX to create persistent input surface.");
        return NULL;
    }

    sp<IOMX> omx = client.interface();

    sp<IGraphicBufferProducer> bufferProducer;
    sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;

    status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);

    if (err != OK) {
        ALOGE("Failed to create persistent input surface.");
        return NULL;
    }

    return new PersistentSurface(bufferProducer, bufferSource);
}

MediaCodec::MediaCodec(
        const sp<ALooper> &looper, pid_t pid, uid_t uid,
        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
    : mState(UNINITIALIZED),
      mReleasedByResourceManager(false),
      mLooper(looper),
      mCodec(NULL),
      mReplyID(0),
      mFlags(0),
      mStickyError(OK),
      mSoftRenderer(NULL),
      mDomain(DOMAIN_UNKNOWN),
      mWidth(0),
      mHeight(0),
      mRotationDegrees(0),
      mHdrInfoFlags(0),
      mDequeueInputTimeoutGeneration(0),
      mDequeueInputReplyID(0),
      mDequeueOutputTimeoutGeneration(0),
      mDequeueOutputReplyID(0),
      mTunneledInputWidth(0),
      mTunneledInputHeight(0),
      mTunneled(false),
      mTunnelPeekState(TunnelPeekState::kLegacyMode),
      mHaveInputSurface(false),
      mHavePendingInputBuffers(false),
      mCpuBoostRequested(false),
      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
      mIsSurfaceToScreen(false),
      mLatencyUnknown(0),
      mBytesEncoded(0),
      mEarliestEncodedPtsUs(INT64_MAX),
      mLatestEncodedPtsUs(INT64_MIN),
      mFramesEncoded(0),
      mNumLowLatencyEnables(0),
      mNumLowLatencyDisables(0),
      mIsLowLatencyModeOn(false),
      mIndexOfFirstFrameWhenLowLatencyOn(-1),
      mInputBufferCounter(0),
      mGetCodecBase(getCodecBase),
      mGetCodecInfo(getCodecInfo) {
    mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid));
    if (!mGetCodecBase) {
        mGetCodecBase = [](const AString &name, const char *owner) {
            return GetCodecBase(name, owner);
        };
    }
    if (!mGetCodecInfo) {
        mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
            *info = nullptr;
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            if (!mcl) {
                return NO_INIT;  // if called from Java should raise IOException
            }
            AString tmp = name;
            if (tmp.endsWith(".secure")) {
                tmp.erase(tmp.size() - 7, 7);
            }
            for (const AString &codecName : { name, tmp }) {
                ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
                if (codecIdx < 0) {
                    continue;
                }
                *info = mcl->getCodecInfo(codecIdx);
                return OK;
            }
            return NAME_NOT_FOUND;
        };
    }

    // we want an empty metrics record for any early getMetrics() call
    // this should be the *only* initMediametrics() call that's not on the Looper thread
    initMediametrics();
}

MediaCodec::~MediaCodec() {
    CHECK_EQ(mState, UNINITIALIZED);
    mResourceManagerProxy->removeClient();

    flushMediametrics();

    // clean any saved metrics info we stored as part of configure()
    if (mConfigureMsg != nullptr) {
        mediametrics_handle_t metricsHandle;
        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
            mediametrics_delete(metricsHandle);
        }
    }
}

// Except for the call from the constructor, this is invoked on the looper thread
// (and is therefore mutexed).
void MediaCodec::initMediametrics() {
    if (mMetricsHandle == 0) {
        mMetricsHandle = mediametrics_create(kCodecKeyName);
    }

    mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);

    {
        Mutex::Autolock al(mRecentLock);
        for (int i = 0; i < kRecentLatencyFrames; i++) {
            mRecentSamples[i] = kRecentSampleInvalid;
        }
        mRecentHead = 0;
    }

    {
        Mutex::Autolock al(mLatencyLock);
        mBuffersInFlight.clear();
        mNumLowLatencyEnables = 0;
        mNumLowLatencyDisables = 0;
        mIsLowLatencyModeOn = false;
        mIndexOfFirstFrameWhenLowLatencyOn = -1;
        mInputBufferCounter = 0;
    }

    mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
}

void MediaCodec::updateMediametrics() {
    if (mMetricsHandle == 0) {
        return;
    }

    Mutex::Autolock _lock(mMetricsLock);

    if (mLatencyHist.getCount() != 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());

        if (kEmitHistogram) {
            // and the histogram itself
            std::string hist = mLatencyHist.emit();
            mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
        }
    }
    if (mLatencyUnknown > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
    }
    int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
    if (playbackDurationSec > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
    }
    if (mLifetimeStartNs > 0) {
        nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
        lifetime = lifetime / (1000 * 1000);  // emitted in ms, truncated not rounded
        mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
    }

    if (mBytesEncoded) {
        Mutex::Autolock al(mOutputStatsLock);

        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
        int64_t duration = 0;
        if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
            duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
        }
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
    }

    {
        Mutex::Autolock al(mLatencyLock);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
        mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
                mIndexOfFirstFrameWhenLowLatencyOn);
    }

#if 0
    // enable for short term, only while debugging
    updateEphemeralMediametrics(mMetricsHandle);
#endif
}

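// Record color aspects and HDR metadata presence into the metrics item; isConfig
// selects the config-time keys, otherwise the parsed (output-format) keys are used.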
void MediaCodec::updateHdrMetrics(bool isConfig) {
    if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
        return;
    }

    int32_t colorStandard = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
    }
    int32_t colorRange = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
    }
    int32_t colorTransfer = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
    }
    HDRStaticInfo info;
    if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
            && ColorUtils::isHDRStaticInfoValid(&info)) {
        mHdrInfoFlags |= kFlagHasHdrStaticInfo;
    }
    mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo,
            (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
    sp<ABuffer> hdr10PlusInfo;
    if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
            && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
        mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
    }
    mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo,
            (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);

    // hdr format
    sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;

    AString mime;
    int32_t profile = -1;

    if (codedFormat->findString("mime", &mime)
            && codedFormat->findInt32(KEY_PROFILE, &profile)
            && colorTransfer != -1) {
        hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
        mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
    }
}

hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
        const int32_t colorTransfer) {
    return (mFlags & kFlagIsEncoder)
            ? getHdrFormatForEncoder(mime, profile, colorTransfer)
            : getHdrFormatForDecoder(mime, profile, colorTransfer);
}

hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
        const int32_t colorTransfer) {
    switch (colorTransfer) {
        case COLOR_TRANSFER_ST2084:
            if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
                switch (profile) {
                    case VP9Profile2HDR:
                        return HDR_FORMAT_HDR10;
                    case VP9Profile2HDR10Plus:
                        return HDR_FORMAT_HDR10PLUS;
                    default:
                        return HDR_FORMAT_NONE;
                }
            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
                switch (profile) {
                    case AV1ProfileMain10HDR10:
                        return HDR_FORMAT_HDR10;
                    case AV1ProfileMain10HDR10Plus:
                        return HDR_FORMAT_HDR10PLUS;
                    default:
                        return HDR_FORMAT_NONE;
                }
            } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
                switch (profile) {
                    case HEVCProfileMain10HDR10:
                        return HDR_FORMAT_HDR10;
                    case HEVCProfileMain10HDR10Plus:
                        return HDR_FORMAT_HDR10PLUS;
                    default:
                        return HDR_FORMAT_NONE;
                }
            } else {
                return HDR_FORMAT_NONE;
            }
        case COLOR_TRANSFER_HLG:
            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
                return HDR_FORMAT_HLG;
            } else {
                // TODO: DOLBY format
                return HDR_FORMAT_NONE;
            }
        default:
            return HDR_FORMAT_NONE;
    }
}

hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
        const int32_t colorTransfer) {
    switch (colorTransfer) {
        case COLOR_TRANSFER_ST2084:
            if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
                return HDR_FORMAT_NONE;
            }
            return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
        case COLOR_TRANSFER_HLG:
            if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
                return HDR_FORMAT_HLG;
            }
            // TODO: DOLBY format
    }
    return HDR_FORMAT_NONE;
}

bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
    if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
        return true;
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
        switch (profile) {
            case VP9Profile2:
            case VP9Profile3:
            case VP9Profile2HDR:
            case VP9Profile3HDR:
            case VP9Profile2HDR10Plus:
            case VP9Profile3HDR10Plus:
                return true;
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
        switch (profile) {
            case HEVCProfileMain10:
            case HEVCProfileMain10HDR10:
            case HEVCProfileMain10HDR10Plus:
                return true;
        }
    }
    return false;
}


// called to update info being passed back via getMetrics(), which is a
// unique copy for that call, no concurrent access worries.
void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
    ALOGD("MediaCodec::updateEphemeralMediametrics()");

    if (item == 0) {
        return;
    }

    Histogram recentHist;

    // build an empty histogram
    recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);

    // stuff it with the samples in the ring buffer
    {
        Mutex::Autolock al(mRecentLock);

        for (int i = 0; i < kRecentLatencyFrames; i++) {
            if (mRecentSamples[i] != kRecentSampleInvalid) {
                recentHist.insert(mRecentSamples[i]);
            }
        }
    }

    // spit the data (if any) into the supplied analytics record
    if (recentHist.getCount() != 0) {
        mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
        mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
        mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
        mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());

        if (kEmitHistogram) {
            // and the histogram itself
            std::string hist = recentHist.emit();
            mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
        }
    }
}

void MediaCodec::flushMediametrics() {
    ALOGD("flushMediametrics");

    // update does its own mutex locking
    updateMediametrics();

    // ensure mutex while we do our own work
    Mutex::Autolock _lock(mMetricsLock);
    mHdrInfoFlags = 0;
    if (mMetricsHandle != 0) {
        if (mediametrics_count(mMetricsHandle) > 0) {
            mediametrics_selfRecord(mMetricsHandle);
        }
        mediametrics_delete(mMetricsHandle);
        mMetricsHandle = 0;
    }
}

void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
    int32_t lowLatency = 0;
    if (msg->findInt32("low-latency", &lowLatency)) {
        Mutex::Autolock al(mLatencyLock);
        if (lowLatency > 0) {
            ++mNumLowLatencyEnables;
            // This is just an estimate since low latency mode change happens ONLY at key frame
            mIsLowLatencyModeOn = true;
        } else if (lowLatency == 0) {
            ++mNumLowLatencyDisables;
            // This is just an estimate since low latency mode change happens ONLY at key frame
            mIsLowLatencyModeOn = false;
        }
    }
}

constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string) {
    switch (state) {
        case TunnelPeekState::kLegacyMode:
            return "LegacyMode";
        case TunnelPeekState::kEnabledNoBuffer:
            return "EnabledNoBuffer";
        case TunnelPeekState::kDisabledNoBuffer:
            return "DisabledNoBuffer";
        case TunnelPeekState::kBufferDecoded:
            return "BufferDecoded";
        case TunnelPeekState::kBufferRendered:
            return "BufferRendered";
        case TunnelPeekState::kDisabledQueued:
            return "DisabledQueued";
        case TunnelPeekState::kEnabledQueued:
            return "EnabledQueued";
        default:
            return default_string;
    }
}

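// Apply a "tunnel-peek" parameter change: advance the TunnelPeekState state
// machine and, if a buffer has already been decoded, ask the codec to render it.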
void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
    int32_t tunnelPeek = 0;
    if (!msg->findInt32("tunnel-peek", &tunnelPeek)) {
        return;
    }

    TunnelPeekState previousState = mTunnelPeekState;
    if (tunnelPeek == 0) {
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kEnabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
                break;
            case TunnelPeekState::kEnabledQueued:
                mTunnelPeekState = TunnelPeekState::kDisabledQueued;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    } else {
        switch (mTunnelPeekState) {
            case TunnelPeekState::kLegacyMode:
                msg->setInt32("android._tunnel-peek-set-legacy", 0);
                [[fallthrough]];
            case TunnelPeekState::kDisabledNoBuffer:
                mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
                break;
            case TunnelPeekState::kDisabledQueued:
                mTunnelPeekState = TunnelPeekState::kEnabledQueued;
                break;
            case TunnelPeekState::kBufferDecoded:
                msg->setInt32("android._trigger-tunnel-peek", 1);
                mTunnelPeekState = TunnelPeekState::kBufferRendered;
                break;
            default:
                ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
                return;
        }
    }

    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}

void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
    int what = 0;
    msg->findInt32("what", &what);
    if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
        static bool logged = false;
        if (!logged) {
            logged = true;
            ALOGE("updatePlaybackDuration: expected kWhatOutputFramesRendered (%d)", msg->what());
        }
        return;
    }
    // Playback duration only counts if the buffers are going to the screen.
    if (!mIsSurfaceToScreen) {
        return;
    }
    int64_t renderTimeNs;
    size_t index = 0;
    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
    }
}

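// Simple fixed-width histogram used for the latency statistics: nbuckets buckets
// of the given width starting at `floor`, plus below/above overflow counters.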
bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
{
    if (nbuckets <= 0 || width <= 0) {
        return false;
    }

    // get histogram buckets
    if (nbuckets == mBucketCount && mBuckets != NULL) {
        // reuse our existing buffer
        memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
    } else {
        // get a new pre-zeroed buffer
        int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof(*mBuckets));
        if (newbuckets == NULL) {
            goto bad;
        }
        if (mBuckets != NULL)
            free(mBuckets);
        mBuckets = newbuckets;
    }

    mWidth = width;
    mFloor = floor;
    mCeiling = floor + nbuckets * width;
    mBucketCount = nbuckets;

    mMin = INT64_MAX;
    mMax = INT64_MIN;
    mSum = 0;
    mCount = 0;
    mBelow = mAbove = 0;

    return true;

  bad:
    if (mBuckets != NULL) {
        free(mBuckets);
        mBuckets = NULL;
    }

    return false;
}

void MediaCodec::Histogram::insert(int64_t sample)
{
    // histogram is not set up
    if (mBuckets == NULL) {
        return;
    }

    mCount++;
    mSum += sample;
    if (mMin > sample) mMin = sample;
    if (mMax < sample) mMax = sample;

    if (sample < mFloor) {
        mBelow++;
    } else if (sample >= mCeiling) {
        mAbove++;
    } else {
        int64_t slot = (sample - mFloor) / mWidth;
        CHECK(slot < mBucketCount);
        mBuckets[slot]++;
    }
    return;
}

std::string MediaCodec::Histogram::emit()
{
    std::string value;
    char buffer[64];

    // emits: width,Below{bucket0,bucket1,...., bucketN}above
    // unconfigured will emit: 0,0{}0
    // XXX: is this best representation?
    snprintf(buffer, sizeof(buffer), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
             mFloor, mWidth, mBelow);
    value = buffer;
    for (int i = 0; i < mBucketCount; i++) {
        if (i != 0) {
            value = value + ",";
        }
        snprintf(buffer, sizeof(buffer), "%" PRId64, mBuckets[i]);
        value = value + buffer;
    }
    snprintf(buffer, sizeof(buffer), "}%" PRId64, mAbove);
    value = value + buffer;
    return value;
}

1375 // when we send a buffer to the codec;
statsBufferSent(int64_t presentationUs,const sp<MediaCodecBuffer> & buffer)1376 void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1377
1378 // only enqueue if we have a legitimate time
1379 if (presentationUs <= 0) {
1380 ALOGV("presentation time: %" PRId64, presentationUs);
1381 return;
1382 }
1383
1384 if (mBatteryChecker != nullptr) {
1385 mBatteryChecker->onCodecActivity([this] () {
1386 mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
1387 });
1388 }
1389
1390 if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
1391 mBytesInput += buffer->size();
1392 mFramesInput++;
1393 }
1394
1395 const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
1396 BufferFlightTiming_t startdata = { presentationUs, nowNs };
1397
1398 {
1399 // mutex access to mBuffersInFlight and other stats
1400 Mutex::Autolock al(mLatencyLock);
1401
1402
1403 // XXX: we *could* make sure that the time is later than the end of queue
1404 // as part of a consistency check...
1405 mBuffersInFlight.push_back(startdata);
1406
1407 if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
1408 mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
1409 }
1410 ++mInputBufferCounter;
1411 }
1412 }
1413
1414 // when we get a buffer back from the codec
statsBufferReceived(int64_t presentationUs,const sp<MediaCodecBuffer> & buffer)1415 void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1416
1417 CHECK_NE(mState, UNINITIALIZED);
1418
1419 if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
1420 int32_t flags = 0;
1421 (void) buffer->meta()->findInt32("flags", &flags);
1422
1423 // some of these frames, we don't want to count
1424 // standalone EOS.... has an invalid timestamp
1425 if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
1426 mBytesEncoded += buffer->size();
1427 mFramesEncoded++;
1428
1429 Mutex::Autolock al(mOutputStatsLock);
1430 int64_t timeUs = 0;
1431 if (buffer->meta()->findInt64("timeUs", &timeUs)) {
1432 if (timeUs > mLatestEncodedPtsUs) {
1433 mLatestEncodedPtsUs = timeUs;
1434 }
1435 // can't chain as an else-if or this never triggers
1436 if (timeUs < mEarliestEncodedPtsUs) {
1437 mEarliestEncodedPtsUs = timeUs;
1438 }
1439 }
1440 }
1441 }
1442
1443 // mutex access to mBuffersInFlight and other stats
1444 Mutex::Autolock al(mLatencyLock);
1445
1446 // how long this buffer took for the round trip through the codec
1447 // NB: pipelining can/will make these times larger. e.g., if each packet
1448 // is always 2 msec and we have 3 in flight at any given time, we're going to
1449 // see "6 msec" as an answer.
1450
1451 // ignore stuff with no presentation time
1452 if (presentationUs <= 0) {
1453 ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
1454 mLatencyUnknown++;
1455 return;
1456 }
1457
1458 if (mBatteryChecker != nullptr) {
1459 mBatteryChecker->onCodecActivity([this] () {
1460 mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
1461 });
1462 }
1463
1464 BufferFlightTiming_t startdata;
1465 bool valid = false;
1466 while (mBuffersInFlight.size() > 0) {
1467 startdata = *mBuffersInFlight.begin();
1468 ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
1469 startdata.presentationUs, startdata.startedNs);
1470 if (startdata.presentationUs == presentationUs) {
1471 // a match
1472 ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
1473 startdata.presentationUs, presentationUs);
1474 mBuffersInFlight.pop_front();
1475 valid = true;
1476 break;
1477 } else if (startdata.presentationUs < presentationUs) {
1478 // we must have missed the match for this, drop it and keep looking
1479 ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
1480 startdata.presentationUs, presentationUs);
1481 mBuffersInFlight.pop_front();
1482 continue;
1483 } else {
1484 // head is after, so we don't have a frame for ourselves
1485 ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
1486 " we have nothing to pair with",
1487 startdata.presentationUs, presentationUs);
1488 mLatencyUnknown++;
1489 return;
1490 }
1491 }
1492 if (!valid) {
1493 ALOGV("-- empty queue, so ignore that.");
1494 mLatencyUnknown++;
1495 return;
1496 }
1497
1498 // now start our calculations
1499 const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
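// convert the elapsed time from ns to us; the +500 rounds to the nearest microsecond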
1500 int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
1501
1502 mLatencyHist.insert(latencyUs);
1503
1504 // push into the recent samples
1505 {
1506 Mutex::Autolock al(mRecentLock);
1507
1508 if (mRecentHead >= kRecentLatencyFrames) {
1509 mRecentHead = 0;
1510 }
1511 mRecentSamples[mRecentHead++] = latencyUs;
1512 }
1513 }
1514
1515 // static
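// Typical call pattern (as used by stop() and most other public entry points below):
//   sp<AMessage> msg = new AMessage(kWhatStop, this);
//   sp<AMessage> response;
//   status_t err = PostAndAwaitResponse(msg, &response);
// The returned status is either the posting/transport error or the "err" field
// the handler placed in its reply.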
1516 status_t MediaCodec::PostAndAwaitResponse(
1517 const sp<AMessage> &msg, sp<AMessage> *response) {
1518 status_t err = msg->postAndAwaitResponse(response);
1519
1520 if (err != OK) {
1521 return err;
1522 }
1523
1524 if (!(*response)->findInt32("err", &err)) {
1525 err = OK;
1526 }
1527
1528 return err;
1529 }
1530
1531 void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
1532 sp<AReplyToken> replyID;
1533 CHECK(msg->senderAwaitsResponse(&replyID));
1534 PostReplyWithError(replyID, err);
1535 }
1536
1537 void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
1538 int32_t finalErr = err;
1539 if (mReleasedByResourceManager) {
1540 // override the err code if MediaCodec has been released by ResourceManager.
1541 finalErr = DEAD_OBJECT;
1542 }
1543
1544 sp<AMessage> response = new AMessage;
1545 response->setInt32("err", finalErr);
1546 response->postReply(replyID);
1547 }
1548
1549 static CodecBase *CreateCCodec() {
1550 return new CCodec;
1551 }
1552
1553 //static
1554 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
1555 if (owner) {
1556 if (strcmp(owner, "default") == 0) {
1557 return new ACodec;
1558 } else if (strncmp(owner, "codec2", 6) == 0) {
1559 return CreateCCodec();
1560 }
1561 }
1562
1563 if (name.startsWithIgnoreCase("c2.")) {
1564 return CreateCCodec();
1565 } else if (name.startsWithIgnoreCase("omx.")) {
1566 // at this time only ACodec specifies a mime type.
1567 return new ACodec;
1568 } else if (name.startsWithIgnoreCase("android.filter.")) {
1569 return new MediaFilter;
1570 } else {
1571 return NULL;
1572 }
1573 }
1574
1575 struct CodecListCache {
1576 CodecListCache()
1577 : mCodecInfoMap{[] {
1578 const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
1579 size_t count = mcl->countCodecs();
1580 std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
1581 for (size_t i = 0; i < count; ++i) {
1582 sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
1583 codecInfoMap.emplace(info->getCodecName(), info);
1584 }
1585 return codecInfoMap;
1586 }()} {
1587 }
1588
1589 const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
1590 };
1591
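// Example lookup against the cache (a sketch; the codec name is illustrative):
//   const auto &infos = GetCodecListCache().mCodecInfoMap;
//   auto it = infos.find("c2.android.avc.decoder");
//   sp<MediaCodecInfo> info = (it != infos.end()) ? it->second : nullptr;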
1592 static const CodecListCache &GetCodecListCache() {
1593 static CodecListCache sCache{};
1594 return sCache;
1595 }
1596
1597 status_t MediaCodec::init(const AString &name) {
1598 status_t err = mResourceManagerProxy->init();
1599 if (err != OK) {
1600 mCodec = NULL; // remove the codec
1601 return err;
1602 }
1603
1604 // save init parameters for reset
1605 mInitName = name;
1606
1607 // Current video decoders do not return from OMX_FillThisBuffer
1608 // quickly, violating the OpenMAX specs. Until that is remedied,
1609 // we need to invest in an extra looper to free the main event
1610 // queue.
1611
1612 mCodecInfo.clear();
1613
1614 bool secureCodec = false;
1615 const char *owner = "";
1616 if (!name.startsWith("android.filter.")) {
1617 err = mGetCodecInfo(name, &mCodecInfo);
1618 if (err != OK) {
1619 mCodec = NULL; // remove the codec.
1620 return err;
1621 }
1622 if (mCodecInfo == nullptr) {
1623 ALOGE("Getting codec info with name '%s' failed", name.c_str());
1624 return NAME_NOT_FOUND;
1625 }
1626 secureCodec = name.endsWith(".secure");
1627 Vector<AString> mediaTypes;
1628 mCodecInfo->getSupportedMediaTypes(&mediaTypes);
1629 for (size_t i = 0; i < mediaTypes.size(); ++i) {
1630 if (mediaTypes[i].startsWith("video/")) {
1631 mDomain = DOMAIN_VIDEO;
1632 break;
1633 } else if (mediaTypes[i].startsWith("audio/")) {
1634 mDomain = DOMAIN_AUDIO;
1635 break;
1636 } else if (mediaTypes[i].startsWith("image/")) {
1637 mDomain = DOMAIN_IMAGE;
1638 break;
1639 }
1640 }
1641 owner = mCodecInfo->getOwnerName();
1642 }
1643
1644 mCodec = mGetCodecBase(name, owner);
1645 if (mCodec == NULL) {
1646 ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
1647 return NAME_NOT_FOUND;
1648 }
1649
1650 if (mDomain == DOMAIN_VIDEO) {
1651 // video codec needs dedicated looper
1652 if (mCodecLooper == NULL) {
1653 status_t err = OK;
1654 mCodecLooper = new ALooper;
1655 mCodecLooper->setName("CodecLooper");
1656 err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
1657 if (OK != err) {
1658 ALOGE("Codec Looper failed to start");
1659 return err;
1660 }
1661 }
1662
1663 mCodecLooper->registerHandler(mCodec);
1664 } else {
1665 mLooper->registerHandler(mCodec);
1666 }
1667
1668 mLooper->registerHandler(this);
1669
1670 mCodec->setCallback(
1671 std::unique_ptr<CodecBase::CodecCallback>(
1672 new CodecCallback(new AMessage(kWhatCodecNotify, this))));
1673 mBufferChannel = mCodec->getBufferChannel();
1674 mBufferChannel->setCallback(
1675 std::unique_ptr<CodecBase::BufferCallback>(
1676 new BufferCallback(new AMessage(kWhatCodecNotify, this))));
1677
1678 sp<AMessage> msg = new AMessage(kWhatInit, this);
1679 if (mCodecInfo) {
1680 msg->setObject("codecInfo", mCodecInfo);
1681 // name may be different from mCodecInfo->getCodecName() if we stripped
1682 // ".secure"
1683 }
1684 msg->setString("name", name);
1685
1686 // initial naming setup covers the period before the first call to ::configure().
1687 // after that, we manage this through ::configure() and the setup message.
1688 if (mMetricsHandle != 0) {
1689 mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
1690 mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
1691 }
1692
1693 if (mDomain == DOMAIN_VIDEO) {
1694 mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
1695 }
1696
1697 std::vector<MediaResourceParcel> resources;
1698 resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
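// Retry pattern: attempt init; on a resource error, ask the ResourceManager to
// reclaim codec resources (typically from lower-priority clients) and try again,
// up to kMaxRetry attempts.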
1699 for (int i = 0; i <= kMaxRetry; ++i) {
1700 if (i > 0) {
1701 // Don't try to reclaim resource for the first time.
1702 if (!mResourceManagerProxy->reclaimResource(resources)) {
1703 break;
1704 }
1705 }
1706
1707 sp<AMessage> response;
1708 err = PostAndAwaitResponse(msg, &response);
1709 if (!isResourceError(err)) {
1710 break;
1711 }
1712 }
1713 return err;
1714 }
1715
1716 status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
1717 sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
1718 msg->setMessage("callback", callback);
1719
1720 sp<AMessage> response;
1721 return PostAndAwaitResponse(msg, &response);
1722 }
1723
1724 status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
1725 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
1726 msg->setMessage("on-frame-rendered", notify);
1727 return msg->post();
1728 }
1729
1730 status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
1731 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
1732 msg->setMessage("first-tunnel-frame-ready", notify);
1733 return msg->post();
1734 }
1735
1736 /*
1737 * MediaFormat Shaping forward declarations
1738 * including the property name we use for control.
1739 */
1740 static int enableMediaFormatShapingDefault = 1;
1741 static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
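// The property can be toggled for debugging via the standard system property
// mechanism, e.g.
//   adb shell setprop debug.stagefright.enableshaping 0
// (0 disables shaping; by default it is enabled.)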
1742 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
1743 bool reverse);
1744
1745 status_t MediaCodec::configure(
1746 const sp<AMessage> &format,
1747 const sp<Surface> &nativeWindow,
1748 const sp<ICrypto> &crypto,
1749 uint32_t flags) {
1750 return configure(format, nativeWindow, crypto, NULL, flags);
1751 }
1752
1753 status_t MediaCodec::configure(
1754 const sp<AMessage> &format,
1755 const sp<Surface> &surface,
1756 const sp<ICrypto> &crypto,
1757 const sp<IDescrambler> &descrambler,
1758 uint32_t flags) {
1759 sp<AMessage> msg = new AMessage(kWhatConfigure, this);
1760 mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
1761
1762 // TODO: validity check log-session-id: it should be a 32-hex-digit.
1763 format->findString("log-session-id", &mLogSessionId);
1764
1765 if (nextMetricsHandle != 0) {
1766 int32_t profile = 0;
1767 if (format->findInt32("profile", &profile)) {
1768 mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
1769 }
1770 int32_t level = 0;
1771 if (format->findInt32("level", &level)) {
1772 mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
1773 }
1774 mediametrics_setInt32(nextMetricsHandle, kCodecEncoder,
1775 (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
1776
1777 mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
1778
1779 // moved here from ::init()
1780 mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
1781 mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
1782 }
1783
1784 if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
1785 format->findInt32("width", &mWidth);
1786 format->findInt32("height", &mHeight);
1787 if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
1788 mRotationDegrees = 0;
1789 }
1790
1791 if (nextMetricsHandle != 0) {
1792 mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
1793 mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
1794 mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
1795 int32_t maxWidth = 0;
1796 if (format->findInt32("max-width", &maxWidth)) {
1797 mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
1798 }
1799 int32_t maxHeight = 0;
1800 if (format->findInt32("max-height", &maxHeight)) {
1801 mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
1802 }
1803 int32_t colorFormat = -1;
1804 if (format->findInt32("color-format", &colorFormat)) {
1805 mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
1806 }
1807 if (mDomain == DOMAIN_VIDEO) {
1808 float frameRate = -1.0;
1809 if (format->findFloat("frame-rate", &frameRate)) {
1810 mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
1811 }
1812 float captureRate = -1.0;
1813 if (format->findFloat("capture-rate", &captureRate)) {
1814 mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
1815 }
1816 float operatingRate = -1.0;
1817 if (format->findFloat("operating-rate", &operatingRate)) {
1818 mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
1819 }
1820 int32_t priority = -1;
1821 if (format->findInt32("priority", &priority)) {
1822 mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
1823 }
1824 }
1825 }
1826
1827 // Prevent possible integer overflow in downstream code.
1828 if (mWidth < 0 || mHeight < 0 ||
1829 (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
1830 ALOGE("Invalid size(s), width=%d, height=%d", mWidth, mHeight);
1831 return BAD_VALUE;
1832 }
1833
1834 } else {
1835 if (nextMetricsHandle != 0) {
1836 int32_t channelCount;
1837 if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
1838 mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
1839 }
1840 int32_t sampleRate;
1841 if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
1842 mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
1843 }
1844 }
1845 }
1846
1847 if (flags & CONFIGURE_FLAG_ENCODE) {
1848 int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
1849 enableMediaFormatShapingDefault);
1850 if (!enableShaping) {
1851 ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
1852 if (nextMetricsHandle != 0) {
1853 mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
1854 }
1855 } else {
1856 (void) shapeMediaFormat(format, flags, nextMetricsHandle);
1857 // XXX: do we want to do this regardless of shaping enablement?
1858 mapFormat(mComponentName, format, nullptr, false);
1859 }
1860 }
1861
1862 // push min/max QP to MediaMetrics after shaping
1863 if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
1864 int32_t qpIMin = -1;
1865 if (format->findInt32("video-qp-i-min", &qpIMin)) {
1866 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
1867 }
1868 int32_t qpIMax = -1;
1869 if (format->findInt32("video-qp-i-max", &qpIMax)) {
1870 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
1871 }
1872 int32_t qpPMin = -1;
1873 if (format->findInt32("video-qp-p-min", &qpPMin)) {
1874 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
1875 }
1876 int32_t qpPMax = -1;
1877 if (format->findInt32("video-qp-p-max", &qpPMax)) {
1878 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
1879 }
1880 int32_t qpBMin = -1;
1881 if (format->findInt32("video-qp-b-min", &qpBMin)) {
1882 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
1883 }
1884 int32_t qpBMax = -1;
1885 if (format->findInt32("video-qp-b-max", &qpBMax)) {
1886 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
1887 }
1888 }
1889
1890 updateLowLatency(format);
1891
1892 msg->setMessage("format", format);
1893 msg->setInt32("flags", flags);
1894 msg->setObject("surface", surface);
1895
1896 if (crypto != NULL || descrambler != NULL) {
1897 if (crypto != NULL) {
1898 msg->setPointer("crypto", crypto.get());
1899 } else {
1900 msg->setPointer("descrambler", descrambler.get());
1901 }
1902 if (nextMetricsHandle != 0) {
1903 mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
1904 }
1905 } else if (mFlags & kFlagIsSecure) {
1906 ALOGW("Crypto or descrambler should be given for secure codec");
1907 }
1908
1909 if (mConfigureMsg != nullptr) {
1910 // if re-configuring, we have one of these from before.
1911 // Recover the space before we discard the old mConfigureMsg
1912 mediametrics_handle_t metricsHandle;
1913 if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
1914 mediametrics_delete(metricsHandle);
1915 }
1916 }
1917 msg->setInt64("metrics", nextMetricsHandle);
1918
1919 // save msg for reset
1920 mConfigureMsg = msg;
1921
1922 sp<AMessage> callback = mCallback;
1923
1924 status_t err;
1925 std::vector<MediaResourceParcel> resources;
1926 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
1927 toMediaResourceSubType(mDomain)));
1928 // Don't know the buffer size at this point, but it's fine to use 1 because
1929 // the reclaimResource call doesn't consider the requester's buffer size for now.
1930 resources.push_back(MediaResource::GraphicMemoryResource(1));
1931 for (int i = 0; i <= kMaxRetry; ++i) {
1932 sp<AMessage> response;
1933 err = PostAndAwaitResponse(msg, &response);
1934 if (err != OK && err != INVALID_OPERATION) {
1935 if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
1936 break;
1937 }
1938 // MediaCodec now set state to UNINITIALIZED upon any fatal error.
1939 // To maintain backward-compatibility, do a reset() to put codec
1940 // back into INITIALIZED state.
1941 // But don't reset if the err is INVALID_OPERATION, which means
1942 // the configure failure is due to wrong state.
1943
1944 ALOGE("configure failed with err 0x%08x, resetting...", err);
1945 status_t err2 = reset();
1946 if (err2 != OK) {
1947 ALOGE("retrying configure: failed to reset codec (%08x)", err2);
1948 break;
1949 }
1950 if (callback != nullptr) {
1951 err2 = setCallback(callback);
1952 if (err2 != OK) {
1953 ALOGE("retrying configure: failed to set callback (%08x)", err2);
1954 break;
1955 }
1956 }
1957 }
1958 if (!isResourceError(err)) {
1959 break;
1960 }
1961 }
1962
1963 return err;
1964 }
1965
1966 // Media Format Shaping support
1967 //
1968
1969 static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
1970 static bool sIsHandheld = true;
1971
1972 static bool connectFormatShaper() {
1973 static std::once_flag sCheckOnce;
1974
1975 ALOGV("connectFormatShaper...");
1976
1977 std::call_once(sCheckOnce, [&](){
1978
1979 void *libHandle = NULL;
1980 nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
1981
1982 // prefer any copy in the mainline module
1983 //
1984 android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
1985 AString libraryName = "libmediaformatshaper.so";
1986
1987 if (mediaNs != NULL) {
1988 static const android_dlextinfo dlextinfo = {
1989 .flags = ANDROID_DLEXT_USE_NAMESPACE,
1990 .library_namespace = mediaNs,
1991 };
1992
1993 AString libraryMainline = "/apex/com.android.media/";
1994 #if __LP64__
1995 libraryMainline.append("lib64/");
1996 #else
1997 libraryMainline.append("lib/");
1998 #endif
1999 libraryMainline.append(libraryName);
2000
2001 libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
2002 &dlextinfo);
2003
2004 if (libHandle != NULL) {
2005 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
2006 dlsym(libHandle, "shaper_ops");
2007 } else {
2008 ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
2009 libraryMainline.c_str());
2010 }
2011 } else {
2012 ALOGV("connectFormatShaper: couldn't find media namespace.");
2013 }
2014
2015 // fall back to the system partition, if present.
2016 //
2017 if (sShaperOps == NULL) {
2018
2019 libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
2020
2021 if (libHandle != NULL) {
2022 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
2023 dlsym(libHandle, "shaper_ops");
2024 } else {
2025 ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
2026 }
2027 }
2028
2029 if (sShaperOps != nullptr
2030 && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
2031 ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
2032 sShaperOps->version);
2033 sShaperOps = nullptr;
2034 }
2035
2036 if (sShaperOps != nullptr) {
2037 ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
2038 }
2039
2040 nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
2041 ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
2042 (loading_finished - loading_started)/1000);
2043
2044
2045 // we also want to know whether this is a handheld device
2046 // start with the assumption that the device is handheld.
2047 sIsHandheld = true;
2048 sp<IServiceManager> serviceMgr = defaultServiceManager();
2049 sp<content::pm::IPackageManagerNative> packageMgr;
2050 if (serviceMgr.get() != nullptr) {
2051 sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
2052 packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
2053 }
2054 // if we didn't get serviceMgr, we'll leave packageMgr as default null
2055 if (packageMgr != nullptr) {
2056
2057 // MUST have these
2058 static const String16 featuresNeeded[] = {
2059 String16("android.hardware.touchscreen")
2060 };
2061 // these must be present to be a handheld
2062 for (::android::String16 required : featuresNeeded) {
2063 bool hasFeature = false;
2064 binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
2065 if (!status.isOk()) {
2066 ALOGE("%s: hasSystemFeature failed: %s",
2067 __func__, status.exceptionMessage().c_str());
2068 continue;
2069 }
2070 ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
2071 if (!hasFeature) {
2072 ALOGV("... which means we are not handheld");
2073 sIsHandheld = false;
2074 break;
2075 }
2076 }
2077
2078 // MUST NOT have these
2079 static const String16 featuresDisallowed[] = {
2080 String16("android.hardware.type.automotive"),
2081 String16("android.hardware.type.television"),
2082 String16("android.hardware.type.watch")
2083 };
2084 // any of these present -- we aren't a handheld
2085 for (::android::String16 forbidden : featuresDisallowed) {
2086 bool hasFeature = false;
2087 binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
2088 if (!status.isOk()) {
2089 ALOGE("%s: hasSystemFeature failed: %s",
2090 __func__, status.exceptionMessage().c_str());
2091 continue;
2092 }
2093 ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
2094 if (hasFeature) {
2095 ALOGV("... which means we are not handheld");
2096 sIsHandheld = false;
2097 break;
2098 }
2099 }
2100 }
2101
2102 });
2103
2104 return true;
2105 }
2106
2107
2108 #if 0
2109 // a construct to force the above dlopen() to run very early.
2110 // goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
2111 // failure of this means that cold start of those apps is slower by the time to dlopen()
2112 // TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
2113 //
2114 static bool forceEarlyLoadingShaper = connectFormatShaper();
2115 #endif
2116
2117 // parse the codec's properties: mapping, whether it meets min quality, etc
2118 // and pass them into the video quality code
2119 //
2120 static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
2121 sp<MediaCodecInfo> codecInfo, AString mediaType) {
2122
2123 sp<MediaCodecInfo::Capabilities> capabilities =
2124 codecInfo->getCapabilitiesFor(mediaType.c_str());
2125 if (capabilities == nullptr) {
2126 ALOGI("no capabilities as part of the codec?");
2127 } else {
2128 const sp<AMessage> &details = capabilities->getDetails();
2129 AString mapTarget;
2130 int count = details->countEntries();
2131 for(int ix = 0; ix < count; ix++) {
2132 AMessage::Type entryType;
2133 const char *mapSrc = details->getEntryNameAt(ix, &entryType);
2134 // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
2135 //
2136 static const char *featurePrefix = "feature-";
2137 static const int featurePrefixLen = strlen(featurePrefix);
2138 static const char *tuningPrefix = "tuning-";
2139 static const int tuningPrefixLen = strlen(tuningPrefix);
2140 static const char *mappingPrefix = "mapping-";
2141 static const int mappingPrefixLen = strlen(mappingPrefix);
2142
2143 if (mapSrc == NULL) {
2144 continue;
2145 } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
2146 int32_t intValue;
2147 if (details->findInt32(mapSrc, &intValue)) {
2148 ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
2149 (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
2150 intValue);
2151 }
2152 continue;
2153 } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
2154 AString value;
2155 if (details->findString(mapSrc, &value)) {
2156 ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
2157 (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
2158 value.c_str());
2159 }
2160 continue;
2161 } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
2162 AString target;
2163 if (details->findString(mapSrc, &target)) {
2164 ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
2165 target.c_str());
2166 // key is really "kind-key"
2167 // separate that, so setMap() sees the triple kind, key, value
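// e.g. a (hypothetical) entry "mapping-pre-bitrate-mode" with value
// "vendor.example.bitrate-mode" becomes
// setMap("pre", "bitrate-mode", "vendor.example.bitrate-mode")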
2168 const char *kind = &mapSrc[mappingPrefixLen];
2169 const char *sep = strchr(kind, '-');
2170 if (sep != NULL) {
2171 const char *key = sep + 1;
2172 std::string xkind = std::string(kind, sep-kind);
2173 (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
2174 key, target.c_str());
2175 }
2176 }
2177 }
2178 }
2179 }
2180
2181 // we also carry in the codec description whether we are on a handheld device.
2182 // this info is eventually used by both the Codec and the C2 machinery to inform
2183 // the underlying codec whether to do any shaping.
2184 //
2185 if (sIsHandheld) {
2186 // set if we are indeed a handheld device (or, in the future, 'any eligible device');
2187 // left unset on devices that aren't eligible for minimum quality enforcement.
2188 (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
2189 // strictly speaking, it's a tuning, but those are strings and feature stores int
2190 (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
2191 }
2192 }
2193
2194 status_t MediaCodec::setupFormatShaper(AString mediaType) {
2195 ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
2196 mComponentName.c_str(), mediaType.c_str());
2197
2198 nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
2199
2200 // someone might have beaten us to it.
2201 mediaformatshaper::shaperHandle_t shaperHandle;
2202 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2203 if (shaperHandle != nullptr) {
2204 ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
2205 return OK;
2206 }
2207
2208 // we get to build & register one
2209 shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
2210 if (shaperHandle == nullptr) {
2211 ALOGW("unable to create a shaper for codec %s mediaType %s",
2212 mComponentName.c_str(), mediaType.c_str());
2213 return OK;
2214 }
2215
2216 (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
2217
2218 shaperHandle = sShaperOps->registerShaper(shaperHandle,
2219 mComponentName.c_str(), mediaType.c_str());
2220
2221 nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
2222 ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
2223 mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
2224
2225 return OK;
2226 }
2227
2228
2229 // Format Shaping
2230 // Mapping and Manipulation of encoding parameters
2231 //
2232 // All of these decisions are pushed into the shaper instead of here within MediaCodec.
2233 // this includes decisions based on whether the codec implements minimum quality bars
2234 // itself or needs to be shaped outside of the codec.
2235 // This keeps all those decisions in one place.
2236 // It also means that we push some extra decision information (is this a handheld device
2237 // or one that is otherwise eligible for minimum quality manipulation, which generational
2238 // quality target is in force, etc). This allows those values to be cached in the
2239 // per-codec structures that are done 1 time within a process instead of for each
2240 // codec instantiation.
2241 //
2242
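// Overall flow in shapeMediaFormat() below:
//   1. connectFormatShaper()            -- dlopen the shaper library (once per process)
//   2. findShaper()/setupFormatShaper() -- locate or create the per-(codec, mediaType) node
//   3. shapeFormat()                    -- let the shaper rewrite a copy of the format
//   4. changesFrom() + extend()         -- fold only the resulting deltas back into the
//                                          caller's format (and record them in metrics)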
2243 status_t MediaCodec::shapeMediaFormat(
2244 const sp<AMessage> &format,
2245 uint32_t flags,
2246 mediametrics_handle_t metricsHandle) {
2247 ALOGV("shapeMediaFormat entry");
2248
2249 if (!(flags & CONFIGURE_FLAG_ENCODE)) {
2250 ALOGW("shapeMediaFormat: not encoder");
2251 return OK;
2252 }
2253 if (mCodecInfo == NULL) {
2254 ALOGW("shapeMediaFormat: no codecinfo");
2255 return OK;
2256 }
2257
2258 AString mediaType;
2259 if (!format->findString("mime", &mediaType)) {
2260 ALOGW("shapeMediaFormat: no mediaType information");
2261 return OK;
2262 }
2263
2264 // make sure we have the function entry points for the shaper library
2265 //
2266
2267 connectFormatShaper();
2268 if (sShaperOps == nullptr) {
2269 ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
2270 return OK;
2271 }
2272
2273 // find the shaper information for this codec+mediaType pair
2274 //
2275 mediaformatshaper::shaperHandle_t shaperHandle;
2276 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2277 if (shaperHandle == nullptr) {
2278 setupFormatShaper(mediaType);
2279 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2280 }
2281 if (shaperHandle == nullptr) {
2282 ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
2283 mComponentName.c_str(), mediaType.c_str());
2284 return OK;
2285 }
2286
2287 // run the shaper
2288 //
2289
2290 ALOGV("Shaping input: %s", format->debugString(0).c_str());
2291
2292 sp<AMessage> updatedFormat = format->dup();
2293 AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
2294
2295 int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
2296 if (result == 0) {
2297 AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
2298
2299 sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
2300 size_t changeCount = deltas->countEntries();
2301 ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
2302 if (metricsHandle != 0) {
2303 mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
2304 }
2305 if (changeCount > 0) {
2306 if (metricsHandle != 0) {
2307 // save some old properties before we fold in the new ones
2308 int32_t bitrate;
2309 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
2310 mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
2311 }
2312 int32_t qpIMin = -1;
2313 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
2314 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
2315 }
2316 int32_t qpIMax = -1;
2317 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
2318 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
2319 }
2320 int32_t qpPMin = -1;
2321 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
2322 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
2323 }
2324 int32_t qpPMax = -1;
2325 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
2326 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
2327 }
2328 int32_t qpBMin = -1;
2329 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
2330 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
2331 }
2332 int32_t qpBMax = -1;
2333 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
2334 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
2335 }
2336 }
2337 // NB: for any field in both format and deltas, the deltas copy wins
2338 format->extend(deltas);
2339 }
2340 }
2341
2342 AMediaFormat_delete(updatedNdkFormat);
2343 return OK;
2344 }
2345
2346 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2347 bool reverse) {
2348 AString mediaType;
2349 if (!format->findString("mime", &mediaType)) {
2350 ALOGV("mapFormat: no mediaType information");
2351 return;
2352 }
2353 ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
2354 mediaType.c_str(), kind ? kind : "<all>", reverse);
2355
2356 // make sure we have the function entry points for the shaper library
2357 //
2358
2359 #if 0
2360 // let's take the faster path: only do mapping if we've already loaded the library
2361 connectFormatShaper();
2362 #endif
2363 if (sShaperOps == nullptr) {
2364 ALOGV("mapFormat: no MediaFormatShaper hooks available");
2365 return;
2366 }
2367
2368 // find the shaper information for this codec+mediaType pair
2369 //
2370 mediaformatshaper::shaperHandle_t shaperHandle;
2371 shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
2372 if (shaperHandle == nullptr) {
2373 ALOGV("mapFormat: no shaper handle");
2374 return;
2375 }
2376
2377 const char **mappings;
2378 if (reverse)
2379 mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
2380 else
2381 mappings = sShaperOps->getMappings(shaperHandle, kind);
2382
2383 if (mappings == nullptr) {
2384 ALOGV("no mappings returned");
2385 return;
2386 }
2387
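// "mappings" is a nullptr-terminated array of (from, to) name pairs, i.e.
// { from0, to0, from1, to1, ..., nullptr }; the strings are heap-allocated by the
// shaper and freed below once the renaming is done.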
2388 ALOGV("Pre-mapping: %s", format->debugString(2).c_str());
2389 // do the mapping
2390 //
2391 int entries = format->countEntries();
2392 for (int i = 0; ; i += 2) {
2393 if (mappings[i] == nullptr) {
2394 break;
2395 }
2396
2397 size_t ix = format->findEntryByName(mappings[i]);
2398 if (ix < entries) {
2399 ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
2400 status_t status = format->setEntryNameAt(ix, mappings[i+1]);
2401 if (status != OK) {
2402 ALOGW("Unable to map from '%s' to '%s': status %d",
2403 mappings[i], mappings[i+1], status);
2404 }
2405 }
2406 }
2407 ALOGV("Post-mapping: %s", format->debugString(2).c_str());
2408
2409
2410 // reclaim the mapping memory
2411 for (int i = 0; ; i += 2) {
2412 if (mappings[i] == nullptr) {
2413 break;
2414 }
2415 free((void*)mappings[i]);
2416 free((void*)mappings[i + 1]);
2417 }
2418 free(mappings);
2419 mappings = nullptr;
2420 }
2421
2422 //
2423 // end of Format Shaping hooks within MediaCodec
2424 //
2425
2426 status_t MediaCodec::releaseCrypto()
2427 {
2428 ALOGV("releaseCrypto");
2429
2430 sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
2431
2432 sp<AMessage> response;
2433 status_t status = msg->postAndAwaitResponse(&response);
2434
2435 if (status == OK && response != NULL) {
2436 CHECK(response->findInt32("status", &status));
2437 ALOGV("releaseCrypto ret: %d ", status);
2438 }
2439 else {
2440 ALOGE("releaseCrypto err: %d", status);
2441 }
2442
2443 return status;
2444 }
2445
2446 void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
2447 {
2448 status_t status = INVALID_OPERATION;
2449 if (mCrypto != NULL) {
2450 ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
2451 mBufferChannel->setCrypto(NULL);
2452 // TODO change to ALOGV
2453 ALOGD("onReleaseCrypto: [before clear] mCrypto: %p (%d)",
2454 mCrypto.get(), mCrypto->getStrongCount());
2455 mCrypto.clear();
2456
2457 status = OK;
2458 }
2459 else {
2460 ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
2461 }
2462
2463 sp<AMessage> response = new AMessage;
2464 response->setInt32("status", status);
2465
2466 sp<AReplyToken> replyID;
2467 CHECK(msg->senderAwaitsResponse(&replyID));
2468 response->postReply(replyID);
2469 }
2470
2471 status_t MediaCodec::setInputSurface(
2472 const sp<PersistentSurface> &surface) {
2473 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
2474 msg->setObject("input-surface", surface.get());
2475
2476 sp<AMessage> response;
2477 return PostAndAwaitResponse(msg, &response);
2478 }
2479
2480 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
2481 sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
2482 msg->setObject("surface", surface);
2483
2484 sp<AMessage> response;
2485 return PostAndAwaitResponse(msg, &response);
2486 }
2487
2488 status_t MediaCodec::createInputSurface(
2489 sp<IGraphicBufferProducer>* bufferProducer) {
2490 sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
2491
2492 sp<AMessage> response;
2493 status_t err = PostAndAwaitResponse(msg, &response);
2494 if (err == NO_ERROR) {
2495 // unwrap the sp<IGraphicBufferProducer>
2496 sp<RefBase> obj;
2497 bool found = response->findObject("input-surface", &obj);
2498 CHECK(found);
2499 sp<BufferProducerWrapper> wrapper(
2500 static_cast<BufferProducerWrapper*>(obj.get()));
2501 *bufferProducer = wrapper->getBufferProducer();
2502 } else {
2503 ALOGW("createInputSurface failed, err=%d", err);
2504 }
2505 return err;
2506 }
2507
2508 uint64_t MediaCodec::getGraphicBufferSize() {
2509 if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
2510 return 0;
2511 }
2512
2513 uint64_t size = 0;
2514 size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
2515 for (size_t i = 0; i < portNum; ++i) {
2516 // TODO: this is just an estimation, we should get the real buffer size from ACodec.
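// The estimate assumes YUV420 buffers (12 bits per pixel), i.e. width * height * 3/2
// bytes each; e.g. a 1920x1080 buffer comes to about 3 MB.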
2517 size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
2518 }
2519 return size;
2520 }
2521
2522 status_t MediaCodec::start() {
2523 sp<AMessage> msg = new AMessage(kWhatStart, this);
2524
2525 sp<AMessage> callback;
2526
2527 status_t err;
2528 std::vector<MediaResourceParcel> resources;
2529 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
2530 toMediaResourceSubType(mDomain)));
2531 // Don't know the buffer size at this point, but it's fine to use 1 because
2532 // the reclaimResource call doesn't consider the requester's buffer size for now.
2533 resources.push_back(MediaResource::GraphicMemoryResource(1));
2534 for (int i = 0; i <= kMaxRetry; ++i) {
2535 if (i > 0) {
2536 // Don't try to reclaim resource for the first time.
2537 if (!mResourceManagerProxy->reclaimResource(resources)) {
2538 break;
2539 }
2540 // Recover codec from previous error before retry start.
2541 err = reset();
2542 if (err != OK) {
2543 ALOGE("retrying start: failed to reset codec");
2544 break;
2545 }
2546 sp<AMessage> response;
2547 err = PostAndAwaitResponse(mConfigureMsg, &response);
2548 if (err != OK) {
2549 ALOGE("retrying start: failed to configure codec");
2550 break;
2551 }
2552 if (callback != nullptr) {
2553 err = setCallback(callback);
2554 if (err != OK) {
2555 ALOGE("retrying start: failed to set callback");
2556 break;
2557 }
2558 ALOGD("succeed to set callback for reclaim");
2559 }
2560 }
2561
2562 // Keep callback message after the first iteration if necessary.
2563 if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
2564 callback = mCallback;
2565 ALOGD("keep callback message for reclaim");
2566 }
2567
2568 sp<AMessage> response;
2569 err = PostAndAwaitResponse(msg, &response);
2570 if (!isResourceError(err)) {
2571 break;
2572 }
2573 }
2574 return err;
2575 }
2576
2577 status_t MediaCodec::stop() {
2578 sp<AMessage> msg = new AMessage(kWhatStop, this);
2579
2580 sp<AMessage> response;
2581 return PostAndAwaitResponse(msg, &response);
2582 }
2583
2584 bool MediaCodec::hasPendingBuffer(int portIndex) {
2585 return std::any_of(
2586 mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
2587 [](const BufferInfo &info) { return info.mOwnedByClient; });
2588 }
2589
2590 bool MediaCodec::hasPendingBuffer() {
2591 return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
2592 }
2593
2594 status_t MediaCodec::reclaim(bool force) {
2595 ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
2596 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2597 msg->setInt32("reclaimed", 1);
2598 msg->setInt32("force", force ? 1 : 0);
2599
2600 sp<AMessage> response;
2601 status_t ret = PostAndAwaitResponse(msg, &response);
2602 if (ret == -ENOENT) {
2603 ALOGD("MediaCodec looper is gone, skip reclaim");
2604 ret = OK;
2605 }
2606 return ret;
2607 }
2608
2609 status_t MediaCodec::release() {
2610 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2611 sp<AMessage> response;
2612 return PostAndAwaitResponse(msg, &response);
2613 }
2614
2615 status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
2616 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2617 msg->setMessage("async", notify);
2618 sp<AMessage> response;
2619 return PostAndAwaitResponse(msg, &response);
2620 }
2621
2622 status_t MediaCodec::reset() {
2623 /* When external-facing MediaCodec object is created,
2624 it is already initialized. Thus, reset is essentially
2625 release() followed by init(), plus clearing the state */
2626
2627 status_t err = release();
2628
2629 // unregister handlers
2630 if (mCodec != NULL) {
2631 if (mCodecLooper != NULL) {
2632 mCodecLooper->unregisterHandler(mCodec->id());
2633 } else {
2634 mLooper->unregisterHandler(mCodec->id());
2635 }
2636 mCodec = NULL;
2637 }
2638 mLooper->unregisterHandler(id());
2639
2640 mFlags = 0; // clear all flags
2641 mStickyError = OK;
2642
2643 // reset state not reset by setState(UNINITIALIZED)
2644 mDequeueInputReplyID = 0;
2645 mDequeueOutputReplyID = 0;
2646 mDequeueInputTimeoutGeneration = 0;
2647 mDequeueOutputTimeoutGeneration = 0;
2648 mHaveInputSurface = false;
2649
2650 if (err == OK) {
2651 err = init(mInitName);
2652 }
2653 return err;
2654 }
2655
2656 status_t MediaCodec::queueInputBuffer(
2657 size_t index,
2658 size_t offset,
2659 size_t size,
2660 int64_t presentationTimeUs,
2661 uint32_t flags,
2662 AString *errorDetailMsg) {
2663 if (errorDetailMsg != NULL) {
2664 errorDetailMsg->clear();
2665 }
2666
2667 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2668 msg->setSize("index", index);
2669 msg->setSize("offset", offset);
2670 msg->setSize("size", size);
2671 msg->setInt64("timeUs", presentationTimeUs);
2672 msg->setInt32("flags", flags);
2673 msg->setPointer("errorDetailMsg", errorDetailMsg);
2674
2675 sp<AMessage> response;
2676 return PostAndAwaitResponse(msg, &response);
2677 }
2678
2679 status_t MediaCodec::queueSecureInputBuffer(
2680 size_t index,
2681 size_t offset,
2682 const CryptoPlugin::SubSample *subSamples,
2683 size_t numSubSamples,
2684 const uint8_t key[16],
2685 const uint8_t iv[16],
2686 CryptoPlugin::Mode mode,
2687 const CryptoPlugin::Pattern &pattern,
2688 int64_t presentationTimeUs,
2689 uint32_t flags,
2690 AString *errorDetailMsg) {
2691 if (errorDetailMsg != NULL) {
2692 errorDetailMsg->clear();
2693 }
2694
2695 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2696 msg->setSize("index", index);
2697 msg->setSize("offset", offset);
2698 msg->setPointer("subSamples", (void *)subSamples);
2699 msg->setSize("numSubSamples", numSubSamples);
2700 msg->setPointer("key", (void *)key);
2701 msg->setPointer("iv", (void *)iv);
2702 msg->setInt32("mode", mode);
2703 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
2704 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
2705 msg->setInt64("timeUs", presentationTimeUs);
2706 msg->setInt32("flags", flags);
2707 msg->setPointer("errorDetailMsg", errorDetailMsg);
2708
2709 sp<AMessage> response;
2710 status_t err = PostAndAwaitResponse(msg, &response);
2711
2712 return err;
2713 }
2714
2715 status_t MediaCodec::queueBuffer(
2716 size_t index,
2717 const std::shared_ptr<C2Buffer> &buffer,
2718 int64_t presentationTimeUs,
2719 uint32_t flags,
2720 const sp<AMessage> &tunings,
2721 AString *errorDetailMsg) {
2722 if (errorDetailMsg != NULL) {
2723 errorDetailMsg->clear();
2724 }
2725
2726 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2727 msg->setSize("index", index);
2728 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
2729 new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
2730 msg->setObject("c2buffer", obj);
2731 msg->setInt64("timeUs", presentationTimeUs);
2732 msg->setInt32("flags", flags);
2733 if (tunings && tunings->countEntries() > 0) {
2734 msg->setMessage("tunings", tunings);
2735 }
2736 msg->setPointer("errorDetailMsg", errorDetailMsg);
2737
2738 sp<AMessage> response;
2739 status_t err = PostAndAwaitResponse(msg, &response);
2740
2741 return err;
2742 }
2743
2744 status_t MediaCodec::queueEncryptedBuffer(
2745 size_t index,
2746 const sp<hardware::HidlMemory> &buffer,
2747 size_t offset,
2748 const CryptoPlugin::SubSample *subSamples,
2749 size_t numSubSamples,
2750 const uint8_t key[16],
2751 const uint8_t iv[16],
2752 CryptoPlugin::Mode mode,
2753 const CryptoPlugin::Pattern &pattern,
2754 int64_t presentationTimeUs,
2755 uint32_t flags,
2756 const sp<AMessage> &tunings,
2757 AString *errorDetailMsg) {
2758 if (errorDetailMsg != NULL) {
2759 errorDetailMsg->clear();
2760 }
2761
2762 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2763 msg->setSize("index", index);
2764 sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
2765 new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
2766 msg->setObject("memory", memory);
2767 msg->setSize("offset", offset);
2768 msg->setPointer("subSamples", (void *)subSamples);
2769 msg->setSize("numSubSamples", numSubSamples);
2770 msg->setPointer("key", (void *)key);
2771 msg->setPointer("iv", (void *)iv);
2772 msg->setInt32("mode", mode);
2773 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
2774 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
2775 msg->setInt64("timeUs", presentationTimeUs);
2776 msg->setInt32("flags", flags);
2777 if (tunings && tunings->countEntries() > 0) {
2778 msg->setMessage("tunings", tunings);
2779 }
2780 msg->setPointer("errorDetailMsg", errorDetailMsg);
2781
2782 sp<AMessage> response;
2783 status_t err = PostAndAwaitResponse(msg, &response);
2784
2785 return err;
2786 }
2787
2788 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
2789 sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
2790 msg->setInt64("timeoutUs", timeoutUs);
2791
2792 sp<AMessage> response;
2793 status_t err;
2794 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2795 return err;
2796 }
2797
2798 CHECK(response->findSize("index", index));
2799
2800 return OK;
2801 }
2802
2803 status_t MediaCodec::dequeueOutputBuffer(
2804 size_t *index,
2805 size_t *offset,
2806 size_t *size,
2807 int64_t *presentationTimeUs,
2808 uint32_t *flags,
2809 int64_t timeoutUs) {
2810 sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
2811 msg->setInt64("timeoutUs", timeoutUs);
2812
2813 sp<AMessage> response;
2814 status_t err;
2815 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2816 return err;
2817 }
2818
2819 CHECK(response->findSize("index", index));
2820 CHECK(response->findSize("offset", offset));
2821 CHECK(response->findSize("size", size));
2822 CHECK(response->findInt64("timeUs", presentationTimeUs));
2823 CHECK(response->findInt32("flags", (int32_t *)flags));
2824
2825 return OK;
2826 }
2827
2828 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
2829 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2830 msg->setSize("index", index);
2831 msg->setInt32("render", true);
2832
2833 sp<AMessage> response;
2834 return PostAndAwaitResponse(msg, &response);
2835 }
2836
2837 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
2838 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2839 msg->setSize("index", index);
2840 msg->setInt32("render", true);
2841 msg->setInt64("timestampNs", timestampNs);
2842
2843 sp<AMessage> response;
2844 return PostAndAwaitResponse(msg, &response);
2845 }
2846
2847 status_t MediaCodec::releaseOutputBuffer(size_t index) {
2848 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2849 msg->setSize("index", index);
2850
2851 sp<AMessage> response;
2852 return PostAndAwaitResponse(msg, &response);
2853 }
2854
2855 status_t MediaCodec::signalEndOfInputStream() {
2856 sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
2857
2858 sp<AMessage> response;
2859 return PostAndAwaitResponse(msg, &response);
2860 }
2861
2862 status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
2863 sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
2864
2865 sp<AMessage> response;
2866 status_t err;
2867 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2868 return err;
2869 }
2870
2871 CHECK(response->findMessage("format", format));
2872
2873 return OK;
2874 }
2875
2876 status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
2877 sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
2878
2879 sp<AMessage> response;
2880 status_t err;
2881 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2882 return err;
2883 }
2884
2885 CHECK(response->findMessage("format", format));
2886
2887 return OK;
2888 }
2889
2890 status_t MediaCodec::getName(AString *name) const {
2891 sp<AMessage> msg = new AMessage(kWhatGetName, this);
2892
2893 sp<AMessage> response;
2894 status_t err;
2895 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2896 return err;
2897 }
2898
2899 CHECK(response->findString("name", name));
2900
2901 return OK;
2902 }
2903
2904 status_t MediaCodec::getCodecInfo(sp<MediaCodecInfo> *codecInfo) const {
2905 sp<AMessage> msg = new AMessage(kWhatGetCodecInfo, this);
2906
2907 sp<AMessage> response;
2908 status_t err;
2909 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2910 return err;
2911 }
2912
2913 sp<RefBase> obj;
2914 CHECK(response->findObject("codecInfo", &obj));
2915 *codecInfo = static_cast<MediaCodecInfo *>(obj.get());
2916
2917 return OK;
2918 }
2919
2920 // this is the user-callable entry point
2921 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
2922
2923 reply = 0;
2924
2925 sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
2926 sp<AMessage> response;
2927 status_t err;
2928 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2929 return err;
2930 }
2931
2932 CHECK(response->findInt64("metrics", &reply));
2933
2934 return OK;
2935 }
2936
2937 // runs on the looper thread (for mutex purposes)
2938 void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
2939
2940 mediametrics_handle_t results = 0;
2941
2942 sp<AReplyToken> replyID;
2943 CHECK(msg->senderAwaitsResponse(&replyID));
2944
2945 if (mMetricsHandle != 0) {
2946 updateMediametrics();
2947 results = mediametrics_dup(mMetricsHandle);
2948 updateEphemeralMediametrics(results);
2949 } else {
2950 results = mediametrics_dup(mMetricsHandle);
2951 }
2952
2953 sp<AMessage> response = new AMessage;
2954 response->setInt64("metrics", results);
2955 response->postReply(replyID);
2956 }
2957
2958 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
2959 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
2960 msg->setInt32("portIndex", kPortIndexInput);
2961 msg->setPointer("buffers", buffers);
2962
2963 sp<AMessage> response;
2964 return PostAndAwaitResponse(msg, &response);
2965 }
2966
2967 status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
2968 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
2969 msg->setInt32("portIndex", kPortIndexOutput);
2970 msg->setPointer("buffers", buffers);
2971
2972 sp<AMessage> response;
2973 return PostAndAwaitResponse(msg, &response);
2974 }
2975
2976 status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
2977 sp<AMessage> format;
2978 return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
2979 }
2980
2981 status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
2982 sp<MediaCodecBuffer> buffer;
2983 return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
2984 }
2985
2986 status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
2987 sp<AMessage> format;
2988 return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
2989 }
2990
2991 bool MediaCodec::isExecuting() const {
2992 return mState == STARTED || mState == FLUSHED;
2993 }
2994
2995 status_t MediaCodec::getBufferAndFormat(
2996 size_t portIndex, size_t index,
2997 sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
2998 // use mutex instead of a context switch
2999 if (mReleasedByResourceManager) {
3000 ALOGE("getBufferAndFormat - resource already released");
3001 return DEAD_OBJECT;
3002 }
3003
3004 if (buffer == NULL) {
3005 ALOGE("getBufferAndFormat - null MediaCodecBuffer");
3006 return INVALID_OPERATION;
3007 }
3008
3009 if (format == NULL) {
3010 ALOGE("getBufferAndFormat - null AMessage");
3011 return INVALID_OPERATION;
3012 }
3013
3014 buffer->clear();
3015 format->clear();
3016
3017 if (!isExecuting()) {
3018 ALOGE("getBufferAndFormat - not executing");
3019 return INVALID_OPERATION;
3020 }
3021
3022 // we do not want mPortBuffers to change during this section
3023 // we also don't want mOwnedByClient to change during this
3024 Mutex::Autolock al(mBufferLock);
3025
3026 std::vector<BufferInfo> &buffers = mPortBuffers[portIndex];
3027 if (index >= buffers.size()) {
3028 ALOGE("getBufferAndFormat - trying to get buffer with "
3029 "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
3030 return INVALID_OPERATION;
3031 }
3032
3033 const BufferInfo &info = buffers[index];
3034 if (!info.mOwnedByClient) {
3035 ALOGE("getBufferAndFormat - invalid operation "
3036 "(the index %zu is not owned by client)", index);
3037 return INVALID_OPERATION;
3038 }
3039
3040 *buffer = info.mData;
3041 *format = info.mData->format();
3042
3043 return OK;
3044 }
3045
3046 status_t MediaCodec::flush() {
3047 sp<AMessage> msg = new AMessage(kWhatFlush, this);
3048
3049 sp<AMessage> response;
3050 return PostAndAwaitResponse(msg, &response);
3051 }
3052
3053 status_t MediaCodec::requestIDRFrame() {
3054 (new AMessage(kWhatRequestIDRFrame, this))->post();
3055
3056 return OK;
3057 }
3058
3059 status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
3060 return mCodec->querySupportedParameters(names);
3061 }
3062
3063 status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
3064 return mCodec->describeParameter(name, desc);
3065 }
3066
3067 status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
3068 return mCodec->subscribeToParameters(names);
3069 }
3070
3071 status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
3072 return mCodec->unsubscribeFromParameters(names);
3073 }
3074
3075 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
3076 sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
3077 msg->setMessage("notify", notify);
3078 msg->post();
3079 }
3080
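// Heuristic (as implemented by the checks below): a one-shot CPU boost is requested only for
// software-rendered HDR output -- either an OMX codec producing YUV420Planar16 through the
// client-side SoftwareRenderer, or the codec2 software domain -- and only once the decoded
// area reaches 1080p (1920x1080 pixels) or larger.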
3081 void MediaCodec::requestCpuBoostIfNeeded() {
3082 if (mCpuBoostRequested) {
3083 return;
3084 }
3085 int32_t colorFormat;
3086 if (mOutputFormat->contains("hdr-static-info")
3087 && mOutputFormat->findInt32("color-format", &colorFormat)
3088 // check the color format for OMX only; for C2 the format is always opaque here
3089 // since software rendering does not go through the client
3090 && ((mSoftRenderer != NULL && colorFormat == OMX_COLOR_FormatYUV420Planar16)
3091 || mOwnerName.equalsIgnoreCase("codec2::software"))) {
3092 int32_t left, top, right, bottom, width, height;
3093 int64_t totalPixel = 0;
3094 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
3095 totalPixel = (right - left + 1) * (bottom - top + 1);
3096 } else if (mOutputFormat->findInt32("width", &width)
3097 && mOutputFormat->findInt32("height", &height)) {
3098 totalPixel = width * height;
3099 }
3100 if (totalPixel >= 1920 * 1080) {
3101 mResourceManagerProxy->addResource(MediaResource::CpuBoostResource());
3102 mCpuBoostRequested = true;
3103 }
3104 }
3105 }
3106
3107 BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
3108 : mTimeoutUs(timeoutUs)
3109 , mLastActivityTimeUs(-1ll)
3110 , mBatteryStatNotified(false)
3111 , mBatteryCheckerGeneration(0)
3112 , mIsExecuting(false)
3113 , mBatteryCheckerMsg(msg) {}
3114
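// BatteryChecker overview: onCodecActivity() turns the battery stat on (via batteryOnCb) the
// first time activity is seen and posts a delayed check message; subsequent activity only
// refreshes mLastActivityTimeUs. The "generation" value stamped into the message lets
// onClientRemoved() invalidate any checker that is already in flight.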
3115 void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
3116 if (!isExecuting()) {
3117 // ignore if not executing
3118 return;
3119 }
3120 if (!mBatteryStatNotified) {
3121 batteryOnCb();
3122 mBatteryStatNotified = true;
3123 sp<AMessage> msg = mBatteryCheckerMsg->dup();
3124 msg->setInt32("generation", mBatteryCheckerGeneration);
3125
3126 // post checker and clear last activity time
3127 msg->post(mTimeoutUs);
3128 mLastActivityTimeUs = -1ll;
3129 } else {
3130 // update last activity time
3131 mLastActivityTimeUs = ALooper::GetNowUs();
3132 }
3133 }
3134
3135 void BatteryChecker::onCheckBatteryTimer(
3136 const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
3137 // ignore if this checker already expired because the client resource was removed
3138 int32_t generation;
3139 if (!msg->findInt32("generation", &generation)
3140 || generation != mBatteryCheckerGeneration) {
3141 return;
3142 }
3143
3144 if (mLastActivityTimeUs < 0ll) {
3145 // timed out inactive, do not repost checker
3146 batteryOffCb();
3147 mBatteryStatNotified = false;
3148 } else {
3149 // repost checker and clear last activity time
3150 msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
3151 mLastActivityTimeUs = -1ll;
3152 }
3153 }
3154
3155 void BatteryChecker::onClientRemoved() {
3156 mBatteryStatNotified = false;
3157 mBatteryCheckerGeneration++;
3158 }
3159
3160 ////////////////////////////////////////////////////////////////////////////////
3161
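// Cancels any synchronous dequeueInputBuffer/dequeueOutputBuffer call that is still waiting:
// the pending caller is unblocked with INVALID_OPERATION, and the timeout generation is bumped
// so a later kWhatDequeue*TimedOut message for the cancelled request is recognized as stale.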
3162 void MediaCodec::cancelPendingDequeueOperations() {
3163 if (mFlags & kFlagDequeueInputPending) {
3164 PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
3165
3166 ++mDequeueInputTimeoutGeneration;
3167 mDequeueInputReplyID = 0;
3168 mFlags &= ~kFlagDequeueInputPending;
3169 }
3170
3171 if (mFlags & kFlagDequeueOutputPending) {
3172 PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
3173
3174 ++mDequeueOutputTimeoutGeneration;
3175 mDequeueOutputReplyID = 0;
3176 mFlags &= ~kFlagDequeueOutputPending;
3177 }
3178 }
3179
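// Both handleDequeue*Buffer() helpers reply to the caller and return true when the request is
// fully handled (success or error); returning false means no buffer was available yet, so the
// reply stays pending (kFlagDequeue*Pending) until a buffer or a timeout arrives.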
3180 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
3181 if (!isExecuting() || (mFlags & kFlagIsAsync)
3182 || (newRequest && (mFlags & kFlagDequeueInputPending))) {
3183 PostReplyWithError(replyID, INVALID_OPERATION);
3184 return true;
3185 } else if (mFlags & kFlagStickyError) {
3186 PostReplyWithError(replyID, getStickyError());
3187 return true;
3188 }
3189
3190 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3191
3192 if (index < 0) {
3193 CHECK_EQ(index, -EAGAIN);
3194 return false;
3195 }
3196
3197 sp<AMessage> response = new AMessage;
3198 response->setSize("index", index);
3199 response->postReply(replyID);
3200
3201 return true;
3202 }
3203
3204 bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
3205 if (!isExecuting() || (mFlags & kFlagIsAsync)
3206 || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
3207 PostReplyWithError(replyID, INVALID_OPERATION);
3208 } else if (mFlags & kFlagStickyError) {
3209 PostReplyWithError(replyID, getStickyError());
3210 } else if (mFlags & kFlagOutputBuffersChanged) {
3211 PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
3212 mFlags &= ~kFlagOutputBuffersChanged;
3213 } else {
3214 sp<AMessage> response = new AMessage;
3215 BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
3216 if (!info) {
3217 return false;
3218 }
3219
3220 // In synchronous mode, an output format change is handled at dequeue time
3221 // so that the event is reported to the client in the correct order.
3222
3223 const sp<MediaCodecBuffer> &buffer = info->mData;
3224 handleOutputFormatChangeIfNeeded(buffer);
3225 if (mFlags & kFlagOutputFormatChanged) {
3226 PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
3227 mFlags &= ~kFlagOutputFormatChanged;
3228 return true;
3229 }
3230
3231 ssize_t index = dequeuePortBuffer(kPortIndexOutput);
3232
3233 response->setSize("index", index);
3234 response->setSize("offset", buffer->offset());
3235 response->setSize("size", buffer->size());
3236
3237 int64_t timeUs;
3238 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
3239
3240 response->setInt64("timeUs", timeUs);
3241
3242 int32_t flags;
3243 CHECK(buffer->meta()->findInt32("flags", &flags));
3244
3245 response->setInt32("flags", flags);
3246
3247 statsBufferReceived(timeUs, buffer);
3248
3249 response->postReply(replyID);
3250 }
3251
3252 return true;
3253 }
3254
3255 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
3256 switch (msg->what()) {
3257 case kWhatCodecNotify:
3258 {
3259 int32_t what;
3260 CHECK(msg->findInt32("what", &what));
3261
3262 switch (what) {
3263 case kWhatError:
3264 {
3265 int32_t err, actionCode;
3266 CHECK(msg->findInt32("err", &err));
3267 CHECK(msg->findInt32("actionCode", &actionCode));
3268
3269 ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
3270 err, StrMediaError(err).c_str(), actionCode,
3271 mState, stateString(mState).c_str());
3272 if (err == DEAD_OBJECT) {
3273 mFlags |= kFlagSawMediaServerDie;
3274 mFlags &= ~kFlagIsComponentAllocated;
3275 }
3276
3277 bool sendErrorResponse = true;
3278 std::string origin{"kWhatError:"};
3279 origin += stateString(mState);
3280
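// The recovery target depends on the state we were in when the codec reported the error:
// transitional states fall back to the state they started from (or to UNINITIALIZED on a
// fatal action code), while executing states record a sticky error and notify the client
// before deciding how far to unwind.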
3281 switch (mState) {
3282 case INITIALIZING:
3283 {
3284 setState(UNINITIALIZED);
3285 break;
3286 }
3287
3288 case CONFIGURING:
3289 {
3290 if (actionCode == ACTION_CODE_FATAL) {
3291 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3292 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3293 stateString(mState).c_str());
3294 flushMediametrics();
3295 initMediametrics();
3296 }
3297 setState(actionCode == ACTION_CODE_FATAL ?
3298 UNINITIALIZED : INITIALIZED);
3299 break;
3300 }
3301
3302 case STARTING:
3303 {
3304 if (actionCode == ACTION_CODE_FATAL) {
3305 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3306 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3307 stateString(mState).c_str());
3308 flushMediametrics();
3309 initMediametrics();
3310 }
3311 setState(actionCode == ACTION_CODE_FATAL ?
3312 UNINITIALIZED : CONFIGURED);
3313 break;
3314 }
3315
3316 case RELEASING:
3317 {
3318 // Ignore the error, assuming we'll still get
3319 // the shutdown complete notification. If we
3320 // don't, we'll timeout and force release.
3321 sendErrorResponse = false;
3322 FALLTHROUGH_INTENDED;
3323 }
3324 case STOPPING:
3325 {
3326 if (mFlags & kFlagSawMediaServerDie) {
3327 if (mState == RELEASING && !mReplyID) {
3328 ALOGD("Releasing asynchronously, so nothing to reply here.");
3329 }
3330 // MediaServer died, there definitely won't
3331 // be a shutdown complete notification after
3332 // all.
3333
3334 // note that we may be directly going from
3335 // STOPPING->UNINITIALIZED, instead of the
3336 // usual STOPPING->INITIALIZED state.
3337 setState(UNINITIALIZED);
3338 if (mState == RELEASING) {
3339 mComponentName.clear();
3340 }
3341 if (mReplyID) {
3342 postPendingRepliesAndDeferredMessages(origin + ":dead");
3343 } else {
3344 ALOGD("no pending replies: %s:dead following %s",
3345 origin.c_str(), mLastReplyOrigin.c_str());
3346 }
3347 sendErrorResponse = false;
3348 } else if (!mReplyID) {
3349 sendErrorResponse = false;
3350 }
3351 break;
3352 }
3353
3354 case FLUSHING:
3355 {
3356 if (actionCode == ACTION_CODE_FATAL) {
3357 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3358 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3359 stateString(mState).c_str());
3360 flushMediametrics();
3361 initMediametrics();
3362
3363 setState(UNINITIALIZED);
3364 } else {
3365 setState(
3366 (mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
3367 }
3368 break;
3369 }
3370
3371 case FLUSHED:
3372 case STARTED:
3373 {
3374 sendErrorResponse = (mReplyID != nullptr);
3375
3376 setStickyError(err);
3377 postActivityNotificationIfPossible();
3378
3379 cancelPendingDequeueOperations();
3380
3381 if (mFlags & kFlagIsAsync) {
3382 onError(err, actionCode);
3383 }
3384 switch (actionCode) {
3385 case ACTION_CODE_TRANSIENT:
3386 break;
3387 case ACTION_CODE_RECOVERABLE:
3388 setState(INITIALIZED);
3389 break;
3390 default:
3391 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3392 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3393 stateString(mState).c_str());
3394 flushMediametrics();
3395 initMediametrics();
3396 setState(UNINITIALIZED);
3397 break;
3398 }
3399 break;
3400 }
3401
3402 default:
3403 {
3404 sendErrorResponse = (mReplyID != nullptr);
3405
3406 setStickyError(err);
3407 postActivityNotificationIfPossible();
3408
3409 // actionCode in an uninitialized state is always fatal.
3410 if (mState == UNINITIALIZED) {
3411 actionCode = ACTION_CODE_FATAL;
3412 }
3413 if (mFlags & kFlagIsAsync) {
3414 onError(err, actionCode);
3415 }
3416 switch (actionCode) {
3417 case ACTION_CODE_TRANSIENT:
3418 break;
3419 case ACTION_CODE_RECOVERABLE:
3420 setState(INITIALIZED);
3421 break;
3422 default:
3423 setState(UNINITIALIZED);
3424 break;
3425 }
3426 break;
3427 }
3428 }
3429
3430 if (sendErrorResponse) {
3431 // TRICKY: replicate PostReplyWithError logic for
3432 // err code override
3433 int32_t finalErr = err;
3434 if (mReleasedByResourceManager) {
3435 // override the err code if MediaCodec has been
3436 // released by ResourceManager.
3437 finalErr = DEAD_OBJECT;
3438 }
3439 postPendingRepliesAndDeferredMessages(origin, finalErr);
3440 }
3441 break;
3442 }
3443
3444 case kWhatComponentAllocated:
3445 {
3446 if (mState == RELEASING || mState == UNINITIALIZED) {
3447 // In case a kWhatError or kWhatRelease message came in and replied,
3448 // we log a warning and ignore.
3449 ALOGW("allocate interrupted by error or release, current state %d/%s",
3450 mState, stateString(mState).c_str());
3451 break;
3452 }
3453 CHECK_EQ(mState, INITIALIZING);
3454 setState(INITIALIZED);
3455 mFlags |= kFlagIsComponentAllocated;
3456
3457 CHECK(msg->findString("componentName", &mComponentName));
3458
3459 if (mComponentName.c_str()) {
3460 mediametrics_setCString(mMetricsHandle, kCodecCodec,
3461 mComponentName.c_str());
3462 }
3463
3464 const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
3465 if (mComponentName.startsWith("OMX.google.")
3466 && strncmp(owner, "default", 8) == 0) {
3467 mFlags |= kFlagUsesSoftwareRenderer;
3468 } else {
3469 mFlags &= ~kFlagUsesSoftwareRenderer;
3470 }
3471 mOwnerName = owner;
3472
3473 if (mComponentName.endsWith(".secure")) {
3474 mFlags |= kFlagIsSecure;
3475 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 1);
3476 } else {
3477 mFlags &= ~kFlagIsSecure;
3478 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
3479 }
3480
3481 MediaCodecInfo::Attributes attr = mCodecInfo
3482 ? mCodecInfo->getAttributes()
3483 : MediaCodecInfo::Attributes(0);
3484 if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
3485 // software codec is currently ignored.
3486 mResourceManagerProxy->addResource(MediaResource::CodecResource(
3487 mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
3488 }
3489
3490 postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
3491 break;
3492 }
3493
3494 case kWhatComponentConfigured:
3495 {
3496 if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
3497 // In case a kWhatError or kWhatRelease message came in and replied,
3498 // we log a warning and ignore.
3499 ALOGW("configure interrupted by error or release, current state %d/%s",
3500 mState, stateString(mState).c_str());
3501 break;
3502 }
3503 CHECK_EQ(mState, CONFIGURING);
3504
3505 // reset input surface flag
3506 mHaveInputSurface = false;
3507
3508 CHECK(msg->findMessage("input-format", &mInputFormat));
3509 CHECK(msg->findMessage("output-format", &mOutputFormat));
3510
3511 // limit to confirming the opt-in behavior to minimize any behavioral change
3512 if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
3513 // signal frame dropping mode in the input format as this may also be
3514 // meaningful and confusing for an encoder in a transcoder scenario
3515 mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
3516 }
3517 sp<AMessage> interestingFormat =
3518 (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
3519 ALOGV("[%s] configured as input format: %s, output format: %s",
3520 mComponentName.c_str(),
3521 mInputFormat->debugString(4).c_str(),
3522 mOutputFormat->debugString(4).c_str());
3523 int32_t usingSwRenderer;
3524 if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
3525 && usingSwRenderer) {
3526 mFlags |= kFlagUsesSoftwareRenderer;
3527 }
3528 setState(CONFIGURED);
3529 postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
3530
3531 // augment our media metrics info, now that we know more things
3532 // such as what the codec extracted from any CSD passed in.
3533 if (mMetricsHandle != 0) {
3534 sp<AMessage> format;
3535 if (mConfigureMsg != NULL &&
3536 mConfigureMsg->findMessage("format", &format)) {
3537 // format includes: mime
3538 AString mime;
3539 if (format->findString("mime", &mime)) {
3540 mediametrics_setCString(mMetricsHandle, kCodecMime,
3541 mime.c_str());
3542 }
3543 }
3544 // perhaps video only?
3545 int32_t profile = 0;
3546 if (interestingFormat->findInt32("profile", &profile)) {
3547 mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
3548 }
3549 int32_t level = 0;
3550 if (interestingFormat->findInt32("level", &level)) {
3551 mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
3552 }
3553 updateHdrMetrics(true /* isConfig */);
3554 // bitrate and bitrate mode, encoder only
3555 if (mFlags & kFlagIsEncoder) {
3556 // encoder specific values
3557 int32_t bitrate_mode = -1;
3558 if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
3559 mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
3560 asString_BitrateMode(bitrate_mode));
3561 }
3562 int32_t bitrate = -1;
3563 if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
3564 mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
3565 }
3566 } else {
3567 // decoder specific values
3568 }
3569 }
3570 break;
3571 }
3572
3573 case kWhatInputSurfaceCreated:
3574 {
3575 if (mState != CONFIGURED) {
3576 // state transitioned unexpectedly; we should have replied already.
3577 ALOGD("received kWhatInputSurfaceCreated message in state %s",
3578 stateString(mState).c_str());
3579 break;
3580 }
3581 // response to initiateCreateInputSurface()
3582 status_t err = NO_ERROR;
3583 sp<AMessage> response = new AMessage;
3584 if (!msg->findInt32("err", &err)) {
3585 sp<RefBase> obj;
3586 msg->findObject("input-surface", &obj);
3587 CHECK(msg->findMessage("input-format", &mInputFormat));
3588 CHECK(msg->findMessage("output-format", &mOutputFormat));
3589 ALOGV("[%s] input surface created as input format: %s, output format: %s",
3590 mComponentName.c_str(),
3591 mInputFormat->debugString(4).c_str(),
3592 mOutputFormat->debugString(4).c_str());
3593 CHECK(obj != NULL);
3594 response->setObject("input-surface", obj);
3595 mHaveInputSurface = true;
3596 } else {
3597 response->setInt32("err", err);
3598 }
3599 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
3600 break;
3601 }
3602
3603 case kWhatInputSurfaceAccepted:
3604 {
3605 if (mState != CONFIGURED) {
3606 // state transitioned unexpectedly; we should have replied already.
3607 ALOGD("received kWhatInputSurfaceAccepted message in state %s",
3608 stateString(mState).c_str());
3609 break;
3610 }
3611 // response to initiateSetInputSurface()
3612 status_t err = NO_ERROR;
3613 sp<AMessage> response = new AMessage();
3614 if (!msg->findInt32("err", &err)) {
3615 CHECK(msg->findMessage("input-format", &mInputFormat));
3616 CHECK(msg->findMessage("output-format", &mOutputFormat));
3617 mHaveInputSurface = true;
3618 } else {
3619 response->setInt32("err", err);
3620 }
3621 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
3622 break;
3623 }
3624
3625 case kWhatSignaledInputEOS:
3626 {
3627 if (!isExecuting()) {
3628 // state transitioned unexpectedly; we should have replied already.
3629 ALOGD("received kWhatSignaledInputEOS message in state %s",
3630 stateString(mState).c_str());
3631 break;
3632 }
3633 // response to signalEndOfInputStream()
3634 sp<AMessage> response = new AMessage;
3635 status_t err;
3636 if (msg->findInt32("err", &err)) {
3637 response->setInt32("err", err);
3638 }
3639 postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
3640 break;
3641 }
3642
3643 case kWhatStartCompleted:
3644 {
3645 if (mState == RELEASING || mState == UNINITIALIZED) {
3646 // In case a kWhatRelease message came in and replied,
3647 // we log a warning and ignore.
3648 ALOGW("start interrupted by release, current state %d/%s",
3649 mState, stateString(mState).c_str());
3650 break;
3651 }
3652
3653 CHECK_EQ(mState, STARTING);
3654 if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
3655 mResourceManagerProxy->addResource(
3656 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
3657 }
3658 setState(STARTED);
3659 postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
3660
3661 // Now that the codec has started, configure, by default, the peek behavior to
3662 // be undefined for backwards compatibility with older releases. Later, if an
3663 // app explicitly enables or disables peek, the parameter will be turned off and
3664 // the legacy undefined behavior is disallowed.
3665 // See updateTunnelPeek called in onSetParameters for more details.
3666 if (mTunneled && mTunnelPeekState == TunnelPeekState::kLegacyMode) {
3667 sp<AMessage> params = new AMessage;
3668 params->setInt32("android._tunnel-peek-set-legacy", 1);
3669 mCodec->signalSetParameters(params);
3670 }
3671 break;
3672 }
3673
3674 case kWhatOutputBuffersChanged:
3675 {
3676 mFlags |= kFlagOutputBuffersChanged;
3677 postActivityNotificationIfPossible();
3678 break;
3679 }
3680
3681 case kWhatOutputFramesRendered:
3682 {
3683 // ignore these in all states except running
3684 if (mState != STARTED) {
3685 break;
3686 }
3687 TunnelPeekState previousState = mTunnelPeekState;
3688 if (mTunnelPeekState != TunnelPeekState::kLegacyMode) {
3689 mTunnelPeekState = TunnelPeekState::kBufferRendered;
3690 ALOGV("TunnelPeekState: %s -> %s",
3691 asString(previousState),
3692 asString(TunnelPeekState::kBufferRendered));
3693 }
3694 updatePlaybackDuration(msg);
3695 // check that we have a notification set
3696 if (mOnFrameRenderedNotification != NULL) {
3697 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
3698 notify->setMessage("data", msg);
3699 notify->post();
3700 }
3701 break;
3702 }
3703
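// First decoded frame is ready in tunnel mode: if peek is enabled, ask the codec to render it
// immediately ("android._trigger-tunnel-peek"); if peek is disabled, just remember that a frame
// has been decoded. Either way the app's notification (if any) is forwarded below.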
3704 case kWhatFirstTunnelFrameReady:
3705 {
3706 if (mState != STARTED) {
3707 break;
3708 }
3709 TunnelPeekState previousState = mTunnelPeekState;
3710 switch(mTunnelPeekState) {
3711 case TunnelPeekState::kDisabledNoBuffer:
3712 case TunnelPeekState::kDisabledQueued:
3713 mTunnelPeekState = TunnelPeekState::kBufferDecoded;
3714 ALOGV("First tunnel frame ready");
3715 ALOGV("TunnelPeekState: %s -> %s",
3716 asString(previousState),
3717 asString(mTunnelPeekState));
3718 break;
3719 case TunnelPeekState::kEnabledNoBuffer:
3720 case TunnelPeekState::kEnabledQueued:
3721 {
3722 sp<AMessage> parameters = new AMessage();
3723 parameters->setInt32("android._trigger-tunnel-peek", 1);
3724 mCodec->signalSetParameters(parameters);
3725 }
3726 mTunnelPeekState = TunnelPeekState::kBufferRendered;
3727 ALOGV("First tunnel frame ready");
3728 ALOGV("TunnelPeekState: %s -> %s",
3729 asString(previousState),
3730 asString(mTunnelPeekState));
3731 break;
3732 default:
3733 ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
3734 asString(mTunnelPeekState));
3735 break;
3736 }
3737
3738 if (mOnFirstTunnelFrameReadyNotification != nullptr) {
3739 sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
3740 notify->setMessage("data", msg);
3741 notify->post();
3742 }
3743 break;
3744 }
3745
3746 case kWhatFillThisBuffer:
3747 {
3748 /* size_t index = */updateBuffers(kPortIndexInput, msg);
3749
3750 if (mState == FLUSHING
3751 || mState == STOPPING
3752 || mState == RELEASING) {
3753 returnBuffersToCodecOnPort(kPortIndexInput);
3754 break;
3755 }
3756
3757 if (!mCSD.empty()) {
3758 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3759 CHECK_GE(index, 0);
3760
3761 // If codec-specific data had been specified as part of the format in
3762 // the call to configure() and there is more CSD left, we submit it
3763 // here; clients only get access to input buffers once this data has
3764 // been exhausted.
3766
3767 status_t err = queueCSDInputBuffer(index);
3768
3769 if (err != OK) {
3770 ALOGE("queueCSDInputBuffer failed w/ error %d",
3771 err);
3772
3773 setStickyError(err);
3774 postActivityNotificationIfPossible();
3775
3776 cancelPendingDequeueOperations();
3777 }
3778 break;
3779 }
3780 if (!mLeftover.empty()) {
3781 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3782 CHECK_GE(index, 0);
3783
3784 status_t err = handleLeftover(index);
3785 if (err != OK) {
3786 setStickyError(err);
3787 postActivityNotificationIfPossible();
3788 cancelPendingDequeueOperations();
3789 }
3790 break;
3791 }
3792
3793 if (mFlags & kFlagIsAsync) {
3794 if (!mHaveInputSurface) {
3795 if (mState == FLUSHED) {
3796 mHavePendingInputBuffers = true;
3797 } else {
3798 onInputBufferAvailable();
3799 }
3800 }
3801 } else if (mFlags & kFlagDequeueInputPending) {
3802 CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
3803
3804 ++mDequeueInputTimeoutGeneration;
3805 mFlags &= ~kFlagDequeueInputPending;
3806 mDequeueInputReplyID = 0;
3807 } else {
3808 postActivityNotificationIfPossible();
3809 }
3810 break;
3811 }
3812
3813 case kWhatDrainThisBuffer:
3814 {
3815 if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
3816 sp<RefBase> obj;
3817 CHECK(msg->findObject("buffer", &obj));
3818 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
3819 if (mFlags & kFlagIsAsync) {
3820 // In asynchronous mode, output format change is processed immediately.
3821 handleOutputFormatChangeIfNeeded(buffer);
3822 } else {
3823 postActivityNotificationIfPossible();
3824 }
3825 mBufferChannel->discardBuffer(buffer);
3826 break;
3827 }
3828
3829 /* size_t index = */updateBuffers(kPortIndexOutput, msg);
3830
3831 if (mState == FLUSHING
3832 || mState == STOPPING
3833 || mState == RELEASING) {
3834 returnBuffersToCodecOnPort(kPortIndexOutput);
3835 break;
3836 }
3837
3838 if (mFlags & kFlagIsAsync) {
3839 sp<RefBase> obj;
3840 CHECK(msg->findObject("buffer", &obj));
3841 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
3842
3843 // In asynchronous mode, output format change is processed immediately.
3844 handleOutputFormatChangeIfNeeded(buffer);
3845 onOutputBufferAvailable();
3846 } else if (mFlags & kFlagDequeueOutputPending) {
3847 CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
3848
3849 ++mDequeueOutputTimeoutGeneration;
3850 mFlags &= ~kFlagDequeueOutputPending;
3851 mDequeueOutputReplyID = 0;
3852 } else {
3853 postActivityNotificationIfPossible();
3854 }
3855
3856 break;
3857 }
3858
3859 case kWhatEOS:
3860 {
3861 // We already notify the client of this by using the
3862 // corresponding flag in "onOutputBufferReady".
3863 break;
3864 }
3865
3866 case kWhatStopCompleted:
3867 {
3868 if (mState != STOPPING) {
3869 ALOGW("Received kWhatStopCompleted in state %d/%s",
3870 mState, stateString(mState).c_str());
3871 break;
3872 }
3873 setState(INITIALIZED);
3874 if (mReplyID) {
3875 postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
3876 } else {
3877 ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
3878 "but the operation completed anyway. (last reply origin=%s)",
3879 mLastReplyOrigin.c_str());
3880 }
3881 break;
3882 }
3883
3884 case kWhatReleaseCompleted:
3885 {
3886 if (mState != RELEASING) {
3887 ALOGW("Received kWhatReleaseCompleted in state %d/%s",
3888 mState, stateString(mState).c_str());
3889 break;
3890 }
3891 setState(UNINITIALIZED);
3892 mComponentName.clear();
3893
3894 mFlags &= ~kFlagIsComponentAllocated;
3895
3896 // turn the battery stat off since we're removing all resources, including the battery-on resource
3897 if (mBatteryChecker != nullptr) {
3898 mBatteryChecker->onClientRemoved();
3899 }
3900
3901 mResourceManagerProxy->removeClient();
3902 mReleaseSurface.reset();
3903
3904 if (mReplyID != nullptr) {
3905 postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
3906 }
3907 if (mAsyncReleaseCompleteNotification != nullptr) {
3908 flushMediametrics();
3909 mAsyncReleaseCompleteNotification->post();
3910 mAsyncReleaseCompleteNotification.clear();
3911 }
3912 break;
3913 }
3914
3915 case kWhatFlushCompleted:
3916 {
3917 if (mState != FLUSHING) {
3918 ALOGW("received FlushCompleted message in state %d/%s",
3919 mState, stateString(mState).c_str());
3920 break;
3921 }
3922
3923 if (mFlags & kFlagIsAsync) {
3924 setState(FLUSHED);
3925 } else {
3926 setState(STARTED);
3927 mCodec->signalResume();
3928 }
3929
3930 postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
3931 break;
3932 }
3933
3934 default:
3935 TRESPASS();
3936 }
3937 break;
3938 }
3939
3940 case kWhatInit:
3941 {
3942 if (mState != UNINITIALIZED) {
3943 PostReplyWithError(msg, INVALID_OPERATION);
3944 break;
3945 }
3946
3947 if (mReplyID) {
3948 mDeferredMessages.push_back(msg);
3949 break;
3950 }
3951 sp<AReplyToken> replyID;
3952 CHECK(msg->senderAwaitsResponse(&replyID));
3953
3954 mReplyID = replyID;
3955 setState(INITIALIZING);
3956
3957 sp<RefBase> codecInfo;
3958 (void)msg->findObject("codecInfo", &codecInfo);
3959 AString name;
3960 CHECK(msg->findString("name", &name));
3961
3962 sp<AMessage> format = new AMessage;
3963 if (codecInfo) {
3964 format->setObject("codecInfo", codecInfo);
3965 }
3966 format->setString("componentName", name);
3967
3968 mCodec->initiateAllocateComponent(format);
3969 break;
3970 }
3971
3972 case kWhatSetNotification:
3973 {
3974 sp<AMessage> notify;
3975 if (msg->findMessage("on-frame-rendered", &notify)) {
3976 mOnFrameRenderedNotification = notify;
3977 }
3978 if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
3979 mOnFirstTunnelFrameReadyNotification = notify;
3980 }
3981 break;
3982 }
3983
3984 case kWhatSetCallback:
3985 {
3986 sp<AReplyToken> replyID;
3987 CHECK(msg->senderAwaitsResponse(&replyID));
3988
3989 if (mState == UNINITIALIZED
3990 || mState == INITIALIZING
3991 || isExecuting()) {
3992 // callback can't be set after codec is executing,
3993 // or before it's initialized (as the callback
3994 // will be cleared when it goes to INITIALIZED)
3995 PostReplyWithError(replyID, INVALID_OPERATION);
3996 break;
3997 }
3998
3999 sp<AMessage> callback;
4000 CHECK(msg->findMessage("callback", &callback));
4001
4002 mCallback = callback;
4003
4004 if (mCallback != NULL) {
4005 ALOGI("MediaCodec will operate in async mode");
4006 mFlags |= kFlagIsAsync;
4007 } else {
4008 mFlags &= ~kFlagIsAsync;
4009 }
4010
4011 sp<AMessage> response = new AMessage;
4012 response->postReply(replyID);
4013 break;
4014 }
4015
4016 case kWhatGetMetrics:
4017 {
4018 onGetMetrics(msg);
4019 break;
4020 }
4021
4022
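// Configure flow: adopt a copy of the caller's metrics handle, set up the output surface
// (if any), take ownership of the crypto/descrambler objects, extract any csd-* buffers from
// the format, then hand the format to the component via initiateConfigureComponent(); the
// reply is posted from kWhatComponentConfigured.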
4023 case kWhatConfigure:
4024 {
4025 if (mState != INITIALIZED) {
4026 PostReplyWithError(msg, INVALID_OPERATION);
4027 break;
4028 }
4029
4030 if (mReplyID) {
4031 mDeferredMessages.push_back(msg);
4032 break;
4033 }
4034 sp<AReplyToken> replyID;
4035 CHECK(msg->senderAwaitsResponse(&replyID));
4036
4037 sp<RefBase> obj;
4038 CHECK(msg->findObject("surface", &obj));
4039
4040 sp<AMessage> format;
4041 CHECK(msg->findMessage("format", &format));
4042
4043 // start with a copy of the passed metrics info for use in this run
4044 mediametrics_handle_t handle;
4045 CHECK(msg->findInt64("metrics", &handle));
4046 if (handle != 0) {
4047 if (mMetricsHandle != 0) {
4048 flushMediametrics();
4049 }
4050 mMetricsHandle = mediametrics_dup(handle);
4051 // and set some additional metrics values
4052 initMediametrics();
4053 }
4054
4055 int32_t push;
4056 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
4057 mFlags |= kFlagPushBlankBuffersOnShutdown;
4058 }
4059
4060 if (obj != NULL) {
4061 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
4062 // allow frame dropping by surface by default
4063 mAllowFrameDroppingBySurface = true;
4064 }
4065
4066 format->setObject("native-window", obj);
4067 status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
4068 if (err != OK) {
4069 PostReplyWithError(replyID, err);
4070 break;
4071 }
4072 } else {
4073 // we are not using a surface, so this variable is unused; initialize it sensibly anyway
4074 mAllowFrameDroppingBySurface = false;
4075
4076 handleSetSurface(NULL);
4077 }
4078
4079 uint32_t flags;
4080 CHECK(msg->findInt32("flags", (int32_t *)&flags));
4081 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
4082 if (!(mFlags & kFlagIsAsync)) {
4083 PostReplyWithError(replyID, INVALID_OPERATION);
4084 break;
4085 }
4086 mFlags |= kFlagUseBlockModel;
4087 }
4088 mReplyID = replyID;
4089 setState(CONFIGURING);
4090
4091 void *crypto;
4092 if (!msg->findPointer("crypto", &crypto)) {
4093 crypto = NULL;
4094 }
4095
4096 ALOGV("kWhatConfigure: Old mCrypto: %p (%d)",
4097 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4098
4099 mCrypto = static_cast<ICrypto *>(crypto);
4100 mBufferChannel->setCrypto(mCrypto);
4101
4102 ALOGV("kWhatConfigure: New mCrypto: %p (%d)",
4103 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4104
4105 void *descrambler;
4106 if (!msg->findPointer("descrambler", &descrambler)) {
4107 descrambler = NULL;
4108 }
4109
4110 mDescrambler = static_cast<IDescrambler *>(descrambler);
4111 mBufferChannel->setDescrambler(mDescrambler);
4112
4113 format->setInt32("flags", flags);
4114 if (flags & CONFIGURE_FLAG_ENCODE) {
4115 format->setInt32("encoder", true);
4116 mFlags |= kFlagIsEncoder;
4117 }
4118
4119 extractCSD(format);
4120
4121 int32_t tunneled;
4122 if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
4123 ALOGI("Configuring TUNNELED video playback.");
4124 mTunneled = true;
4125 } else {
4126 mTunneled = false;
4127 }
4128
4129 int32_t background = 0;
4130 if (format->findInt32("android._background-mode", &background) && background) {
4131 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
4132 }
4133
4134 mCodec->initiateConfigureComponent(format);
4135 break;
4136 }
4137
4138 case kWhatSetSurface:
4139 {
4140 sp<AReplyToken> replyID;
4141 CHECK(msg->senderAwaitsResponse(&replyID));
4142
4143 status_t err = OK;
4144
4145 switch (mState) {
4146 case CONFIGURED:
4147 case STARTED:
4148 case FLUSHED:
4149 {
4150 sp<RefBase> obj;
4151 (void)msg->findObject("surface", &obj);
4152 sp<Surface> surface = static_cast<Surface *>(obj.get());
4153 if (mSurface == NULL) {
4154 // do not support setting surface if it was not set
4155 err = INVALID_OPERATION;
4156 } else if (obj == NULL) {
4157 // do not support unsetting surface
4158 err = BAD_VALUE;
4159 } else {
4160 err = connectToSurface(surface);
4161 if (err == ALREADY_EXISTS) {
4162 // reconnecting to same surface
4163 err = OK;
4164 } else {
4165 if (err == OK) {
4166 if (mFlags & kFlagUsesSoftwareRenderer) {
4167 if (mSoftRenderer != NULL
4168 && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
4169 pushBlankBuffersToNativeWindow(mSurface.get());
4170 }
4171 surface->setDequeueTimeout(-1);
4172 mSoftRenderer = new SoftwareRenderer(surface);
4173 // TODO: check if this was successful
4174 } else {
4175 err = mCodec->setSurface(surface);
4176 }
4177 }
4178 if (err == OK) {
4179 (void)disconnectFromSurface();
4180 mSurface = surface;
4181 }
4182 }
4183 }
4184 break;
4185 }
4186
4187 default:
4188 err = INVALID_OPERATION;
4189 break;
4190 }
4191
4192 PostReplyWithError(replyID, err);
4193 break;
4194 }
4195
4196 case kWhatCreateInputSurface:
4197 case kWhatSetInputSurface:
4198 {
4199 // Must be configured, but can't have been started yet.
4200 if (mState != CONFIGURED) {
4201 PostReplyWithError(msg, INVALID_OPERATION);
4202 break;
4203 }
4204
4205 if (mReplyID) {
4206 mDeferredMessages.push_back(msg);
4207 break;
4208 }
4209 sp<AReplyToken> replyID;
4210 CHECK(msg->senderAwaitsResponse(&replyID));
4211
4212 mReplyID = replyID;
4213 if (msg->what() == kWhatCreateInputSurface) {
4214 mCodec->initiateCreateInputSurface();
4215 } else {
4216 sp<RefBase> obj;
4217 CHECK(msg->findObject("input-surface", &obj));
4218
4219 mCodec->initiateSetInputSurface(
4220 static_cast<PersistentSurface *>(obj.get()));
4221 }
4222 break;
4223 }
4224 case kWhatStart:
4225 {
4226 if (mState == FLUSHED) {
4227 setState(STARTED);
4228 if (mHavePendingInputBuffers) {
4229 onInputBufferAvailable();
4230 mHavePendingInputBuffers = false;
4231 }
4232 mCodec->signalResume();
4233 PostReplyWithError(msg, OK);
4234 break;
4235 } else if (mState != CONFIGURED) {
4236 PostReplyWithError(msg, INVALID_OPERATION);
4237 break;
4238 }
4239
4240 if (mReplyID) {
4241 mDeferredMessages.push_back(msg);
4242 break;
4243 }
4244 sp<AReplyToken> replyID;
4245 CHECK(msg->senderAwaitsResponse(&replyID));
4246 TunnelPeekState previousState = mTunnelPeekState;
4247 if (previousState != TunnelPeekState::kLegacyMode) {
4248 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
4249 ALOGV("TunnelPeekState: %s -> %s",
4250 asString(previousState),
4251 asString(TunnelPeekState::kEnabledNoBuffer));
4252 }
4253
4254 mReplyID = replyID;
4255 setState(STARTING);
4256
4257 mCodec->initiateStart();
4258 break;
4259 }
4260
4261 case kWhatStop: {
4262 if (mReplyID) {
4263 mDeferredMessages.push_back(msg);
4264 break;
4265 }
4266 [[fallthrough]];
4267 }
4268 case kWhatRelease:
4269 {
4270 State targetState =
4271 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
4272
4273 if ((mState == RELEASING && targetState == UNINITIALIZED)
4274 || (mState == STOPPING && targetState == INITIALIZED)) {
4275 mDeferredMessages.push_back(msg);
4276 break;
4277 }
4278
4279 sp<AReplyToken> replyID;
4280 CHECK(msg->senderAwaitsResponse(&replyID));
4281
4282 sp<AMessage> asyncNotify;
4283 (void)msg->findMessage("async", &asyncNotify);
4284 // post asyncNotify if going out of scope.
4285 struct AsyncNotifyPost {
4286 AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
4287 ~AsyncNotifyPost() {
4288 if (mAsyncNotify) {
4289 mAsyncNotify->post();
4290 }
4291 }
4292 void clear() { mAsyncNotify.clear(); }
4293 private:
4294 sp<AMessage> mAsyncNotify;
4295 } asyncNotifyPost{asyncNotify};
4296
4297 // already stopped/released
4298 if (mState == UNINITIALIZED && mReleasedByResourceManager) {
4299 sp<AMessage> response = new AMessage;
4300 response->setInt32("err", OK);
4301 response->postReply(replyID);
4302 break;
4303 }
4304
4305 int32_t reclaimed = 0;
4306 msg->findInt32("reclaimed", &reclaimed);
4307 if (reclaimed) {
4308 if (!mReleasedByResourceManager) {
4309 // notify the async client
4310 if (mFlags & kFlagIsAsync) {
4311 onError(DEAD_OBJECT, ACTION_CODE_FATAL);
4312 }
4313 mReleasedByResourceManager = true;
4314 }
4315
4316 int32_t force = 0;
4317 msg->findInt32("force", &force);
4318 if (!force && hasPendingBuffer()) {
4319 ALOGW("Can't reclaim codec right now due to pending buffers.");
4320
4321 // return WOULD_BLOCK to ask resource manager to retry later.
4322 sp<AMessage> response = new AMessage;
4323 response->setInt32("err", WOULD_BLOCK);
4324 response->postReply(replyID);
4325
4326 break;
4327 }
4328 }
4329
4330 bool isReleasingAllocatedComponent =
4331 (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
4332 if (!isReleasingAllocatedComponent // See 1
4333 && mState != INITIALIZED
4334 && mState != CONFIGURED && !isExecuting()) {
4335 // 1) Permit release to shut down the component if allocated.
4336 //
4337 // 2) We may be in "UNINITIALIZED" state already and
4338 // also shutdown the encoder/decoder without the
4339 // client being aware of this if media server died while
4340 // we were being stopped. The client would assume that
4341 // after stop() returned, it would be safe to call release()
4342 // and it should be in this case, no harm to allow a release()
4343 // if we're already uninitialized.
4344 sp<AMessage> response = new AMessage;
4345 // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
4346 // the previous stop/release completes and then reply with OK.
4347 status_t err = mState == targetState ? OK : INVALID_OPERATION;
4348 response->setInt32("err", err);
4349 if (err == OK && targetState == UNINITIALIZED) {
4350 mComponentName.clear();
4351 }
4352 response->postReply(replyID);
4353 break;
4354 }
4355
4356 // If we're flushing, configuring or starting but
4357 // received a release request, post the reply for the pending call
4358 // first, and consider it done. The reply token will be replaced
4359 // after this, and we'll no longer be able to reply.
4360 if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
4361 // mReply is always set if in these states.
4362 postPendingRepliesAndDeferredMessages(
4363 std::string("kWhatRelease:") + stateString(mState));
4364 }
4365 // If we're stopping but received a release request, post the reply
4366 // for the pending call if necessary. Note that the reply may have been
4367 // already posted due to an error.
4368 if (mState == STOPPING && mReplyID) {
4369 postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
4370 }
4371
4372 if (mFlags & kFlagSawMediaServerDie) {
4373 // It's dead, Jim. Don't expect initiateShutdown to yield
4374 // any useful results now...
4375 // Any pending reply would have been handled at kWhatError.
4376 setState(UNINITIALIZED);
4377 if (targetState == UNINITIALIZED) {
4378 mComponentName.clear();
4379 }
4380 (new AMessage)->postReply(replyID);
4381 break;
4382 }
4383
4384 // If we already have an error, component may not be able to
4385 // complete the shutdown properly. If we're stopping, post the
4386 // reply now with an error to unblock the client, client can
4387 // release after the failure (instead of ANR).
4388 if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
4389 // Any pending reply would have been handled at kWhatError.
4390 PostReplyWithError(replyID, getStickyError());
4391 break;
4392 }
4393
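// For an asynchronous release while an output surface is attached, try to switch the codec to
// an internally owned placeholder surface first so the client's surface can be detached right
// away; if that switch fails, fall back to a synchronous release.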
4394 bool forceSync = false;
4395 if (asyncNotify != nullptr && mSurface != NULL) {
4396 if (!mReleaseSurface) {
4397 uint64_t usage = 0;
4398 if (mSurface->getConsumerUsage(&usage) != OK) {
4399 usage = 0;
4400 }
4401 mReleaseSurface.reset(new ReleaseSurface(usage));
4402 }
4403 if (mSurface != mReleaseSurface->getSurface()) {
4404 status_t err = connectToSurface(mReleaseSurface->getSurface());
4405 ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
4406 if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
4407 err = mCodec->setSurface(mReleaseSurface->getSurface());
4408 ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
4409 }
4410 if (err == OK) {
4411 (void)disconnectFromSurface();
4412 mSurface = mReleaseSurface->getSurface();
4413 } else {
4414 // We were not able to switch the surface, so force
4415 // synchronous release.
4416 forceSync = true;
4417 }
4418 }
4419 }
4420
4421 if (mReplyID) {
4422 // State transition replies are handled above, so this reply
4423 // would not be related to state transition. As we are
4424 // shutting down the component, just fail the operation.
4425 postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
4426 }
4427 mReplyID = replyID;
4428 setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
4429
4430 mCodec->initiateShutdown(
4431 msg->what() == kWhatStop /* keepComponentAllocated */);
4432
4433 returnBuffersToCodec(reclaimed);
4434
4435 if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
4436 pushBlankBuffersToNativeWindow(mSurface.get());
4437 }
4438
4439 if (asyncNotify != nullptr) {
4440 if (!forceSync) {
4441 mResourceManagerProxy->markClientForPendingRemoval();
4442 postPendingRepliesAndDeferredMessages("kWhatRelease:async");
4443 }
4444 asyncNotifyPost.clear();
4445 mAsyncReleaseCompleteNotification = asyncNotify;
4446 }
4447
4448 break;
4449 }
4450
4451 case kWhatDequeueInputBuffer:
4452 {
4453 sp<AReplyToken> replyID;
4454 CHECK(msg->senderAwaitsResponse(&replyID));
4455
4456 if (mFlags & kFlagIsAsync) {
4457 ALOGE("dequeueInputBuffer can't be used in async mode");
4458 PostReplyWithError(replyID, INVALID_OPERATION);
4459 break;
4460 }
4461
4462 if (mHaveInputSurface) {
4463 ALOGE("dequeueInputBuffer can't be used with input surface");
4464 PostReplyWithError(replyID, INVALID_OPERATION);
4465 break;
4466 }
4467
4468 if (handleDequeueInputBuffer(replyID, true /* new request */)) {
4469 break;
4470 }
4471
4472 int64_t timeoutUs;
4473 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
4474
4475 if (timeoutUs == 0LL) {
4476 PostReplyWithError(replyID, -EAGAIN);
4477 break;
4478 }
4479
4480 mFlags |= kFlagDequeueInputPending;
4481 mDequeueInputReplyID = replyID;
4482
4483 if (timeoutUs > 0LL) {
4484 sp<AMessage> timeoutMsg =
4485 new AMessage(kWhatDequeueInputTimedOut, this);
4486 timeoutMsg->setInt32(
4487 "generation", ++mDequeueInputTimeoutGeneration);
4488 timeoutMsg->post(timeoutUs);
4489 }
4490 break;
4491 }
4492
4493 case kWhatDequeueInputTimedOut:
4494 {
4495 int32_t generation;
4496 CHECK(msg->findInt32("generation", &generation));
4497
4498 if (generation != mDequeueInputTimeoutGeneration) {
4499 // Obsolete
4500 break;
4501 }
4502
4503 CHECK(mFlags & kFlagDequeueInputPending);
4504
4505 PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
4506
4507 mFlags &= ~kFlagDequeueInputPending;
4508 mDequeueInputReplyID = 0;
4509 break;
4510 }
4511
4512 case kWhatQueueInputBuffer:
4513 {
4514 sp<AReplyToken> replyID;
4515 CHECK(msg->senderAwaitsResponse(&replyID));
4516
4517 if (!isExecuting()) {
4518 PostReplyWithError(replyID, INVALID_OPERATION);
4519 break;
4520 } else if (mFlags & kFlagStickyError) {
4521 PostReplyWithError(replyID, getStickyError());
4522 break;
4523 }
4524
4525 status_t err = UNKNOWN_ERROR;
4526 if (!mLeftover.empty()) {
4527 mLeftover.push_back(msg);
4528 size_t index;
4529 msg->findSize("index", &index);
4530 err = handleLeftover(index);
4531 } else {
4532 err = onQueueInputBuffer(msg);
4533 }
4534
4535 PostReplyWithError(replyID, err);
4536 break;
4537 }
4538
4539 case kWhatDequeueOutputBuffer:
4540 {
4541 sp<AReplyToken> replyID;
4542 CHECK(msg->senderAwaitsResponse(&replyID));
4543
4544 if (mFlags & kFlagIsAsync) {
4545 ALOGE("dequeueOutputBuffer can't be used in async mode");
4546 PostReplyWithError(replyID, INVALID_OPERATION);
4547 break;
4548 }
4549
4550 if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
4551 break;
4552 }
4553
4554 int64_t timeoutUs;
4555 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
4556
4557 if (timeoutUs == 0LL) {
4558 PostReplyWithError(replyID, -EAGAIN);
4559 break;
4560 }
4561
4562 mFlags |= kFlagDequeueOutputPending;
4563 mDequeueOutputReplyID = replyID;
4564
4565 if (timeoutUs > 0LL) {
4566 sp<AMessage> timeoutMsg =
4567 new AMessage(kWhatDequeueOutputTimedOut, this);
4568 timeoutMsg->setInt32(
4569 "generation", ++mDequeueOutputTimeoutGeneration);
4570 timeoutMsg->post(timeoutUs);
4571 }
4572 break;
4573 }
4574
4575 case kWhatDequeueOutputTimedOut:
4576 {
4577 int32_t generation;
4578 CHECK(msg->findInt32("generation", &generation));
4579
4580 if (generation != mDequeueOutputTimeoutGeneration) {
4581 // Obsolete
4582 break;
4583 }
4584
4585 CHECK(mFlags & kFlagDequeueOutputPending);
4586
4587 PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
4588
4589 mFlags &= ~kFlagDequeueOutputPending;
4590 mDequeueOutputReplyID = 0;
4591 break;
4592 }
4593
4594 case kWhatReleaseOutputBuffer:
4595 {
4596 sp<AReplyToken> replyID;
4597 CHECK(msg->senderAwaitsResponse(&replyID));
4598
4599 if (!isExecuting()) {
4600 PostReplyWithError(replyID, INVALID_OPERATION);
4601 break;
4602 } else if (mFlags & kFlagStickyError) {
4603 PostReplyWithError(replyID, getStickyError());
4604 break;
4605 }
4606
4607 status_t err = onReleaseOutputBuffer(msg);
4608
4609 PostReplyWithError(replyID, err);
4610 break;
4611 }
4612
4613 case kWhatSignalEndOfInputStream:
4614 {
4615 if (!isExecuting() || !mHaveInputSurface) {
4616 PostReplyWithError(msg, INVALID_OPERATION);
4617 break;
4618 } else if (mFlags & kFlagStickyError) {
4619 PostReplyWithError(msg, getStickyError());
4620 break;
4621 }
4622
4623 if (mReplyID) {
4624 mDeferredMessages.push_back(msg);
4625 break;
4626 }
4627 sp<AReplyToken> replyID;
4628 CHECK(msg->senderAwaitsResponse(&replyID));
4629
4630 mReplyID = replyID;
4631 mCodec->signalEndOfInputStream();
4632 break;
4633 }
4634
4635 case kWhatGetBuffers:
4636 {
4637 sp<AReplyToken> replyID;
4638 CHECK(msg->senderAwaitsResponse(&replyID));
4639 if (!isExecuting() || (mFlags & kFlagIsAsync)) {
4640 PostReplyWithError(replyID, INVALID_OPERATION);
4641 break;
4642 } else if (mFlags & kFlagStickyError) {
4643 PostReplyWithError(replyID, getStickyError());
4644 break;
4645 }
4646
4647 int32_t portIndex;
4648 CHECK(msg->findInt32("portIndex", &portIndex));
4649
4650 Vector<sp<MediaCodecBuffer> > *dstBuffers;
4651 CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
4652
4653 dstBuffers->clear();
4654 // If we're using input surface (either non-persistent created by
4655 // createInputSurface(), or persistent set by setInputSurface()),
4656 // give the client an empty input buffers array.
4657 if (portIndex != kPortIndexInput || !mHaveInputSurface) {
4658 if (portIndex == kPortIndexInput) {
4659 mBufferChannel->getInputBufferArray(dstBuffers);
4660 } else {
4661 mBufferChannel->getOutputBufferArray(dstBuffers);
4662 }
4663 }
4664
4665 (new AMessage)->postReply(replyID);
4666 break;
4667 }
4668
4669 case kWhatFlush:
4670 {
4671 if (!isExecuting()) {
4672 PostReplyWithError(msg, INVALID_OPERATION);
4673 break;
4674 } else if (mFlags & kFlagStickyError) {
4675 PostReplyWithError(msg, getStickyError());
4676 break;
4677 }
4678
4679 if (mReplyID) {
4680 mDeferredMessages.push_back(msg);
4681 break;
4682 }
4683 sp<AReplyToken> replyID;
4684 CHECK(msg->senderAwaitsResponse(&replyID));
4685
4686 mReplyID = replyID;
4687 // TODO: skip flushing if already FLUSHED
4688 setState(FLUSHING);
4689
4690 mCodec->signalFlush();
4691 returnBuffersToCodec();
4692 TunnelPeekState previousState = mTunnelPeekState;
4693 if (previousState != TunnelPeekState::kLegacyMode) {
4694 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
4695 ALOGV("TunnelPeekState: %s -> %s",
4696 asString(previousState),
4697 asString(TunnelPeekState::kEnabledNoBuffer));
4698 }
4699 break;
4700 }
4701
4702 case kWhatGetInputFormat:
4703 case kWhatGetOutputFormat:
4704 {
4705 sp<AMessage> format =
4706 (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
4707
4708 sp<AReplyToken> replyID;
4709 CHECK(msg->senderAwaitsResponse(&replyID));
4710
4711 if ((mState != CONFIGURED && mState != STARTING &&
4712 mState != STARTED && mState != FLUSHING &&
4713 mState != FLUSHED)
4714 || format == NULL) {
4715 PostReplyWithError(replyID, INVALID_OPERATION);
4716 break;
4717 } else if (mFlags & kFlagStickyError) {
4718 PostReplyWithError(replyID, getStickyError());
4719 break;
4720 }
4721
4722 sp<AMessage> response = new AMessage;
4723 response->setMessage("format", format);
4724 response->postReply(replyID);
4725 break;
4726 }
4727
4728 case kWhatRequestIDRFrame:
4729 {
4730 mCodec->signalRequestIDRFrame();
4731 break;
4732 }
4733
4734 case kWhatRequestActivityNotification:
4735 {
4736 CHECK(mActivityNotify == NULL);
4737 CHECK(msg->findMessage("notify", &mActivityNotify));
4738
4739 postActivityNotificationIfPossible();
4740 break;
4741 }
4742
4743 case kWhatGetName:
4744 {
4745 sp<AReplyToken> replyID;
4746 CHECK(msg->senderAwaitsResponse(&replyID));
4747
4748 if (mComponentName.empty()) {
4749 PostReplyWithError(replyID, INVALID_OPERATION);
4750 break;
4751 }
4752
4753 sp<AMessage> response = new AMessage;
4754 response->setString("name", mComponentName.c_str());
4755 response->postReply(replyID);
4756 break;
4757 }
4758
4759 case kWhatGetCodecInfo:
4760 {
4761 sp<AReplyToken> replyID;
4762 CHECK(msg->senderAwaitsResponse(&replyID));
4763
4764 sp<AMessage> response = new AMessage;
4765 response->setObject("codecInfo", mCodecInfo);
4766 response->postReply(replyID);
4767 break;
4768 }
4769
4770 case kWhatSetParameters:
4771 {
4772 sp<AReplyToken> replyID;
4773 CHECK(msg->senderAwaitsResponse(&replyID));
4774
4775 sp<AMessage> params;
4776 CHECK(msg->findMessage("params", &params));
4777
4778 status_t err = onSetParameters(params);
4779
4780 PostReplyWithError(replyID, err);
4781 break;
4782 }
4783
4784 case kWhatDrmReleaseCrypto:
4785 {
4786 onReleaseCrypto(msg);
4787 break;
4788 }
4789
4790 case kWhatCheckBatteryStats:
4791 {
4792 if (mBatteryChecker != nullptr) {
4793 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
4794 mResourceManagerProxy->removeResource(
4795 MediaResource::VideoBatteryResource());
4796 });
4797 }
4798 break;
4799 }
4800
4801 default:
4802 TRESPASS();
4803 }
4804 }
4805
4806 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
4807 sp<AMessage> format = buffer->format();
4808 if (mOutputFormat == format) {
4809 return;
4810 }
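// In block model, attach the set of format keys that differ (in either direction) between the
// old and new output formats to the buffer's meta as "changedKeys", so the client can tell
// exactly which fields changed along with this buffer.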
4811 if (mFlags & kFlagUseBlockModel) {
4812 sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
4813 sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
4814 std::set<std::string> keys;
4815 size_t numEntries = diff1->countEntries();
4816 AMessage::Type type;
4817 for (size_t i = 0; i < numEntries; ++i) {
4818 keys.emplace(diff1->getEntryNameAt(i, &type));
4819 }
4820 numEntries = diff2->countEntries();
4821 for (size_t i = 0; i < numEntries; ++i) {
4822 keys.emplace(diff2->getEntryNameAt(i, &type));
4823 }
4824 sp<WrapperObject<std::set<std::string>>> changedKeys{
4825 new WrapperObject<std::set<std::string>>{std::move(keys)}};
4826 buffer->meta()->setObject("changedKeys", changedKeys);
4827 }
4828 mOutputFormat = format;
4829 mapFormat(mComponentName, format, nullptr, true);
4830 ALOGV("[%s] output format changed to: %s",
4831 mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
4832
4833 if (mSoftRenderer == NULL &&
4834 mSurface != NULL &&
4835 (mFlags & kFlagUsesSoftwareRenderer)) {
4836 AString mime;
4837 CHECK(mOutputFormat->findString("mime", &mime));
4838
4839 // TODO: propagate color aspects to software renderer to allow better
4840 // color conversion to RGB. For now, just mark dataspace for YUV
4841 // rendering.
4842 int32_t dataSpace;
4843 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
4844 ALOGD("[%s] setting dataspace on output surface to #%x",
4845 mComponentName.c_str(), dataSpace);
4846 int err = native_window_set_buffers_data_space(
4847 mSurface.get(), (android_dataspace)dataSpace);
4848 ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
4849 }
4850 if (mOutputFormat->contains("hdr-static-info")) {
4851 HDRStaticInfo info;
4852 if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
4853 setNativeWindowHdrMetadata(mSurface.get(), &info);
4854 }
4855 }
4856
4857 sp<ABuffer> hdr10PlusInfo;
4858 if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
4859 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
4860 native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
4861 hdr10PlusInfo->size(), hdr10PlusInfo->data());
4862 }
4863
4864 if (mime.startsWithIgnoreCase("video/")) {
4865 mSurface->setDequeueTimeout(-1);
4866 mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
4867 }
4868 }
4869
4870 requestCpuBoostIfNeeded();
4871
4872 if (mFlags & kFlagIsEncoder) {
4873 // Before we announce the format change we should
4874 // collect codec specific data and amend the output
4875 // format as necessary.
4876 int32_t flags = 0;
4877 (void) buffer->meta()->findInt32("flags", &flags);
4878 if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
4879 && !mOwnerName.startsWith("codec2::")) {
4880 status_t err =
4881 amendOutputFormatWithCodecSpecificData(buffer);
4882
4883 if (err != OK) {
4884 ALOGE("Codec spit out malformed codec "
4885 "specific data!");
4886 }
4887 }
4888 }
4889 if (mFlags & kFlagIsAsync) {
4890 onOutputFormatChanged();
4891 } else {
4892 mFlags |= kFlagOutputFormatChanged;
4893 postActivityNotificationIfPossible();
4894 }
4895
4896 // Notify mCrypto of video resolution changes
4897 if (mCrypto != NULL) {
4898 int32_t left, top, right, bottom, width, height;
4899 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
4900 mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
4901 } else if (mOutputFormat->findInt32("width", &width)
4902 && mOutputFormat->findInt32("height", &height)) {
4903 mCrypto->notifyResolution(width, height);
4904 }
4905 }
4906
4907 updateHdrMetrics(false /* isConfig */);
4908 }
4909
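// Collects the csd-0, csd-1, ... buffers supplied with the configure() format; these are
// queued to the codec ahead of any client input (see the kWhatFillThisBuffer handling).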
4910 void MediaCodec::extractCSD(const sp<AMessage> &format) {
4911 mCSD.clear();
4912
4913 size_t i = 0;
4914 for (;;) {
4915 sp<ABuffer> csd;
4916 if (!format->findBuffer(AStringPrintf("csd-%u", i).c_str(), &csd)) {
4917 break;
4918 }
4919 if (csd->size() == 0) {
4920 ALOGW("csd-%zu size is 0", i);
4921 }
4922
4923 mCSD.push_back(csd);
4924 ++i;
4925 }
4926
4927 ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
4928 }
4929
4930 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
4931 CHECK(!mCSD.empty());
4932
4933 sp<ABuffer> csd = *mCSD.begin();
4934 mCSD.erase(mCSD.begin());
4935 std::shared_ptr<C2Buffer> c2Buffer;
4936 sp<hardware::HidlMemory> memory;
4937
4938 if (mFlags & kFlagUseBlockModel) {
4939 if (hasCryptoOrDescrambler()) {
4940 constexpr size_t kInitialDealerCapacity = 1048576; // 1MB
4941 thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
4942 kInitialDealerCapacity, "CSD(1MB)");
4943 sp<IMemory> mem = sDealer->allocate(csd->size());
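// The thread-local dealer starts at 1MB; if this CSD does not fit, replace the dealer with
// one whose capacity is doubled until it is at least twice the CSD size, then retry the
// allocation from the new dealer.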
4944 if (mem == nullptr) {
4945 size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
4946 while (csd->size() * 2 > newDealerCapacity) {
4947 newDealerCapacity *= 2;
4948 }
4949 sDealer = new MemoryDealer(
4950 newDealerCapacity,
4951 AStringPrintf("CSD(%dMB)", newDealerCapacity / 1048576).c_str());
4952 mem = sDealer->allocate(csd->size());
4953 }
4954 memcpy(mem->unsecurePointer(), csd->data(), csd->size());
4955 ssize_t heapOffset;
4956 memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
4957 } else {
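// No crypto involved: stage the CSD in a Codec2 linear block and wrap it as a
// C2Buffer for the block-model queue path.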
4958 std::shared_ptr<C2LinearBlock> block =
4959 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
4960 C2WriteView view{block->map().get()};
4961 if (view.error() != C2_OK) {
4962 return -EINVAL;
4963 }
4964 if (csd->size() > view.capacity()) {
4965 return -EINVAL;
4966 }
4967 memcpy(view.base(), csd->data(), csd->size());
4968 c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
4969 }
4970 } else {
4971 const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
4972 const sp<MediaCodecBuffer> &codecInputData = info.mData;
4973
4974 if (csd->size() > codecInputData->capacity()) {
4975 return -EINVAL;
4976 }
4977 if (codecInputData->data() == NULL) {
4978 ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
4979 return -EINVAL;
4980 }
4981
4982 memcpy(codecInputData->data(), csd->data(), csd->size());
4983 }
4984
4985 AString errorDetailMsg;
4986
4987 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
4988 msg->setSize("index", bufferIndex);
4989 msg->setSize("offset", 0);
4990 msg->setSize("size", csd->size());
4991 msg->setInt64("timeUs", 0LL);
4992 msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
4993 msg->setPointer("errorDetailMsg", &errorDetailMsg);
4994 if (c2Buffer) {
4995 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
4996 new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
4997 msg->setObject("c2buffer", obj);
4998 } else if (memory) {
4999 sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
5000 new WrapperObject<sp<hardware::HidlMemory>>{memory}};
5001 msg->setObject("memory", obj);
5002 }
5003
5004 return onQueueInputBuffer(msg);
5005 }
5006
5007 void MediaCodec::setState(State newState) {
5008 if (newState == INITIALIZED || newState == UNINITIALIZED) {
5009 delete mSoftRenderer;
5010 mSoftRenderer = NULL;
5011
5012 if ( mCrypto != NULL ) {
5013 ALOGV("setState: ~mCrypto: %p (%d)",
5014 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
5015 }
5016 mCrypto.clear();
5017 mDescrambler.clear();
5018 handleSetSurface(NULL);
5019
5020 mInputFormat.clear();
5021 mOutputFormat.clear();
5022 mFlags &= ~kFlagOutputFormatChanged;
5023 mFlags &= ~kFlagOutputBuffersChanged;
5024 mFlags &= ~kFlagStickyError;
5025 mFlags &= ~kFlagIsEncoder;
5026 mFlags &= ~kFlagIsAsync;
5027 mStickyError = OK;
5028
5029 mActivityNotify.clear();
5030 mCallback.clear();
5031 }
5032
5033 if (newState == UNINITIALIZED) {
5034 // return any straggling buffers, e.g. if we got here on an error
5035 returnBuffersToCodec();
5036
5037 // The component is gone; mediaserver is probably back up already,
5038 // and will definitely be back up by the time we try to instantiate
5039 // another component... and the cycle continues.
5040 mFlags &= ~kFlagSawMediaServerDie;
5041 }
5042
5043 mState = newState;
5044
5045 if (mBatteryChecker != nullptr) {
5046 mBatteryChecker->setExecuting(isExecuting());
5047 }
5048
5049 cancelPendingDequeueOperations();
5050 }
5051
5052 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
5053 returnBuffersToCodecOnPort(kPortIndexInput, isReclaim);
5054 returnBuffersToCodecOnPort(kPortIndexOutput, isReclaim);
5055 }
5056
5057 void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex, bool isReclaim) {
5058 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5059 Mutex::Autolock al(mBufferLock);
5060
5061 if (portIndex == kPortIndexInput) {
5062 mLeftover.clear();
5063 }
5064 for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
5065 BufferInfo *info = &mPortBuffers[portIndex][i];
5066
5067 if (info->mData != nullptr) {
5068 sp<MediaCodecBuffer> buffer = info->mData;
5069 if (isReclaim && info->mOwnedByClient) {
5070 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
5071 portIndex, i);
5072 } else {
5073 info->mOwnedByClient = false;
5074 info->mData.clear();
5075 }
5076 mBufferChannel->discardBuffer(buffer);
5077 }
5078 }
5079
5080 mAvailPortBuffers[portIndex].clear();
5081 }
5082
5083 size_t MediaCodec::updateBuffers(
5084 int32_t portIndex, const sp<AMessage> &msg) {
5085 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5086 size_t index;
5087 CHECK(msg->findSize("index", &index));
5088 sp<RefBase> obj;
5089 CHECK(msg->findObject("buffer", &obj));
5090 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
5091
5092 {
5093 Mutex::Autolock al(mBufferLock);
5094 if (mPortBuffers[portIndex].size() <= index) {
5095 mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
5096 }
5097 mPortBuffers[portIndex][index].mData = buffer;
5098 }
5099 mAvailPortBuffers[portIndex].push_back(index);
5100
5101 return index;
5102 }
5103
5104 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
5105 size_t index;
5106 size_t offset;
5107 size_t size;
5108 int64_t timeUs;
5109 uint32_t flags;
5110 CHECK(msg->findSize("index", &index));
5111 CHECK(msg->findInt64("timeUs", &timeUs));
5112 CHECK(msg->findInt32("flags", (int32_t *)&flags));
5113 std::shared_ptr<C2Buffer> c2Buffer;
5114 sp<hardware::HidlMemory> memory;
5115 sp<RefBase> obj;
5116 if (msg->findObject("c2buffer", &obj)) {
5117 CHECK(obj);
5118 c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
5119 } else if (msg->findObject("memory", &obj)) {
5120 CHECK(obj);
5121 memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
5122 CHECK(msg->findSize("offset", &offset));
5123 } else {
5124 CHECK(msg->findSize("offset", &offset));
5125 }
5126 const CryptoPlugin::SubSample *subSamples;
5127 size_t numSubSamples;
5128 const uint8_t *key = NULL;
5129 const uint8_t *iv = NULL;
5130 CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
5131
5132 // We allow the simpler queueInputBuffer API to be used even in
5133 // secure mode, by fabricating a single unencrypted subSample.
5134 CryptoPlugin::SubSample ss;
5135 CryptoPlugin::Pattern pattern;
5136
5137 if (msg->findSize("size", &size)) {
5138 if (hasCryptoOrDescrambler()) {
5139 ss.mNumBytesOfClearData = size;
5140 ss.mNumBytesOfEncryptedData = 0;
5141
5142 subSamples = &ss;
5143 numSubSamples = 1;
5144 pattern.mEncryptBlocks = 0;
5145 pattern.mSkipBlocks = 0;
5146 }
5147 } else if (!c2Buffer) {
5148 if (!hasCryptoOrDescrambler()) {
5149 ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
5150 mComponentName.c_str());
5151 return -EINVAL;
5152 }
5153
5154 CHECK(msg->findPointer("subSamples", (void **)&subSamples));
5155 CHECK(msg->findSize("numSubSamples", &numSubSamples));
5156 CHECK(msg->findPointer("key", (void **)&key));
5157 CHECK(msg->findPointer("iv", (void **)&iv));
5158 CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
5159 CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
5160
5161 int32_t tmp;
5162 CHECK(msg->findInt32("mode", &tmp));
5163
5164 mode = (CryptoPlugin::Mode)tmp;
5165
5166 size = 0;
5167 for (size_t i = 0; i < numSubSamples; ++i) {
5168 size += subSamples[i].mNumBytesOfClearData;
5169 size += subSamples[i].mNumBytesOfEncryptedData;
5170 }
5171 }
5172
5173 if (index >= mPortBuffers[kPortIndexInput].size()) {
5174 return -ERANGE;
5175 }
5176
5177 BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
5178 sp<MediaCodecBuffer> buffer = info->mData;
5179
5180 if (c2Buffer || memory) {
5181 sp<AMessage> tunings = NULL;
5182 if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
5183 onSetParameters(tunings);
5184 }
5185
5186 status_t err = OK;
5187 if (c2Buffer) {
5188 err = mBufferChannel->attachBuffer(c2Buffer, buffer);
5189 } else if (memory) {
5190 err = mBufferChannel->attachEncryptedBuffer(
5191 memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
5192 offset, subSamples, numSubSamples, buffer);
5193 } else {
5194 err = UNKNOWN_ERROR;
5195 }
5196
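// The codec's input buffer may be smaller than the attached linear block; keep the
// unconsumed remainder as a leftover message so it is queued with the next available
// input buffer, and let EOS travel with the leftover instead of this buffer.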
5197 if (err == OK && !buffer->asC2Buffer()
5198 && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
5199 C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
5200 if (block.size() > buffer->size()) {
5201 C2ConstLinearBlock leftover = block.subBlock(
5202 block.offset() + buffer->size(), block.size() - buffer->size());
5203 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
5204 new WrapperObject<std::shared_ptr<C2Buffer>>{
5205 C2Buffer::CreateLinearBuffer(leftover)}};
5206 msg->setObject("c2buffer", obj);
5207 mLeftover.push_front(msg);
5208 // Not sending EOS if we have leftovers
5209 flags &= ~BUFFER_FLAG_EOS;
5210 }
5211 }
5212
5213 offset = buffer->offset();
5214 size = buffer->size();
5215 if (err != OK) {
5216 ALOGI("block model buffer attach failed: err = %s (%d)",
5217 StrMediaError(err).c_str(), err);
5218 return err;
5219 }
5220 }
5221
5222 if (buffer == nullptr || !info->mOwnedByClient) {
5223 return -EACCES;
5224 }
5225
5226 if (offset + size > buffer->capacity()) {
5227 return -EINVAL;
5228 }
5229
5230 buffer->setRange(offset, size);
5231 buffer->meta()->setInt64("timeUs", timeUs);
5232 if (flags & BUFFER_FLAG_EOS) {
5233 buffer->meta()->setInt32("eos", true);
5234 }
5235
5236 if (flags & BUFFER_FLAG_CODECCONFIG) {
5237 buffer->meta()->setInt32("csd", true);
5238 }
5239
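// Tunneled playback: mark the first queued frame (used for tunnel-peek) and advance
// the TunnelPeekState machine from the *NoBuffer to the corresponding *Queued state.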
5240 if (mTunneled) {
5241 TunnelPeekState previousState = mTunnelPeekState;
5242 switch(mTunnelPeekState){
5243 case TunnelPeekState::kEnabledNoBuffer:
5244 buffer->meta()->setInt32("tunnel-first-frame", 1);
5245 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
5246 ALOGV("TunnelPeekState: %s -> %s",
5247 asString(previousState),
5248 asString(mTunnelPeekState));
5249 break;
5250 case TunnelPeekState::kDisabledNoBuffer:
5251 buffer->meta()->setInt32("tunnel-first-frame", 1);
5252 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
5253 ALOGV("TunnelPeekState: %s -> %s",
5254 asString(previousState),
5255 asString(mTunnelPeekState));
5256 break;
5257 default:
5258 break;
5259 }
5260 }
5261
5262 status_t err = OK;
5263 if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
5264 AString *errorDetailMsg;
5265 CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
5266 // Notify mCrypto of video resolution changes
5267 if (mTunneled && mCrypto != NULL) {
5268 int32_t width, height;
5269 if (mInputFormat->findInt32("width", &width) &&
5270 mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
5271 if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
5272 mTunneledInputWidth = width;
5273 mTunneledInputHeight = height;
5274 mCrypto->notifyResolution(width, height);
5275 }
5276 }
5277 }
5278 err = mBufferChannel->queueSecureInputBuffer(
5279 buffer,
5280 (mFlags & kFlagIsSecure),
5281 key,
5282 iv,
5283 mode,
5284 pattern,
5285 subSamples,
5286 numSubSamples,
5287 errorDetailMsg);
5288 if (err != OK) {
5289 mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
5290 ALOGW("Log queueSecureInputBuffer error: %d", err);
5291 }
5292 } else {
5293 err = mBufferChannel->queueInputBuffer(buffer);
5294 if (err != OK) {
5295 mediametrics_setInt32(mMetricsHandle, kCodecQueueInputBufferError, err);
5296 ALOGW("Log queueInputBuffer error: %d", err);
5297 }
5298 }
5299
5300 if (err == OK) {
5301 // synchronization boundary for getBufferAndFormat
5302 Mutex::Autolock al(mBufferLock);
5303 info->mOwnedByClient = false;
5304 info->mData.clear();
5305
5306 statsBufferSent(timeUs, buffer);
5307 }
5308
5309 return err;
5310 }
5311
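// Re-queues the oldest leftover (partially consumed block-model) input, now targeting
// the newly available input buffer at the given index.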
5312 status_t MediaCodec::handleLeftover(size_t index) {
5313 if (mLeftover.empty()) {
5314 return OK;
5315 }
5316 sp<AMessage> msg = mLeftover.front();
5317 mLeftover.pop_front();
5318 msg->setSize("index", index);
5319 return onQueueInputBuffer(msg);
5320 }
5321
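// Flattens the rendered-frame records into "<n>-media-time-us" / "<n>-system-nano"
// entries on msg (e.g. "0-media-time-us", "0-system-nano", "1-media-time-us", ...),
// skipping frames dropped from tracking; returns the number of frames added.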
5322 //static
5323 size_t MediaCodec::CreateFramesRenderedMessage(
5324 const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
5325 size_t index = 0;
5326
5327 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
5328 it != done.cend(); ++it) {
5329 if (it->getRenderTimeNs() < 0) {
5330 continue; // dropped frame from tracking
5331 }
5332 msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
5333 msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
5334 ++index;
5335 }
5336 return index;
5337 }
5338
5339 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
5340 size_t index;
5341 CHECK(msg->findSize("index", &index));
5342
5343 int32_t render;
5344 if (!msg->findInt32("render", &render)) {
5345 render = 0;
5346 }
5347
5348 if (!isExecuting()) {
5349 return -EINVAL;
5350 }
5351
5352 if (index >= mPortBuffers[kPortIndexOutput].size()) {
5353 return -ERANGE;
5354 }
5355
5356 BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
5357
5358 if (info->mData == nullptr || !info->mOwnedByClient) {
5359 return -EACCES;
5360 }
5361
5362 // synchronization boundary for getBufferAndFormat
5363 sp<MediaCodecBuffer> buffer;
5364 {
5365 Mutex::Autolock al(mBufferLock);
5366 info->mOwnedByClient = false;
5367 buffer = info->mData;
5368 info->mData.clear();
5369 }
5370
5371 if (render && buffer->size() != 0) {
5372 int64_t mediaTimeUs = -1;
5373 buffer->meta()->findInt64("timeUs", &mediaTimeUs);
5374
5375 int64_t renderTimeNs = 0;
5376 if (!msg->findInt64("timestampNs", &renderTimeNs)) {
5377 // use media timestamp if client did not request a specific render timestamp
5378 ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
5379 renderTimeNs = mediaTimeUs * 1000;
5380 }
5381
5382 if (mSoftRenderer != NULL) {
5383 std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
5384 buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
5385 mPortBuffers[kPortIndexOutput].size(), buffer->format());
5386
5387 // if we are running, notify rendered frames
5388 if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
5389 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
5390 sp<AMessage> data = new AMessage;
5391 if (CreateFramesRenderedMessage(doneFrames, data)) {
5392 notify->setMessage("data", data);
5393 notify->post();
5394 }
5395 }
5396 }
5397 status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
5398
5399 if (err == NO_INIT) {
5400 ALOGE("rendering to non-initilized(obsolete) surface");
5401 return err;
5402 }
5403 if (err != OK) {
5404 ALOGI("rendring output error %d", err);
5405 }
5406 } else {
5407 mBufferChannel->discardBuffer(buffer);
5408 }
5409
5410 return OK;
5411 }
5412
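// Returns the next free buffer on the given port without claiming it, or nullptr if
// none is available.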
5413 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
5414 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5415
5416 List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
5417
5418 if (availBuffers->empty()) {
5419 return nullptr;
5420 }
5421
5422 return &mPortBuffers[portIndex][*availBuffers->begin()];
5423 }
5424
5425 ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
5426 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5427
5428 BufferInfo *info = peekNextPortBuffer(portIndex);
5429 if (!info) {
5430 return -EAGAIN;
5431 }
5432
5433 List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
5434 size_t index = *availBuffers->begin();
5435 CHECK_EQ(info, &mPortBuffers[portIndex][index]);
5436 availBuffers->erase(availBuffers->begin());
5437
5438 CHECK(!info->mOwnedByClient);
5439 {
5440 Mutex::Autolock al(mBufferLock);
5441 info->mOwnedByClient = true;
5442
5443 // set image-data
5444 if (info->mData->format() != NULL) {
5445 sp<ABuffer> imageData;
5446 if (info->mData->format()->findBuffer("image-data", &imageData)) {
5447 info->mData->meta()->setBuffer("image-data", imageData);
5448 }
5449 int32_t left, top, right, bottom;
5450 if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
5451 info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
5452 }
5453 }
5454 }
5455
5456 return index;
5457 }
5458
5459 status_t MediaCodec::connectToSurface(const sp<Surface> &surface) {
5460 status_t err = OK;
5461 if (surface != NULL) {
5462 uint64_t oldId, newId;
5463 if (mSurface != NULL
5464 && surface->getUniqueId(&newId) == NO_ERROR
5465 && mSurface->getUniqueId(&oldId) == NO_ERROR
5466 && newId == oldId) {
5467 ALOGI("[%s] connecting to the same surface. Nothing to do.", mComponentName.c_str());
5468 return ALREADY_EXISTS;
5469 }
5470
5471 // in case we don't connect, ensure that we don't signal the surface is
5472 // connected to the screen
5473 mIsSurfaceToScreen = false;
5474
5475 err = nativeWindowConnect(surface.get(), "connectToSurface");
5476 if (err == OK) {
5477 // Require a fresh set of buffers after each connect by using a unique generation
5478 // number. Rely on the fact that the maximum process id supported by Linux is 2^22.
5479 // PID is never 0 so we don't have to worry that we use the default generation of 0.
5480 // TODO: come up with a unique scheme if other producers also set the generation number.
5481 static uint32_t mSurfaceGeneration = 0;
5482 uint32_t generation = (getpid() << 10) | (++mSurfaceGeneration & ((1 << 10) - 1));
5483 surface->setGenerationNumber(generation);
5484 ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), generation);
5485
5486 // HACK: clear any free buffers. Remove this once connect does it automatically.
5487 // This is needed as the consumer may be holding onto stale frames that it can reattach
5488 // to this surface after disconnect/connect, and those free frames would inherit the new
5489 // generation number. Disconnecting after setting a unique generation prevents this.
5490 nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
5491 err = nativeWindowConnect(surface.get(), "connectToSurface(reconnect)");
5492 }
5493
5494 if (err != OK) {
5495 ALOGE("nativeWindowConnect returned an error: %s (%d)", strerror(-err), err);
5496 } else {
5497 if (!mAllowFrameDroppingBySurface) {
5498 disableLegacyBufferDropPostQ(surface);
5499 }
5500 // keep track whether or not the buffers of the connected surface go to the screen
5501 int result = 0;
5502 surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
5503 mIsSurfaceToScreen = result != 0;
5504 }
5505 }
5506 // do not return ALREADY_EXISTS unless surfaces are the same
5507 return err == ALREADY_EXISTS ? BAD_VALUE : err;
5508 }
5509
5510 status_t MediaCodec::disconnectFromSurface() {
5511 status_t err = OK;
5512 if (mSurface != NULL) {
5513 // Resetting generation is not technically needed, but there is no need to keep it either
5514 mSurface->setGenerationNumber(0);
5515 err = nativeWindowDisconnect(mSurface.get(), "disconnectFromSurface");
5516 if (err != OK) {
5517 ALOGW("nativeWindowDisconnect returned an error: %s (%d)", strerror(-err), err);
5518 }
5519 // assume disconnected even on error
5520 mSurface.clear();
5521 mIsSurfaceToScreen = false;
5522 }
5523 return err;
5524 }
5525
5526 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
5527 status_t err = OK;
5528 if (mSurface != NULL) {
5529 (void)disconnectFromSurface();
5530 }
5531 if (surface != NULL) {
5532 err = connectToSurface(surface);
5533 if (err == OK) {
5534 mSurface = surface;
5535 }
5536 }
5537 return err;
5538 }
5539
5540 void MediaCodec::onInputBufferAvailable() {
5541 int32_t index;
5542 while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
5543 sp<AMessage> msg = mCallback->dup();
5544 msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
5545 msg->setInt32("index", index);
5546 msg->post();
5547 }
5548 }
5549
5550 void MediaCodec::onOutputBufferAvailable() {
5551 int32_t index;
5552 while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
5553 const sp<MediaCodecBuffer> &buffer =
5554 mPortBuffers[kPortIndexOutput][index].mData;
5555 sp<AMessage> msg = mCallback->dup();
5556 msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
5557 msg->setInt32("index", index);
5558 msg->setSize("offset", buffer->offset());
5559 msg->setSize("size", buffer->size());
5560
5561 int64_t timeUs;
5562 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
5563
5564 msg->setInt64("timeUs", timeUs);
5565
5566 int32_t flags;
5567 CHECK(buffer->meta()->findInt32("flags", &flags));
5568
5569 msg->setInt32("flags", flags);
5570
5571 statsBufferReceived(timeUs, buffer);
5572
5573 msg->post();
5574 }
5575 }
5576
5577 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
5578 if (mCallback != NULL) {
5579 sp<AMessage> msg = mCallback->dup();
5580 msg->setInt32("callbackID", CB_ERROR);
5581 msg->setInt32("err", err);
5582 msg->setInt32("actionCode", actionCode);
5583
5584 if (detail != NULL) {
5585 msg->setString("detail", detail);
5586 }
5587
5588 msg->post();
5589 }
5590 }
5591
5592 void MediaCodec::onOutputFormatChanged() {
5593 if (mCallback != NULL) {
5594 sp<AMessage> msg = mCallback->dup();
5595 msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
5596 msg->setMessage("format", mOutputFormat);
5597 msg->post();
5598 }
5599 }
5600
5601 void MediaCodec::postActivityNotificationIfPossible() {
5602 if (mActivityNotify == NULL) {
5603 return;
5604 }
5605
5606 bool isErrorOrOutputChanged =
5607 (mFlags & (kFlagStickyError
5608 | kFlagOutputBuffersChanged
5609 | kFlagOutputFormatChanged));
5610
5611 if (isErrorOrOutputChanged
5612 || !mAvailPortBuffers[kPortIndexInput].empty()
5613 || !mAvailPortBuffers[kPortIndexOutput].empty()) {
5614 mActivityNotify->setInt32("input-buffers",
5615 mAvailPortBuffers[kPortIndexInput].size());
5616
5617 if (isErrorOrOutputChanged) {
5618 // we want consumer to dequeue as many times as it can
5619 mActivityNotify->setInt32("output-buffers", INT32_MAX);
5620 } else {
5621 mActivityNotify->setInt32("output-buffers",
5622 mAvailPortBuffers[kPortIndexOutput].size());
5623 }
5624 mActivityNotify->post();
5625 mActivityNotify.clear();
5626 }
5627 }
5628
5629 status_t MediaCodec::setParameters(const sp<AMessage> &params) {
5630 sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
5631 msg->setMessage("params", params);
5632
5633 sp<AMessage> response;
5634 return PostAndAwaitResponse(msg, &response);
5635 }
5636
5637 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
5638 updateLowLatency(params);
5639 mapFormat(mComponentName, params, nullptr, false);
5640 updateTunnelPeek(params);
5641 mCodec->signalSetParameters(params);
5642
5643 return OK;
5644 }
5645
5646 status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
5647 const sp<MediaCodecBuffer> &buffer) {
5648 AString mime;
5649 CHECK(mOutputFormat->findString("mime", &mime));
5650
5651 if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
5652 // Codec specific data should be SPS and PPS in a single buffer,
5653 // each prefixed by a startcode (0x00 0x00 0x00 0x01).
5654 // We separate the two and put them into the output format
5655 // under the keys "csd-0" and "csd-1".
5656
5657 unsigned csdIndex = 0;
5658
5659 const uint8_t *data = buffer->data();
5660 size_t size = buffer->size();
5661
5662 const uint8_t *nalStart;
5663 size_t nalSize;
5664 while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
5665 sp<ABuffer> csd = new ABuffer(nalSize + 4);
5666 memcpy(csd->data(), "\x00\x00\x00\x01", 4);
5667 memcpy(csd->data() + 4, nalStart, nalSize);
5668
5669 mOutputFormat->setBuffer(
5670 AStringPrintf("csd-%u", csdIndex).c_str(), csd);
5671
5672 ++csdIndex;
5673 }
5674
5675 if (csdIndex != 2) {
5676 return ERROR_MALFORMED;
5677 }
5678 } else {
5679 // For everything else we just stash the codec specific data into
5680 // the output format as a single piece of csd under "csd-0".
5681 sp<ABuffer> csd = new ABuffer(buffer->size());
5682 memcpy(csd->data(), buffer->data(), buffer->size());
5683 csd->setRange(0, buffer->size());
5684 mOutputFormat->setBuffer("csd-0", csd);
5685 }
5686
5687 return OK;
5688 }
5689
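// Posts the reply for the in-flight synchronous request (optionally carrying err),
// records the origin for diagnostics, and re-posts any messages that were deferred
// while that request was pending.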
5690 void MediaCodec::postPendingRepliesAndDeferredMessages(
5691 std::string origin, status_t err /* = OK */) {
5692 sp<AMessage> response{new AMessage};
5693 if (err != OK) {
5694 response->setInt32("err", err);
5695 }
5696 postPendingRepliesAndDeferredMessages(origin, response);
5697 }
5698
5699 void MediaCodec::postPendingRepliesAndDeferredMessages(
5700 std::string origin, const sp<AMessage> &response) {
5701 LOG_ALWAYS_FATAL_IF(
5702 !mReplyID,
5703 "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
5704 origin.c_str(),
5705 mLastReplyOrigin.c_str());
5706 mLastReplyOrigin = origin;
5707 response->postReply(mReplyID);
5708 mReplyID.clear();
5709 ALOGV_IF(!mDeferredMessages.empty(),
5710 "posting %zu deferred messages", mDeferredMessages.size());
5711 for (sp<AMessage> msg : mDeferredMessages) {
5712 msg->post();
5713 }
5714 mDeferredMessages.clear();
5715 }
5716
5717 std::string MediaCodec::stateString(State state) {
5718 const char *rval = NULL;
5719 char rawbuffer[16]; // room for "%d"
5720
5721 switch (state) {
5722 case UNINITIALIZED: rval = "UNINITIALIZED"; break;
5723 case INITIALIZING: rval = "INITIALIZING"; break;
5724 case INITIALIZED: rval = "INITIALIZED"; break;
5725 case CONFIGURING: rval = "CONFIGURING"; break;
5726 case CONFIGURED: rval = "CONFIGURED"; break;
5727 case STARTING: rval = "STARTING"; break;
5728 case STARTED: rval = "STARTED"; break;
5729 case FLUSHING: rval = "FLUSHING"; break;
5730 case FLUSHED: rval = "FLUSHED"; break;
5731 case STOPPING: rval = "STOPPING"; break;
5732 case RELEASING: rval = "RELEASING"; break;
5733 default:
5734 snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
5735 rval = rawbuffer;
5736 break;
5737 }
5738 return rval;
5739 }
5740
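// Linear blocks can only be pre-fetched when every listed component is backed by
// Codec2 ("codec2::" owner); components owned by the "default" service are reported
// as incompatible, and unknown components yield NAME_NOT_FOUND.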
5741 // static
5742 status_t MediaCodec::CanFetchLinearBlock(
5743 const std::vector<std::string> &names, bool *isCompatible) {
5744 *isCompatible = false;
5745 if (names.size() == 0) {
5746 *isCompatible = true;
5747 return OK;
5748 }
5749 const CodecListCache &cache = GetCodecListCache();
5750 for (const std::string &name : names) {
5751 auto it = cache.mCodecInfoMap.find(name);
5752 if (it == cache.mCodecInfoMap.end()) {
5753 return NAME_NOT_FOUND;
5754 }
5755 const char *owner = it->second->getOwnerName();
5756 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
5757 *isCompatible = false;
5758 return OK;
5759 } else if (strncmp(owner, "codec2::", 8) != 0) {
5760 return NAME_NOT_FOUND;
5761 }
5762 }
5763 return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
5764 }
5765
5766 // static
5767 std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
5768 size_t capacity, const std::vector<std::string> &names) {
5769 return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
5770 }
5771
5772 // static
5773 status_t MediaCodec::CanFetchGraphicBlock(
5774 const std::vector<std::string> &names, bool *isCompatible) {
5775 *isCompatible = false;
5776 if (names.size() == 0) {
5777 *isCompatible = true;
5778 return OK;
5779 }
5780 const CodecListCache &cache = GetCodecListCache();
5781 for (const std::string &name : names) {
5782 auto it = cache.mCodecInfoMap.find(name);
5783 if (it == cache.mCodecInfoMap.end()) {
5784 return NAME_NOT_FOUND;
5785 }
5786 const char *owner = it->second->getOwnerName();
5787 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
5788 *isCompatible = false;
5789 return OK;
5790 } else if (strncmp(owner, "codec2.", 7) != 0) {
5791 return NAME_NOT_FOUND;
5792 }
5793 }
5794 return CCodec::CanFetchGraphicBlock(names, isCompatible);
5795 }
5796
5797 // static
5798 std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
5799 int32_t width,
5800 int32_t height,
5801 int32_t format,
5802 uint64_t usage,
5803 const std::vector<std::string> &names) {
5804 return CCodec::FetchGraphicBlock(width, height, format, usage, names);
5805 }
5806
5807 } // namespace android
5808