1 /*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "ACodec"
19
20 #ifdef __LP64__
21 #define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
22 #endif
23
24 #include <inttypes.h>
25 #include <utils/Trace.h>
26
27 #include <gui/Surface.h>
28
29 #include <media/stagefright/ACodec.h>
30
31 #include <binder/MemoryDealer.h>
32
33 #include <media/stagefright/foundation/hexdump.h>
34 #include <media/stagefright/foundation/ABuffer.h>
35 #include <media/stagefright/foundation/ADebug.h>
36 #include <media/stagefright/foundation/AMessage.h>
37 #include <media/stagefright/foundation/AUtils.h>
38
39 #include <media/stagefright/BufferProducerWrapper.h>
40 #include <media/stagefright/MediaCodec.h>
41 #include <media/stagefright/MediaCodecList.h>
42 #include <media/stagefright/MediaDefs.h>
43 #include <media/stagefright/OMXClient.h>
44 #include <media/stagefright/PersistentSurface.h>
45 #include <media/stagefright/SurfaceUtils.h>
46 #include <media/hardware/HardwareAPI.h>
47
48 #include <OMX_AudioExt.h>
49 #include <OMX_VideoExt.h>
50 #include <OMX_Component.h>
51 #include <OMX_IndexExt.h>
52 #include <OMX_AsString.h>
53
54 #include "include/avc_utils.h"
55 #include "include/DataConverter.h"
56 #include "omx/OMXUtils.h"
57
58 namespace android {
59
60 enum {
61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
62 };
63
64 // OMX errors are directly mapped into status_t range if
65 // there is no corresponding MediaError status code.
66 // Use the statusFromOMXError(int32_t omxError) function.
67 //
68 // Currently this is a direct map.
69 // See frameworks/native/include/media/openmax/OMX_Core.h
70 //
71 // Vendor OMX errors from 0x90000000 - 0x9000FFFF
72 // Extension OMX errors from 0x8F000000 - 0x90000000
73 // Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current)
74 //
75
76 // returns true if err is a recognized OMX error code.
77 // as an OMX error is an OMX_S32, it is passed here as an int32_t
78 static inline bool isOMXError(int32_t err) {
79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
80 }
81
82 // converts an OMX error to a status_t
83 static inline status_t statusFromOMXError(int32_t omxError) {
84 switch (omxError) {
85 case OMX_ErrorInvalidComponentName:
86 case OMX_ErrorComponentNotFound:
87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
88 default:
89 return isOMXError(omxError) ? omxError : 0; // no translation required
90 }
91 }
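// Rough illustration of the mapping (hypothetical call sites, not part of this file):
//   statusFromOMXError(OMX_ErrorComponentNotFound)  // -> NAME_NOT_FOUND
//   statusFromOMXError(OMX_ErrorUndefined)          // recognized OMX error, passed through as-is
//   statusFromOMXError(0)                           // not an OMX error -> 0 (no translation)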
92
93 // checks and converts status_t to a non-side-effect status_t
94 static inline status_t makeNoSideEffectStatus(status_t err) {
95 switch (err) {
96 // the following errors have side effects and may come
97 // from other code modules. Remap for safety reasons.
98 case INVALID_OPERATION:
99 case DEAD_OBJECT:
100 return UNKNOWN_ERROR;
101 default:
102 return err;
103 }
104 }
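// For instance (hypothetical values): makeNoSideEffectStatus(DEAD_OBJECT) and
// makeNoSideEffectStatus(INVALID_OPERATION) both become UNKNOWN_ERROR, while a
// plain media error such as ERROR_IO is returned unchanged.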
105
106 struct MessageList : public RefBase {
107 MessageList() {
108 }
109 virtual ~MessageList() {
110 }
111 std::list<sp<AMessage> > &getList() { return mList; }
112 private:
113 std::list<sp<AMessage> > mList;
114
115 DISALLOW_EVIL_CONSTRUCTORS(MessageList);
116 };
117
118 static sp<DataConverter> getCopyConverter() {
119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
120 static sp<DataConverter> sCopyConverter; // zero-inited
121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
122 return sCopyConverter;
123 }
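// Note: pthread_once gives thread-safe, lazy construction of the shared copy
// converter without depending on C++ static-initialization order. Callers just
// use, e.g., sp<DataConverter> conv = getCopyConverter(); and always get the
// same instance.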
124
125 struct CodecObserver : public BnOMXObserver {
126 CodecObserver() {}
127
128 void setNotificationMessage(const sp<AMessage> &msg) {
129 mNotify = msg;
130 }
131
132 // from IOMXObserver
133 virtual void onMessages(const std::list<omx_message> &messages) {
134 if (messages.empty()) {
135 return;
136 }
137
138 sp<AMessage> notify = mNotify->dup();
139 bool first = true;
140 sp<MessageList> msgList = new MessageList();
141 for (std::list<omx_message>::const_iterator it = messages.cbegin();
142 it != messages.cend(); ++it) {
143 const omx_message &omx_msg = *it;
144 if (first) {
145 notify->setInt32("node", omx_msg.node);
146 first = false;
147 }
148
149 sp<AMessage> msg = new AMessage;
150 msg->setInt32("type", omx_msg.type);
151 switch (omx_msg.type) {
152 case omx_message::EVENT:
153 {
154 msg->setInt32("event", omx_msg.u.event_data.event);
155 msg->setInt32("data1", omx_msg.u.event_data.data1);
156 msg->setInt32("data2", omx_msg.u.event_data.data2);
157 break;
158 }
159
160 case omx_message::EMPTY_BUFFER_DONE:
161 {
162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
163 msg->setInt32("fence_fd", omx_msg.fenceFd);
164 break;
165 }
166
167 case omx_message::FILL_BUFFER_DONE:
168 {
169 msg->setInt32(
170 "buffer", omx_msg.u.extended_buffer_data.buffer);
171 msg->setInt32(
172 "range_offset",
173 omx_msg.u.extended_buffer_data.range_offset);
174 msg->setInt32(
175 "range_length",
176 omx_msg.u.extended_buffer_data.range_length);
177 msg->setInt32(
178 "flags",
179 omx_msg.u.extended_buffer_data.flags);
180 msg->setInt64(
181 "timestamp",
182 omx_msg.u.extended_buffer_data.timestamp);
183 msg->setInt32(
184 "fence_fd", omx_msg.fenceFd);
185 break;
186 }
187
188 case omx_message::FRAME_RENDERED:
189 {
190 msg->setInt64(
191 "media_time_us", omx_msg.u.render_data.timestamp);
192 msg->setInt64(
193 "system_nano", omx_msg.u.render_data.nanoTime);
194 break;
195 }
196
197 default:
198 ALOGE("Unrecognized message type: %d", omx_msg.type);
199 break;
200 }
201 msgList->getList().push_back(msg);
202 }
203 notify->setObject("messages", msgList);
204 notify->post();
205 }
206
207 protected:
208 virtual ~CodecObserver() {}
209
210 private:
211 sp<AMessage> mNotify;
212
213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
214 };
215
216 ////////////////////////////////////////////////////////////////////////////////
217
218 struct ACodec::BaseState : public AState {
219 BaseState(ACodec *codec, const sp<AState> &parentState = NULL);
220
221 protected:
222 enum PortMode {
223 KEEP_BUFFERS,
224 RESUBMIT_BUFFERS,
225 FREE_BUFFERS,
226 };
227
228 ACodec *mCodec;
229
230 virtual PortMode getPortMode(OMX_U32 portIndex);
231
232 virtual bool onMessageReceived(const sp<AMessage> &msg);
233
234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
235
236 virtual void onOutputBufferDrained(const sp<AMessage> &msg);
237 virtual void onInputBufferFilled(const sp<AMessage> &msg);
238
239 void postFillThisBuffer(BufferInfo *info);
240
241 private:
242 // Handles an OMX message. Returns true iff message was handled.
243 bool onOMXMessage(const sp<AMessage> &msg);
244
245 // Handles a list of messages. Returns true iff messages were handled.
246 bool onOMXMessageList(const sp<AMessage> &msg);
247
248 // returns true iff this message is for this component and the component is alive
249 bool checkOMXMessage(const sp<AMessage> &msg);
250
251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);
252
253 bool onOMXFillBufferDone(
254 IOMX::buffer_id bufferID,
255 size_t rangeOffset, size_t rangeLength,
256 OMX_U32 flags,
257 int64_t timeUs,
258 int fenceFd);
259
260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
261
262 void getMoreInputDataIfPossible();
263
264 DISALLOW_EVIL_CONSTRUCTORS(BaseState);
265 };
266
267 ////////////////////////////////////////////////////////////////////////////////
268
269 struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
270 DeathNotifier(const sp<AMessage> &notify)
271 : mNotify(notify) {
272 }
273
274 virtual void binderDied(const wp<IBinder> &) {
275 mNotify->post();
276 }
277
278 protected:
279 virtual ~DeathNotifier() {}
280
281 private:
282 sp<AMessage> mNotify;
283
284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
285 };
286
287 struct ACodec::UninitializedState : public ACodec::BaseState {
288 UninitializedState(ACodec *codec);
289
290 protected:
291 virtual bool onMessageReceived(const sp<AMessage> &msg);
292 virtual void stateEntered();
293
294 private:
295 void onSetup(const sp<AMessage> &msg);
296 bool onAllocateComponent(const sp<AMessage> &msg);
297
298 sp<DeathNotifier> mDeathNotifier;
299
300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
301 };
302
303 ////////////////////////////////////////////////////////////////////////////////
304
305 struct ACodec::LoadedState : public ACodec::BaseState {
306 LoadedState(ACodec *codec);
307
308 protected:
309 virtual bool onMessageReceived(const sp<AMessage> &msg);
310 virtual void stateEntered();
311
312 private:
313 friend struct ACodec::UninitializedState;
314
315 bool onConfigureComponent(const sp<AMessage> &msg);
316 void onCreateInputSurface(const sp<AMessage> &msg);
317 void onSetInputSurface(const sp<AMessage> &msg);
318 void onStart();
319 void onShutdown(bool keepComponentAllocated);
320
321 status_t setupInputSurface();
322
323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
324 };
325
326 ////////////////////////////////////////////////////////////////////////////////
327
328 struct ACodec::LoadedToIdleState : public ACodec::BaseState {
329 LoadedToIdleState(ACodec *codec);
330
331 protected:
332 virtual bool onMessageReceived(const sp<AMessage> &msg);
333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
334 virtual void stateEntered();
335
336 private:
337 status_t allocateBuffers();
338
339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
340 };
341
342 ////////////////////////////////////////////////////////////////////////////////
343
344 struct ACodec::IdleToExecutingState : public ACodec::BaseState {
345 IdleToExecutingState(ACodec *codec);
346
347 protected:
348 virtual bool onMessageReceived(const sp<AMessage> &msg);
349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
350 virtual void stateEntered();
351
352 private:
353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
354 };
355
356 ////////////////////////////////////////////////////////////////////////////////
357
358 struct ACodec::ExecutingState : public ACodec::BaseState {
359 ExecutingState(ACodec *codec);
360
361 void submitRegularOutputBuffers();
362 void submitOutputMetaBuffers();
363 void submitOutputBuffers();
364
365 // Submit output buffers to the decoder, submit input buffers to client
366 // to fill with data.
367 void resume();
368
369 // Returns true iff input and output buffers are in play.
370 bool active() const { return mActive; }
371
372 protected:
373 virtual PortMode getPortMode(OMX_U32 portIndex);
374 virtual bool onMessageReceived(const sp<AMessage> &msg);
375 virtual void stateEntered();
376
377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
379
380 private:
381 bool mActive;
382
383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
384 };
385
386 ////////////////////////////////////////////////////////////////////////////////
387
388 struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
389 OutputPortSettingsChangedState(ACodec *codec);
390
391 protected:
392 virtual PortMode getPortMode(OMX_U32 portIndex);
393 virtual bool onMessageReceived(const sp<AMessage> &msg);
394 virtual void stateEntered();
395
396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
398
399 private:
400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
401 };
402
403 ////////////////////////////////////////////////////////////////////////////////
404
405 struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
406 ExecutingToIdleState(ACodec *codec);
407
408 protected:
409 virtual bool onMessageReceived(const sp<AMessage> &msg);
410 virtual void stateEntered();
411
412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
413
414 virtual void onOutputBufferDrained(const sp<AMessage> &msg);
415 virtual void onInputBufferFilled(const sp<AMessage> &msg);
416
417 private:
418 void changeStateIfWeOwnAllBuffers();
419
420 bool mComponentNowIdle;
421
422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
423 };
424
425 ////////////////////////////////////////////////////////////////////////////////
426
427 struct ACodec::IdleToLoadedState : public ACodec::BaseState {
428 IdleToLoadedState(ACodec *codec);
429
430 protected:
431 virtual bool onMessageReceived(const sp<AMessage> &msg);
432 virtual void stateEntered();
433
434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
435
436 private:
437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
438 };
439
440 ////////////////////////////////////////////////////////////////////////////////
441
442 struct ACodec::FlushingState : public ACodec::BaseState {
443 FlushingState(ACodec *codec);
444
445 protected:
446 virtual bool onMessageReceived(const sp<AMessage> &msg);
447 virtual void stateEntered();
448
449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
450
451 virtual void onOutputBufferDrained(const sp<AMessage> &msg);
452 virtual void onInputBufferFilled(const sp<AMessage> &msg);
453
454 private:
455 bool mFlushComplete[2];
456
457 void changeStateIfWeOwnAllBuffers();
458
459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
460 };
461
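// Rough sketch of how the state classes declared above relate (inferred from the
// class names and the OMX IL state model; actual transitions are driven by the
// onMessageReceived/onOMXEvent overrides):
//
//   Uninitialized -> Loaded -> LoadedToIdle -> IdleToExecuting -> Executing
//   Executing -> ExecutingToIdle -> IdleToLoaded -> Loaded         (shutdown path)
//   Executing <-> OutputPortSettingsChanged                        (output reconfiguration)
//   Executing <-> Flushing                                         (flush/resume)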
462 ////////////////////////////////////////////////////////////////////////////////
463
464 void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
465 if (mFenceFd >= 0) {
466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
468 }
469 mFenceFd = fenceFd;
470 mIsReadFence = false;
471 }
472
473 void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
474 if (mFenceFd >= 0) {
475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
476 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
477 }
478 mFenceFd = fenceFd;
479 mIsReadFence = true;
480 }
481
482 void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
483 if (mFenceFd >= 0 && mIsReadFence) {
484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
485 }
486 }
487
488 void ACodec::BufferInfo::checkReadFence(const char *dbg) {
489 if (mFenceFd >= 0 && !mIsReadFence) {
490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
491 }
492 }
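// Convention suggested by the helpers above: a write fence is waited on before
// writing into a buffer (e.g. right after dequeueing it from the native window),
// and a read fence before reading a buffer's contents; the check* helpers merely
// log when a fence of the opposite kind is about to be reused.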
493
494 ////////////////////////////////////////////////////////////////////////////////
495
496 ACodec::ACodec()
497 : mQuirks(0),
498 mNode(0),
499 mUsingNativeWindow(false),
500 mNativeWindowUsageBits(0),
501 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
502 mIsVideo(false),
503 mIsEncoder(false),
504 mFatalError(false),
505 mShutdownInProgress(false),
506 mExplicitShutdown(false),
507 mIsLegacyVP9Decoder(false),
508 mEncoderDelay(0),
509 mEncoderPadding(0),
510 mRotationDegrees(0),
511 mChannelMaskPresent(false),
512 mChannelMask(0),
513 mDequeueCounter(0),
514 mInputMetadataType(kMetadataBufferTypeInvalid),
515 mOutputMetadataType(kMetadataBufferTypeInvalid),
516 mLegacyAdaptiveExperiment(false),
517 mMetadataBuffersToSubmit(0),
518 mNumUndequeuedBuffers(0),
519 mRepeatFrameDelayUs(-1ll),
520 mMaxPtsGapUs(-1ll),
521 mMaxFps(-1),
522 mTimePerFrameUs(-1ll),
523 mTimePerCaptureUs(-1ll),
524 mCreateInputBuffersSuspended(false),
525 mTunneled(false),
526 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
527 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
528 mUninitializedState = new UninitializedState(this);
529 mLoadedState = new LoadedState(this);
530 mLoadedToIdleState = new LoadedToIdleState(this);
531 mIdleToExecutingState = new IdleToExecutingState(this);
532 mExecutingState = new ExecutingState(this);
533
534 mOutputPortSettingsChangedState =
535 new OutputPortSettingsChangedState(this);
536
537 mExecutingToIdleState = new ExecutingToIdleState(this);
538 mIdleToLoadedState = new IdleToLoadedState(this);
539 mFlushingState = new FlushingState(this);
540
541 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
542 mInputEOSResult = OK;
543
544 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
545
546 changeState(mUninitializedState);
547 }
548
549 ACodec::~ACodec() {
550 }
551
552 void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
553 mNotify = msg;
554 }
555
556 void ACodec::initiateSetup(const sp<AMessage> &msg) {
557 msg->setWhat(kWhatSetup);
558 msg->setTarget(this);
559 msg->post();
560 }
561
562 void ACodec::signalSetParameters(const sp<AMessage> &params) {
563 sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
564 msg->setMessage("params", params);
565 msg->post();
566 }
567
568 void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
569 msg->setWhat(kWhatAllocateComponent);
570 msg->setTarget(this);
571 msg->post();
572 }
573
574 void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
575 msg->setWhat(kWhatConfigureComponent);
576 msg->setTarget(this);
577 msg->post();
578 }
579
580 status_t ACodec::setSurface(const sp<Surface> &surface) {
581 sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
582 msg->setObject("surface", surface);
583
584 sp<AMessage> response;
585 status_t err = msg->postAndAwaitResponse(&response);
586
587 if (err == OK) {
588 (void)response->findInt32("err", &err);
589 }
590 return err;
591 }
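// Unlike the fire-and-forget initiate*/signal* helpers in this file, setSurface is
// synchronous: postAndAwaitResponse() blocks until the handler replies, and the
// actual result is carried back in the "err" field of the response message.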
592
593 void ACodec::initiateCreateInputSurface() {
594 (new AMessage(kWhatCreateInputSurface, this))->post();
595 }
596
597 void ACodec::initiateSetInputSurface(
598 const sp<PersistentSurface> &surface) {
599 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
600 msg->setObject("input-surface", surface);
601 msg->post();
602 }
603
604 void ACodec::signalEndOfInputStream() {
605 (new AMessage(kWhatSignalEndOfInputStream, this))->post();
606 }
607
608 void ACodec::initiateStart() {
609 (new AMessage(kWhatStart, this))->post();
610 }
611
612 void ACodec::signalFlush() {
613 ALOGV("[%s] signalFlush", mComponentName.c_str());
614 (new AMessage(kWhatFlush, this))->post();
615 }
616
617 void ACodec::signalResume() {
618 (new AMessage(kWhatResume, this))->post();
619 }
620
621 void ACodec::initiateShutdown(bool keepComponentAllocated) {
622 sp<AMessage> msg = new AMessage(kWhatShutdown, this);
623 msg->setInt32("keepComponentAllocated", keepComponentAllocated);
624 msg->post();
625 if (!keepComponentAllocated) {
626 // ensure shutdown completes in 3 seconds
627 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
628 }
629 }
630
631 void ACodec::signalRequestIDRFrame() {
632 (new AMessage(kWhatRequestIDRFrame, this))->post();
633 }
634
635 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
636 // Some codecs may return input buffers before having them processed.
637 // This causes a halt if we already signaled an EOS on the input
638 // port. For now keep submitting an output buffer if there was an
639 // EOS on the input port, but not yet on the output port.
640 void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
641 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
642 mMetadataBuffersToSubmit > 0) {
643 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
644 }
645 }
646
647 status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
648 // allow keeping unset surface
649 if (surface == NULL) {
650 if (mNativeWindow != NULL) {
651 ALOGW("cannot unset a surface");
652 return INVALID_OPERATION;
653 }
654 return OK;
655 }
656
657 // cannot switch from bytebuffers to surface
658 if (mNativeWindow == NULL) {
659 ALOGW("component was not configured with a surface");
660 return INVALID_OPERATION;
661 }
662
663 ANativeWindow *nativeWindow = surface.get();
664 // if we have not yet started the codec, we can simply set the native window
665 if (mBuffers[kPortIndexInput].size() == 0) {
666 mNativeWindow = surface;
667 return OK;
668 }
669
670 // we do not support changing a tunneled surface after start
671 if (mTunneled) {
672 ALOGW("cannot change tunneled surface");
673 return INVALID_OPERATION;
674 }
675
676 int usageBits = 0;
677 // no need to reconnect as we will not dequeue all buffers
678 status_t err = setupNativeWindowSizeFormatAndUsage(
679 nativeWindow, &usageBits,
680 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
681 if (err != OK) {
682 return err;
683 }
684
685 int ignoredFlags = kVideoGrallocUsage;
686 // New output surface is not allowed to add new usage flag except ignored ones.
687 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
688 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
689 return BAD_VALUE;
690 }
691
692 // get min undequeued count. We cannot switch to a surface that has a higher
693 // undequeued count than we allocated.
694 int minUndequeuedBuffers = 0;
695 err = nativeWindow->query(
696 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
697 &minUndequeuedBuffers);
698 if (err != 0) {
699 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
700 strerror(-err), -err);
701 return err;
702 }
703 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
704 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
705 minUndequeuedBuffers, mNumUndequeuedBuffers);
706 return BAD_VALUE;
707 }
708
709 // we cannot change the number of output buffers while OMX is running
710 // set up surface to the same count
711 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
712 ALOGV("setting up surface for %zu buffers", buffers.size());
713
714 err = native_window_set_buffer_count(nativeWindow, buffers.size());
715 if (err != 0) {
716 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
717 -err);
718 return err;
719 }
720
721 // need to enable allocation when attaching
722 surface->getIGraphicBufferProducer()->allowAllocation(true);
723
724 // for meta data mode, we move dequeued buffers to the new surface.
725 // for non-meta mode, we must move all registered buffers
726 for (size_t i = 0; i < buffers.size(); ++i) {
727 const BufferInfo &info = buffers[i];
728 // skip undequeued buffers for meta data mode
729 if (storingMetadataInDecodedBuffers()
730 && !mLegacyAdaptiveExperiment
731 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
732 ALOGV("skipping buffer");
733 continue;
734 }
735 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());
736
737 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
738 if (err != OK) {
739 ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
740 info.mGraphicBuffer->getNativeBuffer(),
741 strerror(-err), -err);
742 return err;
743 }
744 }
745
746 // cancel undequeued buffers to new surface
747 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
748 for (size_t i = 0; i < buffers.size(); ++i) {
749 BufferInfo &info = buffers.editItemAt(i);
750 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
751 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
752 err = nativeWindow->cancelBuffer(
753 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
754 info.mFenceFd = -1;
755 if (err != OK) {
756 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
757 info.mGraphicBuffer->getNativeBuffer(),
758 strerror(-err), -err);
759 return err;
760 }
761 }
762 }
763 // disallow further allocation
764 (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
765 }
766
767 // push blank buffers to previous window if requested
768 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
769 pushBlankBuffersToNativeWindow(mNativeWindow.get());
770 }
771
772 mNativeWindow = nativeWindow;
773 mNativeWindowUsageBits = usageBits;
774 return OK;
775 }
776
777 status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
778 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
779
780 CHECK(mDealer[portIndex] == NULL);
781 CHECK(mBuffers[portIndex].isEmpty());
782
783 status_t err;
784 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
785 if (storingMetadataInDecodedBuffers()) {
786 err = allocateOutputMetadataBuffers();
787 } else {
788 err = allocateOutputBuffersFromNativeWindow();
789 }
790 } else {
791 OMX_PARAM_PORTDEFINITIONTYPE def;
792 InitOMXParams(&def);
793 def.nPortIndex = portIndex;
794
795 err = mOMX->getParameter(
796 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
797
798 if (err == OK) {
799 MetadataBufferType type =
800 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
801 size_t bufSize = def.nBufferSize;
802 if (type == kMetadataBufferTypeANWBuffer) {
803 bufSize = sizeof(VideoNativeMetadata);
804 } else if (type == kMetadataBufferTypeNativeHandleSource) {
805 bufSize = sizeof(VideoNativeHandleMetadata);
806 }
807
808 // If using gralloc or native source input metadata buffers, allocate largest
809 // metadata size as we prefer to generate native source metadata, but component
810 // may require gralloc source. For camera source, allocate at least enough
811 // size for native metadata buffers.
812 size_t allottedSize = bufSize;
813 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
814 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
815 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
816 bufSize = max(bufSize, sizeof(VideoNativeMetadata));
817 }
818
819 size_t conversionBufferSize = 0;
820
821 sp<DataConverter> converter = mConverter[portIndex];
822 if (converter != NULL) {
823 // here we assume sane conversions of max 4:1, so result fits in int32
824 if (portIndex == kPortIndexInput) {
825 conversionBufferSize = converter->sourceSize(bufSize);
826 } else {
827 conversionBufferSize = converter->targetSize(bufSize);
828 }
829 }
830
831 size_t alignment = MemoryDealer::getAllocationAlignment();
832
833 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
834 mComponentName.c_str(),
835 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
836 portIndex == kPortIndexInput ? "input" : "output");
837
838 // verify buffer sizes to avoid overflow in align()
839 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
840 ALOGE("b/22885421");
841 return NO_MEMORY;
842 }
843
844 // don't modify bufSize as OMX may not expect it to increase after negotiation
845 size_t alignedSize = align(bufSize, alignment);
846 size_t alignedConvSize = align(conversionBufferSize, alignment);
847 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
848 ALOGE("b/22885421");
849 return NO_MEMORY;
850 }
851
852 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
853 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
854
855 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
856 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
857 if (mem == NULL || mem->pointer() == NULL) {
858 return NO_MEMORY;
859 }
860
861 BufferInfo info;
862 info.mStatus = BufferInfo::OWNED_BY_US;
863 info.mFenceFd = -1;
864 info.mRenderInfo = NULL;
865 info.mNativeHandle = NULL;
866
867 uint32_t requiresAllocateBufferBit =
868 (portIndex == kPortIndexInput)
869 ? kRequiresAllocateBufferOnInputPorts
870 : kRequiresAllocateBufferOnOutputPorts;
871
872 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
873 mem.clear();
874
875 void *ptr = NULL;
876 sp<NativeHandle> native_handle;
877 err = mOMX->allocateSecureBuffer(
878 mNode, portIndex, bufSize, &info.mBufferID,
879 &ptr, &native_handle);
880
881 // TRICKY: this representation is unorthodox, but ACodec requires
882 // an ABuffer with a proper size to validate range offsets and lengths.
883 // Since mData is never referenced for secure input, it is used to store
884 // either the pointer to the secure buffer, or the opaque handle as on
885 // some devices ptr is actually an opaque handle, not a pointer.
886
887 // TRICKY2: use native handle as the base of the ABuffer if received one,
888 // because Widevine source only receives these base addresses.
889 const native_handle_t *native_handle_ptr =
890 native_handle == NULL ? NULL : native_handle->handle();
891 info.mData = new ABuffer(
892 ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
893 info.mNativeHandle = native_handle;
894 info.mCodecData = info.mData;
895 } else if (mQuirks & requiresAllocateBufferBit) {
896 err = mOMX->allocateBufferWithBackup(
897 mNode, portIndex, mem, &info.mBufferID, allottedSize);
898 } else {
899 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
900 }
901
902 if (mem != NULL) {
903 info.mCodecData = new ABuffer(mem->pointer(), bufSize);
904 info.mCodecRef = mem;
905
906 if (type == kMetadataBufferTypeANWBuffer) {
907 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
908 }
909
910 // if we require conversion, allocate conversion buffer for client use;
911 // otherwise, reuse codec buffer
912 if (mConverter[portIndex] != NULL) {
913 CHECK_GT(conversionBufferSize, (size_t)0);
914 mem = mDealer[portIndex]->allocate(conversionBufferSize);
915 if (mem == NULL || mem->pointer() == NULL) {
916 return NO_MEMORY;
917 }
918 info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
919 info.mMemRef = mem;
920 } else {
921 info.mData = info.mCodecData;
922 info.mMemRef = info.mCodecRef;
923 }
924 }
925
926 mBuffers[portIndex].push(info);
927 }
928 }
929 }
930
931 if (err != OK) {
932 return err;
933 }
934
935 sp<AMessage> notify = mNotify->dup();
936 notify->setInt32("what", CodecBase::kWhatBuffersAllocated);
937
938 notify->setInt32("portIndex", portIndex);
939
940 sp<PortDescription> desc = new PortDescription;
941
942 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
943 const BufferInfo &info = mBuffers[portIndex][i];
944 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
945 }
946
947 notify->setObject("portDesc", desc);
948 notify->post();
949
950 return OK;
951 }
952
953 status_t ACodec::setupNativeWindowSizeFormatAndUsage(
954 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
955 bool reconnect) {
956 OMX_PARAM_PORTDEFINITIONTYPE def;
957 InitOMXParams(&def);
958 def.nPortIndex = kPortIndexOutput;
959
960 status_t err = mOMX->getParameter(
961 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
962
963 if (err != OK) {
964 return err;
965 }
966
967 OMX_U32 usage = 0;
968 err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
969 if (err != 0) {
970 ALOGW("querying usage flags from OMX IL component failed: %d", err);
971 // XXX: Currently this error is logged, but not fatal.
972 usage = 0;
973 }
974 int omxUsage = usage;
975
976 if (mFlags & kFlagIsGrallocUsageProtected) {
977 usage |= GRALLOC_USAGE_PROTECTED;
978 }
979
980 usage |= kVideoGrallocUsage;
981 *finalUsage = usage;
982
983 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
984 mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;
985
986 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
987 return setNativeWindowSizeFormatAndUsage(
988 nativeWindow,
989 def.format.video.nFrameWidth,
990 def.format.video.nFrameHeight,
991 def.format.video.eColorFormat,
992 mRotationDegrees,
993 usage,
994 reconnect);
995 }
996
997 status_t ACodec::configureOutputBuffersFromNativeWindow(
998 OMX_U32 *bufferCount, OMX_U32 *bufferSize,
999 OMX_U32 *minUndequeuedBuffers, bool preregister) {
1000
1001 OMX_PARAM_PORTDEFINITIONTYPE def;
1002 InitOMXParams(&def);
1003 def.nPortIndex = kPortIndexOutput;
1004
1005 status_t err = mOMX->getParameter(
1006 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
1007
1008 if (err == OK) {
1009 err = setupNativeWindowSizeFormatAndUsage(
1010 mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
1011 }
1012 if (err != OK) {
1013 mNativeWindowUsageBits = 0;
1014 return err;
1015 }
1016
1017 // Exits here for tunneled video playback codecs -- i.e. skips native window
1018 // buffer allocation step as this is managed by the tunneled OMX component
1019 // itself and explicitly sets def.nBufferCountActual to 0.
1020 if (mTunneled) {
1021 ALOGV("Tunneled Playback: skipping native window buffer allocation.");
1022 def.nBufferCountActual = 0;
1023 err = mOMX->setParameter(
1024 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
1025
1026 *minUndequeuedBuffers = 0;
1027 *bufferCount = 0;
1028 *bufferSize = 0;
1029 return err;
1030 }
1031
1032 *minUndequeuedBuffers = 0;
1033 err = mNativeWindow->query(
1034 mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
1035 (int *)minUndequeuedBuffers);
1036
1037 if (err != 0) {
1038 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
1039 strerror(-err), -err);
1040 return err;
1041 }
1042
1043 // FIXME: assume that surface is controlled by app (native window
1044 // returns the number for the case when surface is not controlled by app)
1045 // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
1046 // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
1047
1048 // Use conservative allocation while also trying to reduce starvation
1049 //
1050 // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
1051 // minimum needed for the consumer to be able to work
1052 // 2. try to allocate two (2) additional buffers to reduce starvation from
1053 // the consumer
1054 // plus an extra buffer to account for incorrect minUndequeuedBufs
1055 for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
1056 OMX_U32 newBufferCount =
1057 def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
1058 def.nBufferCountActual = newBufferCount;
1059 err = mOMX->setParameter(
1060 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
1061
1062 if (err == OK) {
1063 *minUndequeuedBuffers += extraBuffers;
1064 break;
1065 }
1066
1067 ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
1068 mComponentName.c_str(), newBufferCount, err);
1069 /* exit condition */
1070 if (extraBuffers == 0) {
1071 return err;
1072 }
1073 }
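// Worked example with hypothetical numbers: if nBufferCountMin is 4 and
// *minUndequeuedBuffers is 2, the loop above first asks for 4 + 2 + 3 = 9 buffers,
// then retries with 8, 7 and finally 6 (no extras) as long as the component keeps
// rejecting the count, and only gives up with the last error after that.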
1074
1075 err = native_window_set_buffer_count(
1076 mNativeWindow.get(), def.nBufferCountActual);
1077
1078 if (err != 0) {
1079 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
1080 -err);
1081 return err;
1082 }
1083
1084 *bufferCount = def.nBufferCountActual;
1085 *bufferSize = def.nBufferSize;
1086 return err;
1087 }
1088
1089 status_t ACodec::allocateOutputBuffersFromNativeWindow() {
1090 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
1091 status_t err = configureOutputBuffersFromNativeWindow(
1092 &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
1093 if (err != 0)
1094 return err;
1095 mNumUndequeuedBuffers = minUndequeuedBuffers;
1096
1097 if (!storingMetadataInDecodedBuffers()) {
1098 static_cast<Surface*>(mNativeWindow.get())
1099 ->getIGraphicBufferProducer()->allowAllocation(true);
1100 }
1101
1102 ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
1103 "output port",
1104 mComponentName.c_str(), bufferCount, bufferSize);
1105
1106 // Dequeue buffers and send them to OMX
1107 for (OMX_U32 i = 0; i < bufferCount; i++) {
1108 ANativeWindowBuffer *buf;
1109 int fenceFd;
1110 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
1111 if (err != 0) {
1112 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
1113 break;
1114 }
1115
1116 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
1117 BufferInfo info;
1118 info.mStatus = BufferInfo::OWNED_BY_US;
1119 info.mFenceFd = fenceFd;
1120 info.mIsReadFence = false;
1121 info.mRenderInfo = NULL;
1122 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
1123 info.mCodecData = info.mData;
1124 info.mGraphicBuffer = graphicBuffer;
1125 mBuffers[kPortIndexOutput].push(info);
1126
1127 IOMX::buffer_id bufferId;
1128 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
1129 &bufferId);
1130 if (err != 0) {
1131 ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
1132 "%d", i, err);
1133 break;
1134 }
1135
1136 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;
1137
1138 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
1139 mComponentName.c_str(),
1140 bufferId, graphicBuffer.get());
1141 }
1142
1143 OMX_U32 cancelStart;
1144 OMX_U32 cancelEnd;
1145
1146 if (err != 0) {
1147 // If an error occurred while dequeuing we need to cancel any buffers
1148 // that were dequeued.
1149 cancelStart = 0;
1150 cancelEnd = mBuffers[kPortIndexOutput].size();
1151 } else {
1152 // Return the required minimum undequeued buffers to the native window.
1153 cancelStart = bufferCount - minUndequeuedBuffers;
1154 cancelEnd = bufferCount;
1155 }
1156
1157 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
1158 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
1159 if (info->mStatus == BufferInfo::OWNED_BY_US) {
1160 status_t error = cancelBufferToNativeWindow(info);
1161 if (err == 0) {
1162 err = error;
1163 }
1164 }
1165 }
1166
1167 if (!storingMetadataInDecodedBuffers()) {
1168 static_cast<Surface*>(mNativeWindow.get())
1169 ->getIGraphicBufferProducer()->allowAllocation(false);
1170 }
1171
1172 return err;
1173 }
1174
1175 status_t ACodec::allocateOutputMetadataBuffers() {
1176 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
1177 status_t err = configureOutputBuffersFromNativeWindow(
1178 &bufferCount, &bufferSize, &minUndequeuedBuffers,
1179 mLegacyAdaptiveExperiment /* preregister */);
1180 if (err != 0)
1181 return err;
1182 mNumUndequeuedBuffers = minUndequeuedBuffers;
1183
1184 ALOGV("[%s] Allocating %u meta buffers on output port",
1185 mComponentName.c_str(), bufferCount);
1186
1187 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
1188 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
1189 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
1190 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");
1191
1192 // Dequeue buffers and send them to OMX
1193 for (OMX_U32 i = 0; i < bufferCount; i++) {
1194 BufferInfo info;
1195 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
1196 info.mFenceFd = -1;
1197 info.mRenderInfo = NULL;
1198 info.mGraphicBuffer = NULL;
1199 info.mDequeuedAt = mDequeueCounter;
1200
1201 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
1202 if (mem == NULL || mem->pointer() == NULL) {
1203 return NO_MEMORY;
1204 }
1205 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
1206 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
1207 }
1208 info.mData = new ABuffer(mem->pointer(), mem->size());
1209 info.mMemRef = mem;
1210 info.mCodecData = info.mData;
1211 info.mCodecRef = mem;
1212
1213 // we use useBuffer for metadata regardless of quirks
1214 err = mOMX->useBuffer(
1215 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
1216 mBuffers[kPortIndexOutput].push(info);
1217
1218 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
1219 mComponentName.c_str(), info.mBufferID, mem->pointer());
1220 }
1221
1222 if (mLegacyAdaptiveExperiment) {
1223 // preallocate and preregister buffers
1224 static_cast<Surface *>(mNativeWindow.get())
1225 ->getIGraphicBufferProducer()->allowAllocation(true);
1226
1227 ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
1228 "output port",
1229 mComponentName.c_str(), bufferCount, bufferSize);
1230
1231 // Dequeue buffers then cancel them all
1232 for (OMX_U32 i = 0; i < bufferCount; i++) {
1233 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
1234
1235 ANativeWindowBuffer *buf;
1236 int fenceFd;
1237 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
1238 if (err != 0) {
1239 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
1240 break;
1241 }
1242
1243 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
1244 mOMX->updateGraphicBufferInMeta(
1245 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
1246 info->mStatus = BufferInfo::OWNED_BY_US;
1247 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
1248 info->mGraphicBuffer = graphicBuffer;
1249 }
1250
1251 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
1252 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
1253 if (info->mStatus == BufferInfo::OWNED_BY_US) {
1254 status_t error = cancelBufferToNativeWindow(info);
1255 if (err == OK) {
1256 err = error;
1257 }
1258 }
1259 }
1260
1261 static_cast<Surface*>(mNativeWindow.get())
1262 ->getIGraphicBufferProducer()->allowAllocation(false);
1263 }
1264
1265 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
1266 return err;
1267 }
1268
1269 status_t ACodec::submitOutputMetadataBuffer() {
1270 CHECK(storingMetadataInDecodedBuffers());
1271 if (mMetadataBuffersToSubmit == 0)
1272 return OK;
1273
1274 BufferInfo *info = dequeueBufferFromNativeWindow();
1275 if (info == NULL) {
1276 return ERROR_IO;
1277 }
1278
1279 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
1280 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());
1281
1282 --mMetadataBuffersToSubmit;
1283 info->checkWriteFence("submitOutputMetadataBuffer");
1284 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
1285 info->mFenceFd = -1;
1286 if (err == OK) {
1287 info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
1288 }
1289
1290 return err;
1291 }
1292
1293 status_t ACodec::waitForFence(int fd, const char *dbg) {
1294 status_t res = OK;
1295 if (fd >= 0) {
1296 sp<Fence> fence = new Fence(fd);
1297 res = fence->wait(IOMX::kFenceTimeoutMs);
1298 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
1299 }
1300 return res;
1301 }
1302
1303 // static
1304 const char *ACodec::_asString(BufferInfo::Status s) {
1305 switch (s) {
1306 case BufferInfo::OWNED_BY_US: return "OUR";
1307 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
1308 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
1309 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
1310 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
1311 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
1312 default: return "?";
1313 }
1314 }
1315
1316 void ACodec::dumpBuffers(OMX_U32 portIndex) {
1317 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
1318 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
1319 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
1320 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
1321 const BufferInfo &info = mBuffers[portIndex][i];
1322 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
1323 i, info.mBufferID, info.mGraphicBuffer.get(),
1324 info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
1325 _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
1326 }
1327 }
1328
1329 status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
1330 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
1331
1332 ALOGV("[%s] Calling cancelBuffer on buffer %u",
1333 mComponentName.c_str(), info->mBufferID);
1334
1335 info->checkWriteFence("cancelBufferToNativeWindow");
1336 int err = mNativeWindow->cancelBuffer(
1337 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
1338 info->mFenceFd = -1;
1339
1340 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
1341 mComponentName.c_str(), info->mBufferID);
1342 // change ownership even if cancelBuffer fails
1343 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
1344
1345 return err;
1346 }
1347
1348 void ACodec::updateRenderInfoForDequeuedBuffer(
1349 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
1350
1351 info->mRenderInfo =
1352 mRenderTracker.updateInfoForDequeuedBuffer(
1353 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
1354
1355 // check for any fences already signaled
1356 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
1357 }
1358
1359 void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
1360 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
1361 mRenderTracker.dumpRenderQueue();
1362 }
1363 }
1364
1365 void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
1366 sp<AMessage> msg = mNotify->dup();
1367 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
1368 std::list<FrameRenderTracker::Info> done =
1369 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
1370
1371 // unlink untracked frames
1372 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
1373 it != done.cend(); ++it) {
1374 ssize_t index = it->getIndex();
1375 if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
1376 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
1377 } else if (index >= 0) {
1378 // THIS SHOULD NEVER HAPPEN
1379 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
1380 }
1381 }
1382
1383 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
1384 msg->post();
1385 }
1386 }
1387
1388 ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
1389 ANativeWindowBuffer *buf;
1390 CHECK(mNativeWindow.get() != NULL);
1391
1392 if (mTunneled) {
1393 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
1394 " video playback mode!");
1395 return NULL;
1396 }
1397
1398 if (mFatalError) {
1399 ALOGW("not dequeuing from native window due to fatal error");
1400 return NULL;
1401 }
1402
1403 int fenceFd = -1;
1404 do {
1405 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
1406 if (err != 0) {
1407 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
1408 return NULL;
1409 }
1410
1411 bool stale = false;
1412 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
1413 i--;
1414 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
1415
1416 if (info->mGraphicBuffer != NULL &&
1417 info->mGraphicBuffer->handle == buf->handle) {
1418 // Since consumers can attach buffers to BufferQueues, it is possible
1419 // that a known yet stale buffer can return from a surface that we
1420 // once used. We can simply ignore this as we have already dequeued
1421 // this buffer properly. NOTE: this does not eliminate all cases,
1422 // e.g. it is possible that we have queued the valid buffer to the
1423 // NW, and a stale copy of the same buffer gets dequeued - which will
1424 // be treated as the valid buffer by ACodec.
1425 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
1426 ALOGI("dequeued stale buffer %p. discarding", buf);
1427 stale = true;
1428 break;
1429 }
1430
1431 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
1432 info->mStatus = BufferInfo::OWNED_BY_US;
1433 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
1434 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
1435 return info;
1436 }
1437 }
1438
1439 // It is also possible to receive a previously unregistered buffer
1440 // in non-meta mode. These should be treated as stale buffers. The
1441 // same is possible in meta mode, in which case, it will be treated
1442 // as a normal buffer, which is not desirable.
1443 // TODO: fix this.
1444 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
1445 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
1446 stale = true;
1447 }
1448 if (stale) {
1449 // TODO: detach stale buffer, but there is no API yet to do it.
1450 buf = NULL;
1451 }
1452 } while (buf == NULL);
1453
1454 // get oldest undequeued buffer
1455 BufferInfo *oldest = NULL;
1456 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
1457 i--;
1458 BufferInfo *info =
1459 &mBuffers[kPortIndexOutput].editItemAt(i);
1460 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
1461 (oldest == NULL ||
1462 // avoid potential issues from counter rolling over
1463 mDequeueCounter - info->mDequeuedAt >
1464 mDequeueCounter - oldest->mDequeuedAt)) {
1465 oldest = info;
1466 }
1467 }
1468
1469 // it is impossible to dequeue a buffer when there are no buffers with ANW
1470 CHECK(oldest != NULL);
1471 // it is impossible to dequeue an unknown buffer in non-meta mode, as the
1472 // while loop above does not complete
1473 CHECK(storingMetadataInDecodedBuffers());
1474
1475 // discard buffer in LRU info and replace with new buffer
1476 oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
1477 oldest->mStatus = BufferInfo::OWNED_BY_US;
1478 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
1479 mRenderTracker.untrackFrame(oldest->mRenderInfo);
1480 oldest->mRenderInfo = NULL;
1481
1482 mOMX->updateGraphicBufferInMeta(
1483 mNode, kPortIndexOutput, oldest->mGraphicBuffer,
1484 oldest->mBufferID);
1485
1486 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
1487 VideoGrallocMetadata *grallocMeta =
1488 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
1489 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
1490 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
1491 mDequeueCounter - oldest->mDequeuedAt,
1492 (void *)(uintptr_t)grallocMeta->pHandle,
1493 oldest->mGraphicBuffer->handle, oldest->mData->base());
1494 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
1495 VideoNativeMetadata *nativeMeta =
1496 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
1497 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
1498 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
1499 mDequeueCounter - oldest->mDequeuedAt,
1500 (void *)(uintptr_t)nativeMeta->pBuffer,
1501 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
1502 }
1503
1504 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
1505 return oldest;
1506 }
1507
1508 status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
1509 status_t err = OK;
1510 for (size_t i = mBuffers[portIndex].size(); i > 0;) {
1511 i--;
1512 status_t err2 = freeBuffer(portIndex, i);
1513 if (err == OK) {
1514 err = err2;
1515 }
1516 }
1517
1518 // clear mDealer even on an error
1519 mDealer[portIndex].clear();
1520 return err;
1521 }
1522
1523 status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
1524 status_t err = OK;
1525 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
1526 i--;
1527 BufferInfo *info =
1528 &mBuffers[kPortIndexOutput].editItemAt(i);
1529
1530 // At this time some buffers may still be with the component
1531 // or being drained.
1532 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
1533 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
1534 status_t err2 = freeBuffer(kPortIndexOutput, i);
1535 if (err == OK) {
1536 err = err2;
1537 }
1538 }
1539 }
1540
1541 return err;
1542 }
1543
1544 status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
1545 BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
1546 status_t err = OK;
1547
1548 // there should not be any fences in the metadata
1549 MetadataBufferType type =
1550 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
1551 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
1552 && info->mData->size() >= sizeof(VideoNativeMetadata)) {
1553 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
1554 if (fenceFd >= 0) {
1555 ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
1556 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
1557 }
1558 }
1559
1560 switch (info->mStatus) {
1561 case BufferInfo::OWNED_BY_US:
1562 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
1563 (void)cancelBufferToNativeWindow(info);
1564 }
1565 // fall through
1566
1567 case BufferInfo::OWNED_BY_NATIVE_WINDOW:
1568 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
1569 break;
1570
1571 default:
1572 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
1573 err = FAILED_TRANSACTION;
1574 break;
1575 }
1576
1577 if (info->mFenceFd >= 0) {
1578 ::close(info->mFenceFd);
1579 }
1580
1581 if (portIndex == kPortIndexOutput) {
1582 mRenderTracker.untrackFrame(info->mRenderInfo, i);
1583 info->mRenderInfo = NULL;
1584 }
1585
1586 // remove buffer even if mOMX->freeBuffer fails
1587 mBuffers[portIndex].removeAt(i);
1588 return err;
1589 }
1590
1591 ACodec::BufferInfo *ACodec::findBufferByID(
1592 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
1593 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
1594 BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
1595
1596 if (info->mBufferID == bufferID) {
1597 if (index != NULL) {
1598 *index = i;
1599 }
1600 return info;
1601 }
1602 }
1603
1604 ALOGE("Could not find buffer with ID %u", bufferID);
1605 return NULL;
1606 }
1607
1608 status_t ACodec::setComponentRole(
1609 bool isEncoder, const char *mime) {
1610 const char *role = getComponentRole(isEncoder, mime);
1611 if (role == NULL) {
1612 return BAD_VALUE;
1613 }
1614 status_t err = setComponentRole(mOMX, mNode, role);
1615 if (err != OK) {
1616 ALOGW("[%s] Failed to set standard component role '%s'.",
1617 mComponentName.c_str(), role);
1618 }
1619 return err;
1620 }
1621
1622 //static
1623 const char *ACodec::getComponentRole(
1624 bool isEncoder, const char *mime) {
1625 struct MimeToRole {
1626 const char *mime;
1627 const char *decoderRole;
1628 const char *encoderRole;
1629 };
1630
1631 static const MimeToRole kMimeToRole[] = {
1632 { MEDIA_MIMETYPE_AUDIO_MPEG,
1633 "audio_decoder.mp3", "audio_encoder.mp3" },
1634 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
1635 "audio_decoder.mp1", "audio_encoder.mp1" },
1636 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
1637 "audio_decoder.mp2", "audio_encoder.mp2" },
1638 { MEDIA_MIMETYPE_AUDIO_AMR_NB,
1639 "audio_decoder.amrnb", "audio_encoder.amrnb" },
1640 { MEDIA_MIMETYPE_AUDIO_AMR_WB,
1641 "audio_decoder.amrwb", "audio_encoder.amrwb" },
1642 { MEDIA_MIMETYPE_AUDIO_AAC,
1643 "audio_decoder.aac", "audio_encoder.aac" },
1644 { MEDIA_MIMETYPE_AUDIO_VORBIS,
1645 "audio_decoder.vorbis", "audio_encoder.vorbis" },
1646 { MEDIA_MIMETYPE_AUDIO_OPUS,
1647 "audio_decoder.opus", "audio_encoder.opus" },
1648 { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
1649 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
1650 { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
1651 "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
1652 { MEDIA_MIMETYPE_VIDEO_AVC,
1653 "video_decoder.avc", "video_encoder.avc" },
1654 { MEDIA_MIMETYPE_VIDEO_HEVC,
1655 "video_decoder.hevc", "video_encoder.hevc" },
1656 { MEDIA_MIMETYPE_VIDEO_MPEG4,
1657 "video_decoder.mpeg4", "video_encoder.mpeg4" },
1658 { MEDIA_MIMETYPE_VIDEO_H263,
1659 "video_decoder.h263", "video_encoder.h263" },
1660 { MEDIA_MIMETYPE_VIDEO_VP8,
1661 "video_decoder.vp8", "video_encoder.vp8" },
1662 { MEDIA_MIMETYPE_VIDEO_VP9,
1663 "video_decoder.vp9", "video_encoder.vp9" },
1664 { MEDIA_MIMETYPE_AUDIO_RAW,
1665 "audio_decoder.raw", "audio_encoder.raw" },
1666 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
1667 "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
1668 { MEDIA_MIMETYPE_AUDIO_FLAC,
1669 "audio_decoder.flac", "audio_encoder.flac" },
1670 { MEDIA_MIMETYPE_AUDIO_MSGSM,
1671 "audio_decoder.gsm", "audio_encoder.gsm" },
1672 { MEDIA_MIMETYPE_VIDEO_MPEG2,
1673 "video_decoder.mpeg2", "video_encoder.mpeg2" },
1674 { MEDIA_MIMETYPE_AUDIO_AC3,
1675 "audio_decoder.ac3", "audio_encoder.ac3" },
1676 { MEDIA_MIMETYPE_AUDIO_EAC3,
1677 "audio_decoder.eac3", "audio_encoder.eac3" },
1678 };
1679
1680 static const size_t kNumMimeToRole =
1681 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
1682
1683 size_t i;
1684 for (i = 0; i < kNumMimeToRole; ++i) {
1685 if (!strcasecmp(mime, kMimeToRole[i].mime)) {
1686 break;
1687 }
1688 }
1689
1690 if (i == kNumMimeToRole) {
1691 return NULL;
1692 }
1693
1694 return isEncoder ? kMimeToRole[i].encoderRole
1695 : kMimeToRole[i].decoderRole;
1696 }
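// e.g. getComponentRole(false /* isEncoder */, MEDIA_MIMETYPE_VIDEO_AVC) yields
// "video_decoder.avc", while the encoder variant yields "video_encoder.avc";
// mime types missing from the table above yield NULL.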
1697
1698 //static
1699 status_t ACodec::setComponentRole(
1700 const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
1701 OMX_PARAM_COMPONENTROLETYPE roleParams;
1702 InitOMXParams(&roleParams);
1703
1704 strncpy((char *)roleParams.cRole,
1705 role, OMX_MAX_STRINGNAME_SIZE - 1);
1706
1707 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
1708
1709 return omx->setParameter(
1710 node, OMX_IndexParamStandardComponentRole,
1711 &roleParams, sizeof(roleParams));
1712 }
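// The role string selects the behavior of a component that implements several
// roles; e.g. setComponentRole(omx, node, "audio_decoder.aac") asks the
// component to act as an AAC decoder via OMX_IndexParamStandardComponentRole.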
1713
1714 status_t ACodec::configureCodec(
1715 const char *mime, const sp<AMessage> &msg) {
1716 int32_t encoder;
1717 if (!msg->findInt32("encoder", &encoder)) {
1718 encoder = false;
1719 }
1720
1721 sp<AMessage> inputFormat = new AMessage;
1722 sp<AMessage> outputFormat = new AMessage;
1723 mConfigFormat = msg;
1724
1725 mIsEncoder = encoder;
1726
1727 mInputMetadataType = kMetadataBufferTypeInvalid;
1728 mOutputMetadataType = kMetadataBufferTypeInvalid;
1729
1730 status_t err = setComponentRole(encoder /* isEncoder */, mime);
1731
1732 if (err != OK) {
1733 return err;
1734 }
1735
1736 int32_t bitRate = 0;
1737 // FLAC encoder doesn't need a bitrate, other encoders do
1738 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
1739 && !msg->findInt32("bitrate", &bitRate)) {
1740 return INVALID_OPERATION;
1741 }
1742
1743 // propagate bitrate to the output so that the muxer has it
1744 if (encoder && msg->findInt32("bitrate", &bitRate)) {
1745 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
1746 // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
1747 outputFormat->setInt32("bitrate", bitRate);
1748 outputFormat->setInt32("max-bitrate", bitRate);
1749 }
1750
1751 int32_t storeMeta;
1752 if (encoder
1753 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
1754 && storeMeta != kMetadataBufferTypeInvalid) {
1755 mInputMetadataType = (MetadataBufferType)storeMeta;
1756 err = mOMX->storeMetaDataInBuffers(
1757 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
1758 if (err != OK) {
1759 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
1760 mComponentName.c_str(), err);
1761
1762 return err;
1763 } else if (storeMeta == kMetadataBufferTypeANWBuffer
1764 && mInputMetadataType == kMetadataBufferTypeGrallocSource) {
1765 // IOMX translates ANWBuffers to gralloc source already.
1766 mInputMetadataType = (MetadataBufferType)storeMeta;
1767 }
1768
1769 uint32_t usageBits;
1770 if (mOMX->getParameter(
1771 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
1772 &usageBits, sizeof(usageBits)) == OK) {
1773 inputFormat->setInt32(
1774 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
1775 }
1776 }
1777
1778 int32_t prependSPSPPS = 0;
1779 if (encoder
1780 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
1781 && prependSPSPPS != 0) {
1782 OMX_INDEXTYPE index;
1783 err = mOMX->getExtensionIndex(
1784 mNode,
1785 "OMX.google.android.index.prependSPSPPSToIDRFrames",
1786 &index);
1787
1788 if (err == OK) {
1789 PrependSPSPPSToIDRFramesParams params;
1790 InitOMXParams(&params);
1791 params.bEnable = OMX_TRUE;
1792
1793 err = mOMX->setParameter(
1794 mNode, index, &params, sizeof(params));
1795 }
1796
1797 if (err != OK) {
1798 ALOGE("Encoder could not be configured to emit SPS/PPS before "
1799 "IDR frames. (err %d)", err);
1800
1801 return err;
1802 }
1803 }
1804
1805 // Only enable metadata mode on encoder output if encoder can prepend
1806 // sps/pps to idr frames, since in metadata mode the bitstream is in an
1807 // opaque handle, to which we don't have access.
1808 int32_t video = !strncasecmp(mime, "video/", 6);
1809 mIsVideo = video;
1810 if (encoder && video) {
1811 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
1812 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta)
1813 && storeMeta != 0);
1814
1815 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource;
1816 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);
1817 if (err != OK) {
1818 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
1819 mComponentName.c_str(), err);
1820 }
1821
1822 if (!msg->findInt64(
1823 "repeat-previous-frame-after",
1824 &mRepeatFrameDelayUs)) {
1825 mRepeatFrameDelayUs = -1ll;
1826 }
1827
1828 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
1829 mMaxPtsGapUs = -1ll;
1830 }
1831
1832 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
1833 mMaxFps = -1;
1834 }
1835
1836 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
1837 mTimePerCaptureUs = -1ll;
1838 }
1839
1840 if (!msg->findInt32(
1841 "create-input-buffers-suspended",
1842 (int32_t*)&mCreateInputBuffersSuspended)) {
1843 mCreateInputBuffersSuspended = false;
1844 }
1845 }
1846
1847 // NOTE: we only use native window for video decoders
1848 sp<RefBase> obj;
1849 bool haveNativeWindow = msg->findObject("native-window", &obj)
1850 && obj != NULL && video && !encoder;
1851 mUsingNativeWindow = haveNativeWindow;
1852 mLegacyAdaptiveExperiment = false;
1853 if (video && !encoder) {
1854 inputFormat->setInt32("adaptive-playback", false);
1855
1856 int32_t usageProtected;
1857 if (msg->findInt32("protected", &usageProtected) && usageProtected) {
1858 if (!haveNativeWindow) {
1859 ALOGE("protected output buffers must be sent to an ANativeWindow");
1860 return PERMISSION_DENIED;
1861 }
1862 mFlags |= kFlagIsGrallocUsageProtected;
1863 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
1864 }
1865
1866 if (mFlags & kFlagIsSecure) {
1867 // use native_handles for secure input buffers
1868 err = mOMX->enableNativeBuffers(
1869 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE);
1870 ALOGI_IF(err != OK, "falling back to non-native_handles");
1871 err = OK; // ignore error for now
1872 }
1873 }
1874 if (haveNativeWindow) {
1875 sp<ANativeWindow> nativeWindow =
1876 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get()));
1877
1878 // START of temporary support for automatic FRC - THIS WILL BE REMOVED
1879 int32_t autoFrc;
1880 if (msg->findInt32("auto-frc", &autoFrc)) {
1881 bool enabled = autoFrc;
1882 OMX_CONFIG_BOOLEANTYPE config;
1883 InitOMXParams(&config);
1884 config.bEnabled = (OMX_BOOL)enabled;
1885 status_t temp = mOMX->setConfig(
1886 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
1887 &config, sizeof(config));
1888 if (temp == OK) {
1889 outputFormat->setInt32("auto-frc", enabled);
1890 } else if (enabled) {
1891 ALOGI("codec does not support requested auto-frc (err %d)", temp);
1892 }
1893 }
1894 // END of temporary support for automatic FRC
1895
1896 int32_t tunneled;
1897 if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
1898 tunneled != 0) {
1899 ALOGI("Configuring TUNNELED video playback.");
1900 mTunneled = true;
1901
1902 int32_t audioHwSync = 0;
1903 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
1904 ALOGW("No Audio HW Sync provided for video tunnel");
1905 }
1906 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
1907 if (err != OK) {
1908 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!",
1909 audioHwSync, nativeWindow.get());
1910 return err;
1911 }
1912
1913 int32_t maxWidth = 0, maxHeight = 0;
1914 if (msg->findInt32("max-width", &maxWidth) &&
1915 msg->findInt32("max-height", &maxHeight)) {
1916
1917 err = mOMX->prepareForAdaptivePlayback(
1918 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
1919 if (err != OK) {
1920 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
1921 mComponentName.c_str(), err);
1922 // allow failure
1923 err = OK;
1924 } else {
1925 inputFormat->setInt32("max-width", maxWidth);
1926 inputFormat->setInt32("max-height", maxHeight);
1927 inputFormat->setInt32("adaptive-playback", true);
1928 }
1929 }
1930 } else {
1931 ALOGV("Configuring CPU controlled video playback.");
1932 mTunneled = false;
1933
1934 // Explicitly reset the sideband handle of the window for
1935 // non-tunneled video in case the window was previously used
1936 // for a tunneled video playback.
1937 err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
1938 if (err != OK) {
1939 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
1940 return err;
1941 }
1942
1943 // Always try to enable dynamic output buffers on native surface
1944 mOutputMetadataType = kMetadataBufferTypeANWBuffer;
1945 err = mOMX->storeMetaDataInBuffers(
1946 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
1947 if (err != OK) {
1948 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
1949 mComponentName.c_str(), err);
1950
1951 // if adaptive playback has been requested, try JB fallback
1952 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
1953 // LARGE MEMORY REQUIREMENT
1954
1955 // we will not do adaptive playback on software accessed
1956 // surfaces as they never had to respond to changes in the
1957 // crop window, and we don't trust that they will be able to.
1958 int usageBits = 0;
1959 bool canDoAdaptivePlayback;
1960
1961 if (nativeWindow->query(
1962 nativeWindow.get(),
1963 NATIVE_WINDOW_CONSUMER_USAGE_BITS,
1964 &usageBits) != OK) {
1965 canDoAdaptivePlayback = false;
1966 } else {
1967 canDoAdaptivePlayback =
1968 (usageBits &
1969 (GRALLOC_USAGE_SW_READ_MASK |
1970 GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
1971 }
1972
1973 int32_t maxWidth = 0, maxHeight = 0;
1974 if (canDoAdaptivePlayback &&
1975 msg->findInt32("max-width", &maxWidth) &&
1976 msg->findInt32("max-height", &maxHeight)) {
1977 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
1978 mComponentName.c_str(), maxWidth, maxHeight);
1979
1980 err = mOMX->prepareForAdaptivePlayback(
1981 mNode, kPortIndexOutput, OMX_TRUE, maxWidth,
1982 maxHeight);
1983 ALOGW_IF(err != OK,
1984 "[%s] prepareForAdaptivePlayback failed w/ err %d",
1985 mComponentName.c_str(), err);
1986
1987 if (err == OK) {
1988 inputFormat->setInt32("max-width", maxWidth);
1989 inputFormat->setInt32("max-height", maxHeight);
1990 inputFormat->setInt32("adaptive-playback", true);
1991 }
1992 }
1993 // allow failure
1994 err = OK;
1995 } else {
1996 ALOGV("[%s] storeMetaDataInBuffers succeeded",
1997 mComponentName.c_str());
1998 CHECK(storingMetadataInDecodedBuffers());
1999 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled(
2000 "legacy-adaptive", !msg->contains("no-experiments"));
2001
2002 inputFormat->setInt32("adaptive-playback", true);
2003 }
2004
2005 int32_t push;
2006 if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
2007 && push != 0) {
2008 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
2009 }
2010 }
2011
2012 int32_t rotationDegrees;
2013 if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
2014 mRotationDegrees = rotationDegrees;
2015 } else {
2016 mRotationDegrees = 0;
2017 }
2018 }
2019
2020 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
2021 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
2022 // invalid encodings will default to PCM-16bit in setupRawAudioFormat.
2023
2024 if (video) {
2025 // determine need for software renderer
2026 bool usingSwRenderer = false;
2027 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) {
2028 usingSwRenderer = true;
2029 haveNativeWindow = false;
2030 }
2031
2032 if (encoder) {
2033 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat);
2034 } else {
2035 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
2036 }
2037
2038 if (err != OK) {
2039 return err;
2040 }
2041
2042 if (haveNativeWindow) {
2043 mNativeWindow = static_cast<Surface *>(obj.get());
2044 }
2045
2046 // initialize native window now to get actual output format
2047 // TODO: this is needed for some encoders even though they don't use native window
2048 err = initNativeWindow();
2049 if (err != OK) {
2050 return err;
2051 }
2052
2053 // fallback for devices that do not handle flex-YUV for native buffers
2054 if (haveNativeWindow) {
2055 int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
2056 if (msg->findInt32("color-format", &requestedColorFormat) &&
2057 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
2058 status_t err = getPortFormat(kPortIndexOutput, outputFormat);
2059 if (err != OK) {
2060 return err;
2061 }
2062 int32_t colorFormat = OMX_COLOR_FormatUnused;
2063 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
2064 if (!outputFormat->findInt32("color-format", &colorFormat)) {
2065 ALOGE("ouptut port did not have a color format (wrong domain?)");
2066 return BAD_VALUE;
2067 }
2068 ALOGD("[%s] Requested output format %#x and got %#x.",
2069 mComponentName.c_str(), requestedColorFormat, colorFormat);
2070 if (!isFlexibleColorFormat(
2071 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
2072 || flexibleEquivalent != (OMX_U32)requestedColorFormat) {
2073 // device did not handle flex-YUV request for native window, fall back
2074 // to SW renderer
2075 ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
2076 mNativeWindow.clear();
2077 mNativeWindowUsageBits = 0;
2078 haveNativeWindow = false;
2079 usingSwRenderer = true;
2080 if (storingMetadataInDecodedBuffers()) {
2081 err = mOMX->storeMetaDataInBuffers(
2082 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType);
2083 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case
2084 // TODO: implement adaptive-playback support for bytebuffer mode.
2085 // This is done by SW codecs, but most HW codecs don't support it.
2086 inputFormat->setInt32("adaptive-playback", false);
2087 }
2088 if (err == OK) {
2089 err = mOMX->enableNativeBuffers(
2090 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
2091 }
2092 if (mFlags & kFlagIsGrallocUsageProtected) {
2093 // fallback is not supported for protected playback
2094 err = PERMISSION_DENIED;
2095 } else if (err == OK) {
2096 err = setupVideoDecoder(
2097 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
2098 }
2099 }
2100 }
2101 }
2102
2103 if (usingSwRenderer) {
2104 outputFormat->setInt32("using-sw-renderer", 1);
2105 }
2106 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
2107 int32_t numChannels, sampleRate;
2108 if (!msg->findInt32("channel-count", &numChannels)
2109 || !msg->findInt32("sample-rate", &sampleRate)) {
2110 // Since we did not always check for these, leave them optional
2111 // and have the decoder figure it all out.
2112 err = OK;
2113 } else {
2114 err = setupRawAudioFormat(
2115 encoder ? kPortIndexInput : kPortIndexOutput,
2116 sampleRate,
2117 numChannels);
2118 }
2119 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
2120 int32_t numChannels, sampleRate;
2121 if (!msg->findInt32("channel-count", &numChannels)
2122 || !msg->findInt32("sample-rate", &sampleRate)) {
2123 err = INVALID_OPERATION;
2124 } else {
2125 int32_t isADTS, aacProfile;
2126 int32_t sbrMode;
2127 int32_t maxOutputChannelCount;
2128 int32_t pcmLimiterEnable;
2129 drcParams_t drc;
2130 if (!msg->findInt32("is-adts", &isADTS)) {
2131 isADTS = 0;
2132 }
2133 if (!msg->findInt32("aac-profile", &aacProfile)) {
2134 aacProfile = OMX_AUDIO_AACObjectNull;
2135 }
2136 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) {
2137 sbrMode = -1;
2138 }
2139
2140 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
2141 maxOutputChannelCount = -1;
2142 }
2143 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
2144 // value is unknown
2145 pcmLimiterEnable = -1;
2146 }
2147 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
2148 // value is unknown
2149 drc.encodedTargetLevel = -1;
2150 }
2151 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
2152 // value is unknown
2153 drc.drcCut = -1;
2154 }
2155 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
2156 // value is unknown
2157 drc.drcBoost = -1;
2158 }
2159 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
2160 // value is unknown
2161 drc.heavyCompression = -1;
2162 }
2163 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
2164 // value is unknown
2165 drc.targetRefLevel = -1;
2166 }
2167
2168 err = setupAACCodec(
2169 encoder, numChannels, sampleRate, bitRate, aacProfile,
2170 isADTS != 0, sbrMode, maxOutputChannelCount, drc,
2171 pcmLimiterEnable);
2172 }
2173 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
2174 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
2175 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
2176 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
2177 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
2178 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
2179 // These are PCM-like formats with a fixed sample rate but
2180 // a variable number of channels.
2181
2182 int32_t numChannels;
2183 if (!msg->findInt32("channel-count", &numChannels)) {
2184 err = INVALID_OPERATION;
2185 } else {
2186 int32_t sampleRate;
2187 if (!msg->findInt32("sample-rate", &sampleRate)) {
2188 sampleRate = 8000;
2189 }
2190 err = setupG711Codec(encoder, sampleRate, numChannels);
2191 }
2192 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
2193 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1;
2194 if (encoder &&
2195 (!msg->findInt32("channel-count", &numChannels)
2196 || !msg->findInt32("sample-rate", &sampleRate))) {
2197 ALOGE("missing channel count or sample rate for FLAC encoder");
2198 err = INVALID_OPERATION;
2199 } else {
2200 if (encoder) {
2201 if (!msg->findInt32(
2202 "complexity", &compressionLevel) &&
2203 !msg->findInt32(
2204 "flac-compression-level", &compressionLevel)) {
2205 compressionLevel = 5; // default FLAC compression level
2206 } else if (compressionLevel < 0) {
2207 ALOGW("compression level %d outside [0..8] range, "
2208 "using 0",
2209 compressionLevel);
2210 compressionLevel = 0;
2211 } else if (compressionLevel > 8) {
2212 ALOGW("compression level %d outside [0..8] range, "
2213 "using 8",
2214 compressionLevel);
2215 compressionLevel = 8;
2216 }
2217 }
2218 err = setupFlacCodec(
2219 encoder, numChannels, sampleRate, compressionLevel);
2220 }
2221 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
2222 int32_t numChannels, sampleRate;
2223 if (encoder
2224 || !msg->findInt32("channel-count", &numChannels)
2225 || !msg->findInt32("sample-rate", &sampleRate)) {
2226 err = INVALID_OPERATION;
2227 } else {
2228 err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding);
2229 }
2230 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
2231 int32_t numChannels;
2232 int32_t sampleRate;
2233 if (!msg->findInt32("channel-count", &numChannels)
2234 || !msg->findInt32("sample-rate", &sampleRate)) {
2235 err = INVALID_OPERATION;
2236 } else {
2237 err = setupAC3Codec(encoder, numChannels, sampleRate);
2238 }
2239 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
2240 int32_t numChannels;
2241 int32_t sampleRate;
2242 if (!msg->findInt32("channel-count", &numChannels)
2243 || !msg->findInt32("sample-rate", &sampleRate)) {
2244 err = INVALID_OPERATION;
2245 } else {
2246 err = setupEAC3Codec(encoder, numChannels, sampleRate);
2247 }
2248 }
2249
2250 if (err != OK) {
2251 return err;
2252 }
2253
2254 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) {
2255 mEncoderDelay = 0;
2256 }
2257
2258 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) {
2259 mEncoderPadding = 0;
2260 }
2261
2262 if (msg->findInt32("channel-mask", &mChannelMask)) {
2263 mChannelMaskPresent = true;
2264 } else {
2265 mChannelMaskPresent = false;
2266 }
2267
2268 int32_t maxInputSize;
2269 if (msg->findInt32("max-input-size", &maxInputSize)) {
2270 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
2271 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
2272 err = setMinBufferSize(kPortIndexInput, 8192); // XXX
2273 }
2274
2275 int32_t priority;
2276 if (msg->findInt32("priority", &priority)) {
2277 err = setPriority(priority);
2278 }
2279
2280 int32_t rateInt = -1;
2281 float rateFloat = -1;
2282 if (!msg->findFloat("operating-rate", &rateFloat)) {
2283 msg->findInt32("operating-rate", &rateInt);
2284 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound.
2285 }
2286 if (rateFloat > 0) {
2287 err = setOperatingRate(rateFloat, video);
2288 }
2289
2290 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
2291 mBaseOutputFormat = outputFormat;
2292 // trigger a kWhatOutputFormatChanged msg on first buffer
2293 mLastOutputFormat.clear();
2294
2295 err = getPortFormat(kPortIndexInput, inputFormat);
2296 if (err == OK) {
2297 err = getPortFormat(kPortIndexOutput, outputFormat);
2298 if (err == OK) {
2299 mInputFormat = inputFormat;
2300 mOutputFormat = outputFormat;
2301 }
2302 }
2303
2304 // create data converters if needed
2305 if (!video && err == OK) {
2306 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
2307 if (encoder) {
2308 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
2309 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
2310 if (mConverter[kPortIndexInput] != NULL) {
2311 mInputFormat->setInt32("pcm-encoding", pcmEncoding);
2312 }
2313 } else {
2314 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
2315 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
2316 if (mConverter[kPortIndexOutput] != NULL) {
2317 mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
2318 }
2319 }
2320 }
2321
2322 return err;
2323 }
2324
2325 status_t ACodec::setPriority(int32_t priority) {
2326 if (priority < 0) {
2327 return BAD_VALUE;
2328 }
2329 OMX_PARAM_U32TYPE config;
2330 InitOMXParams(&config);
2331 config.nU32 = (OMX_U32)priority;
2332 status_t temp = mOMX->setConfig(
2333 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
2334 &config, sizeof(config));
2335 if (temp != OK) {
2336 ALOGI("codec does not support config priority (err %d)", temp);
2337 }
2338 return OK;
2339 }
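// By MediaCodec convention lower values mean higher priority (0 = realtime,
// 1 = best effort). The config is advisory: if the component rejects it, the
// error is logged above but not propagated.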
2340
2341 status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
2342 if (rateFloat < 0) {
2343 return BAD_VALUE;
2344 }
2345 OMX_U32 rate;
2346 if (isVideo) {
2347 if (rateFloat > 65535) {
2348 return BAD_VALUE;
2349 }
2350 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
2351 } else {
2352 if (rateFloat > UINT_MAX) {
2353 return BAD_VALUE;
2354 }
2355 rate = (OMX_U32)(rateFloat);
2356 }
2357 OMX_PARAM_U32TYPE config;
2358 InitOMXParams(&config);
2359 config.nU32 = rate;
2360 status_t err = mOMX->setConfig(
2361 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
2362 &config, sizeof(config));
2363 if (err != OK) {
2364 ALOGI("codec does not support config operating rate (err %d)", err);
2365 }
2366 return OK;
2367 }
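// For video the operating rate is a Q16 frames-per-second value, e.g. 30.0f
// becomes 30 * 65536 = 1966080; for audio the sample rate in Hz is passed
// through unscaled (44100.0f -> 44100). Failures are logged but not propagated.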
2368
2369 status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
2370 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
2371 InitOMXParams(&params);
2372 params.nPortIndex = kPortIndexOutput;
2373 status_t err = mOMX->getConfig(
2374 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
2375 if (err == OK) {
2376 *intraRefreshPeriod = params.nRefreshPeriod;
2377 return OK;
2378 }
2379
2380 // Fallback to query through standard OMX index.
2381 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
2382 InitOMXParams(&refreshParams);
2383 refreshParams.nPortIndex = kPortIndexOutput;
2384 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
2385 err = mOMX->getParameter(
2386 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
2387 if (err != OK || refreshParams.nCirMBs == 0) {
2388 *intraRefreshPeriod = 0;
2389 return OK;
2390 }
2391
2392 // Calculate period based on width and height
2393 uint32_t width, height;
2394 OMX_PARAM_PORTDEFINITIONTYPE def;
2395 InitOMXParams(&def);
2396 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
2397 def.nPortIndex = kPortIndexOutput;
2398 err = mOMX->getParameter(
2399 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2400 if (err != OK) {
2401 *intraRefreshPeriod = 0;
2402 return err;
2403 }
2404 width = video_def->nFrameWidth;
2405 height = video_def->nFrameHeight;
2406 // Use H.264/AVC MacroBlock size 16x16
2407 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);
2408
2409 return OK;
2410 }
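// Worked example for the cyclic fallback: a 1920x1080 stream has
// divUp(1920, 16) * divUp(1080, 16) = 120 * 68 = 8160 macroblocks, so a codec
// reporting nCirMBs = 510 yields a refresh period of divUp(8160, 510) = 16 frames.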
2411
2412 status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
2413 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
2414 InitOMXParams(&params);
2415 params.nPortIndex = kPortIndexOutput;
2416 params.nRefreshPeriod = intraRefreshPeriod;
2417 status_t err = mOMX->setConfig(
2418 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
2419 if (err == OK) {
2420 return OK;
2421 }
2422
2423 // setParameter may only be invoked while configuring, so the cyclic fallback below is limited to that case.
2424 if (!inConfigure) {
2425 return INVALID_OPERATION;
2426 } else {
2427 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
2428 }
2429
2430 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
2431 InitOMXParams(&refreshParams);
2432 refreshParams.nPortIndex = kPortIndexOutput;
2433 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
2434
2435 if (intraRefreshPeriod == 0) {
2436 // 0 means disable intra refresh.
2437 refreshParams.nCirMBs = 0;
2438 } else {
2439 // Calculate macroblocks that need to be intra coded based on width and height
2440 uint32_t width, height;
2441 OMX_PARAM_PORTDEFINITIONTYPE def;
2442 InitOMXParams(&def);
2443 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
2444 def.nPortIndex = kPortIndexOutput;
2445 err = mOMX->getParameter(
2446 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2447 if (err != OK) {
2448 return err;
2449 }
2450 width = video_def->nFrameWidth;
2451 height = video_def->nFrameHeight;
2452 // Use H.264/AVC MacroBlock size 16x16
2453 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
2454 }
2455
2456 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
2457 &refreshParams, sizeof(refreshParams));
2458 if (err != OK) {
2459 return err;
2460 }
2461
2462 return OK;
2463 }
2464
2465 status_t ACodec::configureTemporalLayers(
2466 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) {
2467 if (!mIsVideo || !mIsEncoder) {
2468 return INVALID_OPERATION;
2469 }
2470
2471 AString tsSchema;
2472 if (!msg->findString("ts-schema", &tsSchema)) {
2473 return OK;
2474 }
2475
2476 unsigned int numLayers = 0;
2477 unsigned int numBLayers = 0;
2478 int tags;
2479 char dummy;
2480 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
2481 OMX_VIDEO_AndroidTemporalLayeringPatternNone;
2482 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
2483 && numLayers > 0) {
2484 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
2485 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
2486 &numLayers, &dummy, &numBLayers, &dummy))
2487 && (tags == 1 || (tags == 3 && dummy == '+'))
2488 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
2489 numLayers += numBLayers;
2490 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
2491 } else {
2492 ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
2493 return BAD_VALUE;
2494 }
2495
2496 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams;
2497 InitOMXParams(&layerParams);
2498 layerParams.nPortIndex = kPortIndexOutput;
2499
2500 status_t err = mOMX->getParameter(
2501 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
2502 &layerParams, sizeof(layerParams));
2503
2504 if (err != OK) {
2505 return err;
2506 } else if (!(layerParams.eSupportedPatterns & pattern)) {
2507 return BAD_VALUE;
2508 }
2509
2510 numLayers = min(numLayers, layerParams.nLayerCountMax);
2511 numBLayers = min(numBLayers, layerParams.nBLayerCountMax);
2512
2513 if (!inConfigure) {
2514 OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig;
2515 InitOMXParams(&layerConfig);
2516 layerConfig.nPortIndex = kPortIndexOutput;
2517 layerConfig.ePattern = pattern;
2518 layerConfig.nPLayerCountActual = numLayers - numBLayers;
2519 layerConfig.nBLayerCountActual = numBLayers;
2520 layerConfig.bBitrateRatiosSpecified = OMX_FALSE;
2521
2522 err = mOMX->setConfig(
2523 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
2524 &layerConfig, sizeof(layerConfig));
2525 } else {
2526 layerParams.ePattern = pattern;
2527 layerParams.nPLayerCountActual = numLayers - numBLayers;
2528 layerParams.nBLayerCountActual = numBLayers;
2529 layerParams.bBitrateRatiosSpecified = OMX_FALSE;
2530
2531 err = mOMX->setParameter(
2532 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
2533 &layerParams, sizeof(layerParams));
2534 }
2535
2536 AString configSchema;
2537 if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) {
2538 configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers);
2539 } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
2540 configSchema = AStringPrintf("webrtc.vp8.%u", numLayers);
2541 }
2542
2543 if (err != OK) {
2544 ALOGW("Failed to set temporal layers to %s (requested %s)",
2545 configSchema.c_str(), tsSchema.c_str());
2546 return err;
2547 }
2548
2549 err = mOMX->getParameter(
2550 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
2551 &layerParams, sizeof(layerParams));
2552
2553 if (err == OK) {
2554 ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)",
2555 tsSchema.c_str(), configSchema.c_str(),
2556 asString(layerParams.ePattern), layerParams.ePattern,
2557 layerParams.nPLayerCountActual, layerParams.nBLayerCountActual);
2558
2559 if (outputFormat.get() == mOutputFormat.get()) {
2560 mOutputFormat = mOutputFormat->dup(); // trigger an output format change event
2561 }
2562 // assume we got what we configured
2563 outputFormat->setString("ts-schema", configSchema);
2564 }
2565 return err;
2566 }
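// Accepted ts-schema examples: "webrtc.vp8.3-layer" requests three WebRTC-style
// temporal layers, and "android.generic.7+1" requests 7 P-layers plus 1 B-layer
// (8 layers total after the numLayers += numBLayers adjustment above).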
2567
2568 status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
2569 OMX_PARAM_PORTDEFINITIONTYPE def;
2570 InitOMXParams(&def);
2571 def.nPortIndex = portIndex;
2572
2573 status_t err = mOMX->getParameter(
2574 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2575
2576 if (err != OK) {
2577 return err;
2578 }
2579
2580 if (def.nBufferSize >= size) {
2581 return OK;
2582 }
2583
2584 def.nBufferSize = size;
2585
2586 err = mOMX->setParameter(
2587 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2588
2589 if (err != OK) {
2590 return err;
2591 }
2592
2593 err = mOMX->getParameter(
2594 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2595
2596 if (err != OK) {
2597 return err;
2598 }
2599
2600 if (def.nBufferSize < size) {
2601 ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
2602 return FAILED_TRANSACTION;
2603 }
2604
2605 return OK;
2606 }
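// The readback is needed because components may legally adjust nBufferSize;
// e.g. setMinBufferSize(kPortIndexInput, 8192) only succeeds if the component
// reports at least 8192 bytes afterwards.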
2607
2608 status_t ACodec::selectAudioPortFormat(
2609 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
2610 OMX_AUDIO_PARAM_PORTFORMATTYPE format;
2611 InitOMXParams(&format);
2612
2613 format.nPortIndex = portIndex;
2614 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
2615 format.nIndex = index;
2616 status_t err = mOMX->getParameter(
2617 mNode, OMX_IndexParamAudioPortFormat,
2618 &format, sizeof(format));
2619
2620 if (err != OK) {
2621 return err;
2622 }
2623
2624 if (format.eEncoding == desiredFormat) {
2625 break;
2626 }
2627
2628 if (index == kMaxIndicesToCheck) {
2629 ALOGW("[%s] stopping checking formats after %u: %s(%x)",
2630 mComponentName.c_str(), index,
2631 asString(format.eEncoding), format.eEncoding);
2632 return ERROR_UNSUPPORTED;
2633 }
2634 }
2635
2636 return mOMX->setParameter(
2637 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
2638 }
2639
2640 status_t ACodec::setupAACCodec(
2641 bool encoder, int32_t numChannels, int32_t sampleRate,
2642 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
2643 int32_t maxOutputChannelCount, const drcParams_t& drc,
2644 int32_t pcmLimiterEnable) {
2645 if (encoder && isADTS) {
2646 return -EINVAL;
2647 }
2648
2649 status_t err = setupRawAudioFormat(
2650 encoder ? kPortIndexInput : kPortIndexOutput,
2651 sampleRate,
2652 numChannels);
2653
2654 if (err != OK) {
2655 return err;
2656 }
2657
2658 if (encoder) {
2659 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);
2660
2661 if (err != OK) {
2662 return err;
2663 }
2664
2665 OMX_PARAM_PORTDEFINITIONTYPE def;
2666 InitOMXParams(&def);
2667 def.nPortIndex = kPortIndexOutput;
2668
2669 err = mOMX->getParameter(
2670 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2671
2672 if (err != OK) {
2673 return err;
2674 }
2675
2676 def.format.audio.bFlagErrorConcealment = OMX_TRUE;
2677 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
2678
2679 err = mOMX->setParameter(
2680 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2681
2682 if (err != OK) {
2683 return err;
2684 }
2685
2686 OMX_AUDIO_PARAM_AACPROFILETYPE profile;
2687 InitOMXParams(&profile);
2688 profile.nPortIndex = kPortIndexOutput;
2689
2690 err = mOMX->getParameter(
2691 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
2692
2693 if (err != OK) {
2694 return err;
2695 }
2696
2697 profile.nChannels = numChannels;
2698
2699 profile.eChannelMode =
2700 (numChannels == 1)
2701 ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;
2702
2703 profile.nSampleRate = sampleRate;
2704 profile.nBitRate = bitRate;
2705 profile.nAudioBandWidth = 0;
2706 profile.nFrameLength = 0;
2707 profile.nAACtools = OMX_AUDIO_AACToolAll;
2708 profile.nAACERtools = OMX_AUDIO_AACERNone;
2709 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
2710 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
2711 switch (sbrMode) {
2712 case 0:
2713 // disable sbr
2714 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
2715 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
2716 break;
2717 case 1:
2718 // enable single-rate sbr
2719 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
2720 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
2721 break;
2722 case 2:
2723 // enable dual-rate sbr
2724 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
2725 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
2726 break;
2727 case -1:
2728 // enable both modes -> the codec will decide which mode should be used
2729 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
2730 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
2731 break;
2732 default:
2733 // unsupported sbr mode
2734 return BAD_VALUE;
2735 }
2736
2737
2738 err = mOMX->setParameter(
2739 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
2740
2741 if (err != OK) {
2742 return err;
2743 }
2744
2745 return err;
2746 }
2747
2748 OMX_AUDIO_PARAM_AACPROFILETYPE profile;
2749 InitOMXParams(&profile);
2750 profile.nPortIndex = kPortIndexInput;
2751
2752 err = mOMX->getParameter(
2753 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
2754
2755 if (err != OK) {
2756 return err;
2757 }
2758
2759 profile.nChannels = numChannels;
2760 profile.nSampleRate = sampleRate;
2761
2762 profile.eAACStreamFormat =
2763 isADTS
2764 ? OMX_AUDIO_AACStreamFormatMP4ADTS
2765 : OMX_AUDIO_AACStreamFormatMP4FF;
2766
2767 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
2768 InitOMXParams(&presentation);
2769 presentation.nMaxOutputChannels = maxOutputChannelCount;
2770 presentation.nDrcCut = drc.drcCut;
2771 presentation.nDrcBoost = drc.drcBoost;
2772 presentation.nHeavyCompression = drc.heavyCompression;
2773 presentation.nTargetReferenceLevel = drc.targetRefLevel;
2774 presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
2775 presentation.nPCMLimiterEnable = pcmLimiterEnable;
2776
2777 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
2778 if (res == OK) {
2779 // optional parameters, will not cause configuration failure
2780 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
2781 &presentation, sizeof(presentation));
2782 } else {
2783 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
2784 }
2785 return res;
2786 }
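// The presentation fields mirror the "aac-*" keys parsed in configureCodec();
// a value of -1 means "not specified" and presumably leaves the decoder's own
// default in effect.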
2787
2788 status_t ACodec::setupAC3Codec(
2789 bool encoder, int32_t numChannels, int32_t sampleRate) {
2790 status_t err = setupRawAudioFormat(
2791 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);
2792
2793 if (err != OK) {
2794 return err;
2795 }
2796
2797 if (encoder) {
2798 ALOGW("AC3 encoding is not supported.");
2799 return INVALID_OPERATION;
2800 }
2801
2802 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
2803 InitOMXParams(&def);
2804 def.nPortIndex = kPortIndexInput;
2805
2806 err = mOMX->getParameter(
2807 mNode,
2808 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
2809 &def,
2810 sizeof(def));
2811
2812 if (err != OK) {
2813 return err;
2814 }
2815
2816 def.nChannels = numChannels;
2817 def.nSampleRate = sampleRate;
2818
2819 return mOMX->setParameter(
2820 mNode,
2821 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
2822 &def,
2823 sizeof(def));
2824 }
2825
2826 status_t ACodec::setupEAC3Codec(
2827 bool encoder, int32_t numChannels, int32_t sampleRate) {
2828 status_t err = setupRawAudioFormat(
2829 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);
2830
2831 if (err != OK) {
2832 return err;
2833 }
2834
2835 if (encoder) {
2836 ALOGW("EAC3 encoding is not supported.");
2837 return INVALID_OPERATION;
2838 }
2839
2840 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
2841 InitOMXParams(&def);
2842 def.nPortIndex = kPortIndexInput;
2843
2844 err = mOMX->getParameter(
2845 mNode,
2846 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
2847 &def,
2848 sizeof(def));
2849
2850 if (err != OK) {
2851 return err;
2852 }
2853
2854 def.nChannels = numChannels;
2855 def.nSampleRate = sampleRate;
2856
2857 return mOMX->setParameter(
2858 mNode,
2859 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
2860 &def,
2861 sizeof(def));
2862 }
2863
2864 static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
2865 bool isAMRWB, int32_t bps) {
2866 if (isAMRWB) {
2867 if (bps <= 6600) {
2868 return OMX_AUDIO_AMRBandModeWB0;
2869 } else if (bps <= 8850) {
2870 return OMX_AUDIO_AMRBandModeWB1;
2871 } else if (bps <= 12650) {
2872 return OMX_AUDIO_AMRBandModeWB2;
2873 } else if (bps <= 14250) {
2874 return OMX_AUDIO_AMRBandModeWB3;
2875 } else if (bps <= 15850) {
2876 return OMX_AUDIO_AMRBandModeWB4;
2877 } else if (bps <= 18250) {
2878 return OMX_AUDIO_AMRBandModeWB5;
2879 } else if (bps <= 19850) {
2880 return OMX_AUDIO_AMRBandModeWB6;
2881 } else if (bps <= 23050) {
2882 return OMX_AUDIO_AMRBandModeWB7;
2883 }
2884
2885 // 23850 bps
2886 return OMX_AUDIO_AMRBandModeWB8;
2887 } else { // AMRNB
2888 if (bps <= 4750) {
2889 return OMX_AUDIO_AMRBandModeNB0;
2890 } else if (bps <= 5150) {
2891 return OMX_AUDIO_AMRBandModeNB1;
2892 } else if (bps <= 5900) {
2893 return OMX_AUDIO_AMRBandModeNB2;
2894 } else if (bps <= 6700) {
2895 return OMX_AUDIO_AMRBandModeNB3;
2896 } else if (bps <= 7400) {
2897 return OMX_AUDIO_AMRBandModeNB4;
2898 } else if (bps <= 7950) {
2899 return OMX_AUDIO_AMRBandModeNB5;
2900 } else if (bps <= 10200) {
2901 return OMX_AUDIO_AMRBandModeNB6;
2902 }
2903
2904 // 12200 bps
2905 return OMX_AUDIO_AMRBandModeNB7;
2906 }
2907 }
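// e.g. pickModeFromBitRate(true /* isAMRWB */, 12650) returns
// OMX_AUDIO_AMRBandModeWB2, and any narrowband rate above 10200 bps maps to
// the highest NB7 (12.2 kbps) mode.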
2908
2909 status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
2910 OMX_AUDIO_PARAM_AMRTYPE def;
2911 InitOMXParams(&def);
2912 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;
2913
2914 status_t err =
2915 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
2916
2917 if (err != OK) {
2918 return err;
2919 }
2920
2921 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
2922 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);
2923
2924 err = mOMX->setParameter(
2925 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
2926
2927 if (err != OK) {
2928 return err;
2929 }
2930
2931 return setupRawAudioFormat(
2932 encoder ? kPortIndexInput : kPortIndexOutput,
2933 isWAMR ? 16000 : 8000 /* sampleRate */,
2934 1 /* numChannels */);
2935 }
2936
2937 status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
2938 if (encoder) {
2939 return INVALID_OPERATION;
2940 }
2941
2942 return setupRawAudioFormat(
2943 kPortIndexInput, sampleRate, numChannels);
2944 }
2945
2946 status_t ACodec::setupFlacCodec(
2947 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {
2948
2949 if (encoder) {
2950 OMX_AUDIO_PARAM_FLACTYPE def;
2951 InitOMXParams(&def);
2952 def.nPortIndex = kPortIndexOutput;
2953
2954 // configure compression level
2955 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
2956 if (err != OK) {
2957 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
2958 return err;
2959 }
2960 def.nCompressionLevel = compressionLevel;
2961 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
2962 if (err != OK) {
2963 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
2964 return err;
2965 }
2966 }
2967
2968 return setupRawAudioFormat(
2969 encoder ? kPortIndexInput : kPortIndexOutput,
2970 sampleRate,
2971 numChannels);
2972 }
2973
2974 status_t ACodec::setupRawAudioFormat(
2975 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
2976 OMX_PARAM_PORTDEFINITIONTYPE def;
2977 InitOMXParams(&def);
2978 def.nPortIndex = portIndex;
2979
2980 status_t err = mOMX->getParameter(
2981 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2982
2983 if (err != OK) {
2984 return err;
2985 }
2986
2987 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
2988
2989 err = mOMX->setParameter(
2990 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2991
2992 if (err != OK) {
2993 return err;
2994 }
2995
2996 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
2997 InitOMXParams(&pcmParams);
2998 pcmParams.nPortIndex = portIndex;
2999
3000 err = mOMX->getParameter(
3001 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
3002
3003 if (err != OK) {
3004 return err;
3005 }
3006
3007 pcmParams.nChannels = numChannels;
3008 switch (encoding) {
3009 case kAudioEncodingPcm8bit:
3010 pcmParams.eNumData = OMX_NumericalDataUnsigned;
3011 pcmParams.nBitPerSample = 8;
3012 break;
3013 case kAudioEncodingPcmFloat:
3014 pcmParams.eNumData = OMX_NumericalDataFloat;
3015 pcmParams.nBitPerSample = 32;
3016 break;
3017 case kAudioEncodingPcm16bit:
3018 pcmParams.eNumData = OMX_NumericalDataSigned;
3019 pcmParams.nBitPerSample = 16;
3020 break;
3021 default:
3022 return BAD_VALUE;
3023 }
3024 pcmParams.bInterleaved = OMX_TRUE;
3025 pcmParams.nSamplingRate = sampleRate;
3026 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;
3027
3028 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
3029 return OMX_ErrorNone;
3030 }
3031
3032 err = mOMX->setParameter(
3033 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
3034 // if we could not set up raw format to non-16-bit, try with 16-bit
3035 // NOTE: we will also verify this via readback, in case codec ignores these fields
3036 if (err != OK && encoding != kAudioEncodingPcm16bit) {
3037 pcmParams.eNumData = OMX_NumericalDataSigned;
3038 pcmParams.nBitPerSample = 16;
3039 err = mOMX->setParameter(
3040 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
3041 }
3042 return err;
3043 }
3044
3045 status_t ACodec::configureTunneledVideoPlayback(
3046 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
3047 native_handle_t* sidebandHandle;
3048
3049 status_t err = mOMX->configureVideoTunnelMode(
3050 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
3051 if (err != OK) {
3052 ALOGE("configureVideoTunnelMode failed! (err %d).", err);
3053 return err;
3054 }
3055
3056 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
3057 if (err != OK) {
3058 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
3059 sidebandHandle, err);
3060 return err;
3061 }
3062
3063 return OK;
3064 }
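// In tunneled mode decoded frames never pass through ACodec: the component
// returns a sideband handle that is attached to the window so the display
// pipeline can consume the stream directly.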
3065
3066 status_t ACodec::setVideoPortFormatType(
3067 OMX_U32 portIndex,
3068 OMX_VIDEO_CODINGTYPE compressionFormat,
3069 OMX_COLOR_FORMATTYPE colorFormat,
3070 bool usingNativeBuffers) {
3071 OMX_VIDEO_PARAM_PORTFORMATTYPE format;
3072 InitOMXParams(&format);
3073 format.nPortIndex = portIndex;
3074 format.nIndex = 0;
3075 bool found = false;
3076
3077 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
3078 format.nIndex = index;
3079 status_t err = mOMX->getParameter(
3080 mNode, OMX_IndexParamVideoPortFormat,
3081 &format, sizeof(format));
3082
3083 if (err != OK) {
3084 return err;
3085 }
3086
3087 // substitute back flexible color format to codec supported format
3088 OMX_U32 flexibleEquivalent;
3089 if (compressionFormat == OMX_VIDEO_CodingUnused
3090 && isFlexibleColorFormat(
3091 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
3092 && colorFormat == flexibleEquivalent) {
3093 ALOGI("[%s] using color format %#x in place of %#x",
3094 mComponentName.c_str(), format.eColorFormat, colorFormat);
3095 colorFormat = format.eColorFormat;
3096 }
3097
3098 // The following assertion is violated by TI's video decoder.
3099 // CHECK_EQ(format.nIndex, index);
3100
3101 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
3102 if (portIndex == kPortIndexInput
3103 && colorFormat == format.eColorFormat) {
3104 // eCompressionFormat does not seem right.
3105 found = true;
3106 break;
3107 }
3108 if (portIndex == kPortIndexOutput
3109 && compressionFormat == format.eCompressionFormat) {
3110 // eColorFormat does not seem right.
3111 found = true;
3112 break;
3113 }
3114 }
3115
3116 if (format.eCompressionFormat == compressionFormat
3117 && format.eColorFormat == colorFormat) {
3118 found = true;
3119 break;
3120 }
3121
3122 if (index == kMaxIndicesToCheck) {
3123 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
3124 mComponentName.c_str(), index,
3125 asString(format.eCompressionFormat), format.eCompressionFormat,
3126 asString(format.eColorFormat), format.eColorFormat);
3127 }
3128 }
3129
3130 if (!found) {
3131 return UNKNOWN_ERROR;
3132 }
3133
3134 status_t err = mOMX->setParameter(
3135 mNode, OMX_IndexParamVideoPortFormat,
3136 &format, sizeof(format));
3137
3138 return err;
3139 }
3140
3141 // Set optimal output format. OMX component lists output formats in the order
3142 // of preference, but this got more complicated since the introduction of flexible
3143 // YUV formats. We support a legacy behavior for applications that do not use
3144 // surface output, do not specify an output format, but expect a "usable" standard
3145 // OMX format. SW readable and standard formats must be flex-YUV.
3146 //
3147 // Suggested preference order:
3148 // - optimal format for texture rendering (mediaplayer behavior)
3149 // - optimal SW readable & texture renderable format (flex-YUV support)
3150 // - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
3151 // - legacy "usable" standard formats
3152 //
3153 // For legacy support, we prefer a standard format, but will settle for a SW readable
3154 // flex-YUV format.
3155 status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
3156 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
3157 InitOMXParams(&format);
3158 format.nPortIndex = kPortIndexOutput;
3159
3160 InitOMXParams(&legacyFormat);
3161 // this field will change when we find a suitable legacy format
3162 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;
3163
3164 for (OMX_U32 index = 0; ; ++index) {
3165 format.nIndex = index;
3166 status_t err = mOMX->getParameter(
3167 mNode, OMX_IndexParamVideoPortFormat,
3168 &format, sizeof(format));
3169 if (err != OK) {
3170 // no more formats, pick legacy format if found
3171 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
3172 memcpy(&format, &legacyFormat, sizeof(format));
3173 break;
3174 }
3175 return err;
3176 }
3177 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
3178 return OMX_ErrorBadParameter;
3179 }
3180 if (!getLegacyFlexibleFormat) {
3181 break;
3182 }
3183 // standard formats that were exposed to users before
3184 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
3185 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
3186 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
3187 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
3188 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
3189 break;
3190 }
3191 // find best legacy non-standard format
3192 OMX_U32 flexibleEquivalent;
3193 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
3194 && isFlexibleColorFormat(
3195 mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
3196 &flexibleEquivalent)
3197 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
3198 memcpy(&legacyFormat, &format, sizeof(format));
3199 }
3200 }
3201 return mOMX->setParameter(
3202 mNode, OMX_IndexParamVideoPortFormat,
3203 &format, sizeof(format));
3204 }
3205
3206 static const struct VideoCodingMapEntry {
3207 const char *mMime;
3208 OMX_VIDEO_CODINGTYPE mVideoCodingType;
3209 } kVideoCodingMapEntry[] = {
3210 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
3211 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
3212 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
3213 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
3214 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
3215 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
3216 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
3217 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
3218 };
3219
3220 static status_t GetVideoCodingTypeFromMime(
3221 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
3222 for (size_t i = 0;
3223 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
3224 ++i) {
3225 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
3226 *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
3227 return OK;
3228 }
3229 }
3230
3231 *codingType = OMX_VIDEO_CodingUnused;
3232
3233 return ERROR_UNSUPPORTED;
3234 }
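// e.g. GetVideoCodingTypeFromMime(MEDIA_MIMETYPE_VIDEO_VP9, &coding) sets
// coding to OMX_VIDEO_CodingVP9 and returns OK; unrecognized mime types leave
// OMX_VIDEO_CodingUnused and return ERROR_UNSUPPORTED.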
3235
3236 static status_t GetMimeTypeForVideoCoding(
3237 OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
3238 for (size_t i = 0;
3239 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
3240 ++i) {
3241 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
3242 *mime = kVideoCodingMapEntry[i].mMime;
3243 return OK;
3244 }
3245 }
3246
3247 mime->clear();
3248
3249 return ERROR_UNSUPPORTED;
3250 }
3251
3252 status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) {
3253 OMX_PARAM_PORTDEFINITIONTYPE def;
3254 InitOMXParams(&def);
3255 def.nPortIndex = portIndex;
3256 status_t err;
3257 ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
3258 portIndex == kPortIndexInput ? "input" : "output", bufferNum);
3259 err = mOMX->getParameter(
3260 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
3261 if (err != OK) {
3262 return err;
3263 }
3264 def.nBufferCountActual = bufferNum;
3265 err = mOMX->setParameter(
3266 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
3267 if (err != OK) {
3268 // Component could reject this request.
3269 ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
3270 portIndex == kPortIndexInput ? "input" : "output", bufferNum);
3271 }
3272 return OK;
3273 }
3274
3275 status_t ACodec::setupVideoDecoder(
3276 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
3277 bool usingSwRenderer, sp<AMessage> &outputFormat) {
3278 int32_t width, height;
3279 if (!msg->findInt32("width", &width)
3280 || !msg->findInt32("height", &height)) {
3281 return INVALID_OPERATION;
3282 }
3283
3284 OMX_VIDEO_CODINGTYPE compressionFormat;
3285 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
3286
3287 if (err != OK) {
3288 return err;
3289 }
3290
3291 if (compressionFormat == OMX_VIDEO_CodingVP9) {
3292 OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
3293 InitOMXParams(&params);
3294 params.nPortIndex = kPortIndexInput;
3295 // Check if VP9 decoder advertises supported profiles.
3296 params.nProfileIndex = 0;
3297 status_t err = mOMX->getParameter(
3298 mNode,
3299 OMX_IndexParamVideoProfileLevelQuerySupported,
3300 &params,
3301 sizeof(params));
3302 mIsLegacyVP9Decoder = err != OK;
3303 }
3304
3305 err = setVideoPortFormatType(
3306 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
3307
3308 if (err != OK) {
3309 return err;
3310 }
3311
3312 int32_t tmp;
3313 if (msg->findInt32("color-format", &tmp)) {
3314 OMX_COLOR_FORMATTYPE colorFormat =
3315 static_cast<OMX_COLOR_FORMATTYPE>(tmp);
3316 err = setVideoPortFormatType(
3317 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
3318 if (err != OK) {
3319 ALOGW("[%s] does not support color format %d",
3320 mComponentName.c_str(), colorFormat);
3321 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
3322 }
3323 } else {
3324 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
3325 }
3326
3327 if (err != OK) {
3328 return err;
3329 }
3330
3331 // Set the component input buffer count to |tmp|. If this succeeds, the
3332 // component will use |tmp| buffers on its input port. If it fails, the
3333 // component keeps its previous buffer count.
3334 if (msg->findInt32("android._num-input-buffers", &tmp)) {
3335 err = setPortBufferNum(kPortIndexInput, tmp);
3336 if (err != OK)
3337 return err;
3338 }
3339
3340 // Set the component output buffer count to |tmp|. If this succeeds, the
3341 // component will use |tmp| buffers on its output port. If it fails, the
3342 // component keeps its previous buffer count.
3343 if (msg->findInt32("android._num-output-buffers", &tmp)) {
3344 err = setPortBufferNum(kPortIndexOutput, tmp);
3345 if (err != OK)
3346 return err;
3347 }
3348
3349 int32_t frameRateInt;
3350 float frameRateFloat;
3351 if (!msg->findFloat("frame-rate", &frameRateFloat)) {
3352 if (!msg->findInt32("frame-rate", &frameRateInt)) {
3353 frameRateInt = -1;
3354 }
3355 frameRateFloat = (float)frameRateInt;
3356 }
3357
3358 err = setVideoFormatOnPort(
3359 kPortIndexInput, width, height, compressionFormat, frameRateFloat);
3360
3361 if (err != OK) {
3362 return err;
3363 }
3364
3365 err = setVideoFormatOnPort(
3366 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);
3367
3368 if (err != OK) {
3369 return err;
3370 }
3371
3372 err = setColorAspectsForVideoDecoder(
3373 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
3374 if (err == ERROR_UNSUPPORTED) { // support is optional
3375 err = OK;
3376 }
3377
3378 if (err != OK) {
3379 return err;
3380 }
3381
3382 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
3383 if (err == ERROR_UNSUPPORTED) { // support is optional
3384 err = OK;
3385 }
3386 return err;
3387 }
3388
3389 status_t ACodec::initDescribeColorAspectsIndex() {
3390 status_t err = mOMX->getExtensionIndex(
3391 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
3392 if (err != OK) {
3393 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
3394 }
3395 return err;
3396 }
3397
3398 status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
3399 status_t err = ERROR_UNSUPPORTED;
3400 if (mDescribeColorAspectsIndex) {
3401 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
3402 }
3403 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
3404 mComponentName.c_str(),
3405 params.sAspects.mRange, asString(params.sAspects.mRange),
3406 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
3407 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
3408 params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
3409 err, asString(err));
3410
3411 if (verify && err == OK) {
3412 err = getCodecColorAspects(params);
3413 }
3414
3415 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
3416 "[%s] setting color aspects failed even though codec advertises support",
3417 mComponentName.c_str());
3418 return err;
3419 }
3420
3421 status_t ACodec::setColorAspectsForVideoDecoder(
3422 int32_t width, int32_t height, bool usingNativeWindow,
3423 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
3424 DescribeColorAspectsParams params;
3425 InitOMXParams(&params);
3426 params.nPortIndex = kPortIndexOutput;
3427
3428 getColorAspectsFromFormat(configFormat, params.sAspects);
3429 if (usingNativeWindow) {
3430 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
3431 // The default aspects will be set back to the output format during the
3432 // getFormat phase of configure(). Set non-Unspecified values back into the
3433 // format, in case component does not support this enumeration.
3434 setColorAspectsIntoFormat(params.sAspects, outputFormat);
3435 }
3436
3437 (void)initDescribeColorAspectsIndex();
3438
3439 // communicate color aspects to codec
3440 return setCodecColorAspects(params);
3441 }
3442
3443 status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
3444 status_t err = ERROR_UNSUPPORTED;
3445 if (mDescribeColorAspectsIndex) {
3446 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
3447 }
3448 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
3449 mComponentName.c_str(),
3450 params.sAspects.mRange, asString(params.sAspects.mRange),
3451 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
3452 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
3453 params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
3454 err, asString(err));
3455 if (params.bRequestingDataSpace) {
3456 ALOGV("for dataspace %#x", params.nDataSpace);
3457 }
3458 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
3459 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
3460 ALOGW("[%s] getting color aspects failed even though codec advertises support",
3461 mComponentName.c_str());
3462 }
3463 return err;
3464 }
3465
3466 status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
3467 DescribeColorAspectsParams params;
3468 InitOMXParams(&params);
3469 params.nPortIndex = kPortIndexInput;
3470 status_t err = getCodecColorAspects(params);
3471 if (err == OK) {
3472 // we only set encoder input aspects if codec supports them
3473 setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
3474 }
3475 return err;
3476 }
3477
3478 status_t ACodec::getDataSpace(
3479 DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
3480 bool tryCodec) {
3481 status_t err = OK;
3482 if (tryCodec) {
3483 // request dataspace guidance from codec.
3484 params.bRequestingDataSpace = OMX_TRUE;
3485 err = getCodecColorAspects(params);
3486 params.bRequestingDataSpace = OMX_FALSE;
3487 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
3488 *dataSpace = (android_dataspace)params.nDataSpace;
3489 return err;
3490 } else if (err == ERROR_UNSUPPORTED) {
3491 // ignore not-implemented error for dataspace requests
3492 err = OK;
3493 }
3494 }
3495
3496 // this returns legacy versions if available
3497 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
3498 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
3499 "and dataspace %#x",
3500 mComponentName.c_str(),
3501 params.sAspects.mRange, asString(params.sAspects.mRange),
3502 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
3503 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
3504 params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
3505 *dataSpace);
3506 return err;
3507 }
3508
3509
3510 status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
3511 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
3512 android_dataspace *dataSpace) {
3513 DescribeColorAspectsParams params;
3514 InitOMXParams(&params);
3515 params.nPortIndex = kPortIndexOutput;
3516
3517 // reset default format and get resulting format
3518 getColorAspectsFromFormat(configFormat, params.sAspects);
3519 if (dataSpace != NULL) {
3520 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
3521 }
3522 status_t err = setCodecColorAspects(params, true /* readBack */);
3523
3524 // we always set specified aspects for decoders
3525 setColorAspectsIntoFormat(params.sAspects, outputFormat);
3526
3527 if (dataSpace != NULL) {
3528 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
3529 if (err == OK) {
3530 err = res;
3531 }
3532 }
3533
3534 return err;
3535 }
3536
3537 // initial video encoder setup for bytebuffer mode
3538 status_t ACodec::setColorAspectsForVideoEncoder(
3539 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
3540 // copy config to output format as this is not exposed via getFormat
3541 copyColorConfig(configFormat, outputFormat);
3542
3543 DescribeColorAspectsParams params;
3544 InitOMXParams(&params);
3545 params.nPortIndex = kPortIndexInput;
3546 getColorAspectsFromFormat(configFormat, params.sAspects);
3547
3548 (void)initDescribeColorAspectsIndex();
3549
3550 int32_t usingRecorder;
3551 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
3552 android_dataspace dataSpace = HAL_DATASPACE_BT709;
3553 int32_t width, height;
3554 if (configFormat->findInt32("width", &width)
3555 && configFormat->findInt32("height", &height)) {
3556 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
3557 status_t err = getDataSpace(
3558 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
3559 if (err != OK) {
3560 return err;
3561 }
3562 setColorAspectsIntoFormat(params.sAspects, outputFormat);
3563 }
3564 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
3565 }
3566
3567 // communicate color aspects to codec, but do not allow change of the platform aspects
3568 ColorAspects origAspects = params.sAspects;
3569 for (int triesLeft = 2; --triesLeft >= 0; ) {
3570 status_t err = setCodecColorAspects(params, true /* readBack */);
3571 if (err != OK
3572 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
3573 params.sAspects, origAspects, true /* usePlatformAspects */)) {
3574 return err;
3575 }
3576 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
3577 mComponentName.c_str());
3578 }
3579 return OK;
3580 }
3581
3582 status_t ACodec::setHDRStaticInfoForVideoCodec(
3583 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
3584 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
3585
3586 DescribeHDRStaticInfoParams params;
3587 InitOMXParams(&params);
3588 params.nPortIndex = portIndex;
3589
3590 HDRStaticInfo *info = &params.sInfo;
3591 if (getHDRStaticInfoFromFormat(configFormat, info)) {
3592 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
3593 }
3594
3595 (void)initDescribeHDRStaticInfoIndex();
3596
3597 // communicate HDR static Info to codec
3598 return setHDRStaticInfo(params);
3599 }
3600
3601 // subsequent initial video encoder setup for surface mode
3602 status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
3603 android_dataspace *dataSpace /* nonnull */) {
3604 DescribeColorAspectsParams params;
3605 InitOMXParams(&params);
3606 params.nPortIndex = kPortIndexInput;
3607 ColorAspects &aspects = params.sAspects;
3608
3609 // reset default format and store resulting format into both input and output formats
3610 getColorAspectsFromFormat(mConfigFormat, aspects);
3611 int32_t width, height;
3612 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
3613 setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
3614 }
3615 setColorAspectsIntoFormat(aspects, mInputFormat);
3616 setColorAspectsIntoFormat(aspects, mOutputFormat);
3617
3618 // communicate color aspects to codec, but do not allow any change
3619 ColorAspects origAspects = aspects;
3620 status_t err = OK;
3621 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
3622 err = setCodecColorAspects(params, true /* readBack */);
3623 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
3624 break;
3625 }
3626 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
3627 mComponentName.c_str());
3628 }
3629
3630 *dataSpace = HAL_DATASPACE_BT709;
3631 aspects = origAspects; // restore desired color aspects
3632 status_t res = getDataSpace(
3633 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
3634 if (err == OK) {
3635 err = res;
3636 }
3637 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
3638 mInputFormat->setBuffer(
3639 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));
3640
3641 // update input format with codec supported color aspects (basically set unsupported
3642 // aspects to Unspecified)
3643 if (err == OK) {
3644 (void)getInputColorAspectsForVideoEncoder(mInputFormat);
3645 }
3646
3647 ALOGV("set default color aspects, updated input format to %s, output format to %s",
3648 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());
3649
3650 return err;
3651 }
3652
3653 status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
3654 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
3655 DescribeHDRStaticInfoParams params;
3656 InitOMXParams(&params);
3657 params.nPortIndex = portIndex;
3658
3659 status_t err = getHDRStaticInfo(params);
3660 if (err == OK) {
3661 // we only set the decoder output HDRStaticInfo if the codec supports it
3662 setHDRStaticInfoIntoFormat(params.sInfo, format);
3663 }
3664 return err;
3665 }
3666
3667 status_t ACodec::initDescribeHDRStaticInfoIndex() {
3668 status_t err = mOMX->getExtensionIndex(
3669 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
3670 if (err != OK) {
3671 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
3672 }
3673 return err;
3674 }
3675
3676 status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
3677 status_t err = ERROR_UNSUPPORTED;
3678 if (mDescribeHDRStaticInfoIndex) {
3679 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
3680 }
3681
3682 const HDRStaticInfo *info = &params.sInfo;
3683 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
3684 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
3685 mComponentName.c_str(),
3686 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
3687 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
3688 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
3689 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);
3690
3691 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
3692 "[%s] setting HDRStaticInfo failed even though codec advertises support",
3693 mComponentName.c_str());
3694 return err;
3695 }
3696
3697 status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
3698 status_t err = ERROR_UNSUPPORTED;
3699 if (mDescribeHDRStaticInfoIndex) {
3700 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
3701 }
3702
3703 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
3704 "[%s] getting HDRStaticInfo failed even though codec advertises support",
3705 mComponentName.c_str());
3706 return err;
3707 }
3708
3709 status_t ACodec::setupVideoEncoder(
3710 const char *mime, const sp<AMessage> &msg,
3711 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
3712 int32_t tmp;
3713 if (!msg->findInt32("color-format", &tmp)) {
3714 return INVALID_OPERATION;
3715 }
3716
3717 OMX_COLOR_FORMATTYPE colorFormat =
3718 static_cast<OMX_COLOR_FORMATTYPE>(tmp);
3719
3720 status_t err = setVideoPortFormatType(
3721 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);
3722
3723 if (err != OK) {
3724 ALOGE("[%s] does not support color format %d",
3725 mComponentName.c_str(), colorFormat);
3726
3727 return err;
3728 }
3729
3730 /* Input port configuration */
3731
3732 OMX_PARAM_PORTDEFINITIONTYPE def;
3733 InitOMXParams(&def);
3734
3735 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
3736
3737 def.nPortIndex = kPortIndexInput;
3738
3739 err = mOMX->getParameter(
3740 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
3741
3742 if (err != OK) {
3743 return err;
3744 }
3745
3746 int32_t width, height, bitrate;
3747 if (!msg->findInt32("width", &width)
3748 || !msg->findInt32("height", &height)
3749 || !msg->findInt32("bitrate", &bitrate)) {
3750 return INVALID_OPERATION;
3751 }
3752
3753 video_def->nFrameWidth = width;
3754 video_def->nFrameHeight = height;
3755
3756 int32_t stride;
3757 if (!msg->findInt32("stride", &stride)) {
3758 stride = width;
3759 }
3760
3761 video_def->nStride = stride;
3762
3763 int32_t sliceHeight;
3764 if (!msg->findInt32("slice-height", &sliceHeight)) {
3765 sliceHeight = height;
3766 }
3767
3768 video_def->nSliceHeight = sliceHeight;
3769
3770 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;
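// Added note: YUV 4:2:0 input uses 12 bits per pixel, so one frame needs
// stride * sliceHeight * 3 / 2 bytes, e.g. 1280x720 with stride 1280 and
// sliceHeight 720 -> 1,382,400 bytes.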
3771
3772 float frameRate;
3773 if (!msg->findFloat("frame-rate", &frameRate)) {
3774 int32_t tmp;
3775 if (!msg->findInt32("frame-rate", &tmp)) {
3776 return INVALID_OPERATION;
3777 }
3778 frameRate = (float)tmp;
3779 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
3780 }
3781
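// Added note: OMX carries frame rate in Q16 fixed point, so the multiplication
// by 65536 below encodes e.g. 30 fps as 30 << 16 = 1966080.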
3782 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
3783 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
3784 // this is redundant as it was already set up in setVideoPortFormatType
3785 // FIXME for now skip this only for flexible YUV formats
3786 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
3787 video_def->eColorFormat = colorFormat;
3788 }
3789
3790 err = mOMX->setParameter(
3791 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
3792
3793 if (err != OK) {
3794 ALOGE("[%s] failed to set input port definition parameters.",
3795 mComponentName.c_str());
3796
3797 return err;
3798 }
3799
3800 /* Output port configuration */
3801
3802 OMX_VIDEO_CODINGTYPE compressionFormat;
3803 err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
3804
3805 if (err != OK) {
3806 return err;
3807 }
3808
3809 err = setVideoPortFormatType(
3810 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);
3811
3812 if (err != OK) {
3813 ALOGE("[%s] does not support compression format %d",
3814 mComponentName.c_str(), compressionFormat);
3815
3816 return err;
3817 }
3818
3819 def.nPortIndex = kPortIndexOutput;
3820
3821 err = mOMX->getParameter(
3822 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
3823
3824 if (err != OK) {
3825 return err;
3826 }
3827
3828 video_def->nFrameWidth = width;
3829 video_def->nFrameHeight = height;
3830 video_def->xFramerate = 0;
3831 video_def->nBitrate = bitrate;
3832 video_def->eCompressionFormat = compressionFormat;
3833 video_def->eColorFormat = OMX_COLOR_FormatUnused;
3834
3835 err = mOMX->setParameter(
3836 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
3837
3838 if (err != OK) {
3839 ALOGE("[%s] failed to set output port definition parameters.",
3840 mComponentName.c_str());
3841
3842 return err;
3843 }
3844
3845 int32_t intraRefreshPeriod = 0;
3846 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
3847 && intraRefreshPeriod >= 0) {
3848 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
3849 if (err != OK) {
3850 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
3851 mComponentName.c_str());
3852 err = OK;
3853 }
3854 }
3855
3856 switch (compressionFormat) {
3857 case OMX_VIDEO_CodingMPEG4:
3858 err = setupMPEG4EncoderParameters(msg);
3859 break;
3860
3861 case OMX_VIDEO_CodingH263:
3862 err = setupH263EncoderParameters(msg);
3863 break;
3864
3865 case OMX_VIDEO_CodingAVC:
3866 err = setupAVCEncoderParameters(msg);
3867 break;
3868
3869 case OMX_VIDEO_CodingHEVC:
3870 err = setupHEVCEncoderParameters(msg);
3871 break;
3872
3873 case OMX_VIDEO_CodingVP8:
3874 case OMX_VIDEO_CodingVP9:
3875 err = setupVPXEncoderParameters(msg, outputFormat);
3876 break;
3877
3878 default:
3879 break;
3880 }
3881
3882 if (err != OK) {
3883 return err;
3884 }
3885
3886 // Set up color aspects on input, but propagate them to the output format, as they will
3887 // not be read back from encoder.
3888 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
3889 if (err == ERROR_UNSUPPORTED) {
3890 ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
3891 err = OK;
3892 }
3893
3894 if (err != OK) {
3895 return err;
3896 }
3897
3898 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
3899 if (err == ERROR_UNSUPPORTED) { // support is optional
3900 ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
3901 err = OK;
3902 }
3903
3904 if (err != OK) {
3905 return err;
3906 }
3907
3908 switch (compressionFormat) {
3909 case OMX_VIDEO_CodingAVC:
3910 case OMX_VIDEO_CodingHEVC:
3911 err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
3912 if (err != OK) {
3913 err = OK; // ignore failure
3914 }
3915 break;
3916
3917 case OMX_VIDEO_CodingVP8:
3918 case OMX_VIDEO_CodingVP9:
3919 // TODO: do we need to support android.generic layering? webrtc layering is
3920 // already set up in setupVPXEncoderParameters.
3921 break;
3922
3923 default:
3924 break;
3925 }
3926
3927 if (err == OK) {
3928 ALOGI("setupVideoEncoder succeeded");
3929 }
3930
3931 return err;
3932 }
3933
3934 status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
3935 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
3936 InitOMXParams(&params);
3937 params.nPortIndex = kPortIndexOutput;
3938
3939 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);
3940
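// Added note: OMX intra-refresh modes -- Cyclic (CIR) refreshes a fixed number of
// macroblocks per frame, Adaptive (AIR) picks macroblocks adaptively (e.g. based on
// motion), and Both combines the two; the message keys read below supply the counts.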
3941 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
3942 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
3943 int32_t mbs;
3944 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
3945 return INVALID_OPERATION;
3946 }
3947 params.nCirMBs = mbs;
3948 }
3949
3950 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
3951 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
3952 int32_t mbs;
3953 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
3954 return INVALID_OPERATION;
3955 }
3956 params.nAirMBs = mbs;
3957
3958 int32_t ref;
3959 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
3960 return INVALID_OPERATION;
3961 }
3962 params.nAirRef = ref;
3963 }
3964
3965 status_t err = mOMX->setParameter(
3966 mNode, OMX_IndexParamVideoIntraRefresh,
3967 &params, sizeof(params));
3968 return err;
3969 }
3970
3971 static OMX_U32 setPFramesSpacing(
3972 float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
3973 // BFramesSpacing is the number of B frames between I/P frames
3974 // PFramesSpacing (the value to be returned) is the number of P frames between I frames
3975 //
3976 // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
3977 // ^^^ ^^^ ^^^
3978 // number of B frames number of P I frame
3979 //
3980 // = (PFramesSpacing + 1) * (BFramesSpacing + 1)
3981 //
3982 // E.g.
3983 //      I   P   I  : I-interval: 8, nPFrames 1, nBFrames 3
3984 //       BBB BBB
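// Worked example (added for clarity): with iFramesInterval = 1 s, frameRate = 30 fps
// and BFramesSpacing = 1, keyFrameInterval = 30 and 30 / 2 = 15, so the function
// returns 14 P frames, i.e. (14 + 1) * (1 + 1) = 30 frames per key frame.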
3985
3986 if (iFramesInterval < 0) { // just 1 key frame
3987 return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
3988 } else if (iFramesInterval == 0) { // just key frames
3989 return 0;
3990 }
3991
3992 // round down as key-frame-interval is an upper limit
3993 uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
3994 OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
3995 return ret > 0 ? ret - 1 : 0;
3996 }
3997
3998 static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
3999 int32_t tmp;
4000 if (!msg->findInt32("bitrate-mode", &tmp)) {
4001 return OMX_Video_ControlRateVariable;
4002 }
4003
4004 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
4005 }
4006
4007 status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
4008 int32_t bitrate;
4009 float iFrameInterval;
4010 if (!msg->findInt32("bitrate", &bitrate)
4011 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
4012 return INVALID_OPERATION;
4013 }
4014
4015 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4016
4017 float frameRate;
4018 if (!msg->findFloat("frame-rate", &frameRate)) {
4019 int32_t tmp;
4020 if (!msg->findInt32("frame-rate", &tmp)) {
4021 return INVALID_OPERATION;
4022 }
4023 frameRate = (float)tmp;
4024 }
4025
4026 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
4027 InitOMXParams(&mpeg4type);
4028 mpeg4type.nPortIndex = kPortIndexOutput;
4029
4030 status_t err = mOMX->getParameter(
4031 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
4032
4033 if (err != OK) {
4034 return err;
4035 }
4036
4037 mpeg4type.nSliceHeaderSpacing = 0;
4038 mpeg4type.bSVH = OMX_FALSE;
4039 mpeg4type.bGov = OMX_FALSE;
4040
4041 mpeg4type.nAllowedPictureTypes =
4042 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
4043
4044 mpeg4type.nBFrames = 0;
4045 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
4046 if (mpeg4type.nPFrames == 0) {
4047 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
4048 }
4049 mpeg4type.nIDCVLCThreshold = 0;
4050 mpeg4type.bACPred = OMX_TRUE;
4051 mpeg4type.nMaxPacketSize = 256;
4052 mpeg4type.nTimeIncRes = 1000;
4053 mpeg4type.nHeaderExtension = 0;
4054 mpeg4type.bReversibleVLC = OMX_FALSE;
4055
4056 int32_t profile;
4057 if (msg->findInt32("profile", &profile)) {
4058 int32_t level;
4059 if (!msg->findInt32("level", &level)) {
4060 return INVALID_OPERATION;
4061 }
4062
4063 err = verifySupportForProfileAndLevel(profile, level);
4064
4065 if (err != OK) {
4066 return err;
4067 }
4068
4069 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
4070 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
4071 }
4072
4073 err = mOMX->setParameter(
4074 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
4075
4076 if (err != OK) {
4077 return err;
4078 }
4079
4080 err = configureBitrate(bitrate, bitrateMode);
4081
4082 if (err != OK) {
4083 return err;
4084 }
4085
4086 return setupErrorCorrectionParameters();
4087 }
4088
4089 status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
4090 int32_t bitrate;
4091 float iFrameInterval;
4092 if (!msg->findInt32("bitrate", &bitrate)
4093 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
4094 return INVALID_OPERATION;
4095 }
4096
4097 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4098
4099 float frameRate;
4100 if (!msg->findFloat("frame-rate", &frameRate)) {
4101 int32_t tmp;
4102 if (!msg->findInt32("frame-rate", &tmp)) {
4103 return INVALID_OPERATION;
4104 }
4105 frameRate = (float)tmp;
4106 }
4107
4108 OMX_VIDEO_PARAM_H263TYPE h263type;
4109 InitOMXParams(&h263type);
4110 h263type.nPortIndex = kPortIndexOutput;
4111
4112 status_t err = mOMX->getParameter(
4113 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
4114
4115 if (err != OK) {
4116 return err;
4117 }
4118
4119 h263type.nAllowedPictureTypes =
4120 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
4121
4122 h263type.nBFrames = 0;
4123 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
4124 if (h263type.nPFrames == 0) {
4125 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
4126 }
4127
4128 int32_t profile;
4129 if (msg->findInt32("profile", &profile)) {
4130 int32_t level;
4131 if (!msg->findInt32("level", &level)) {
4132 return INVALID_OPERATION;
4133 }
4134
4135 err = verifySupportForProfileAndLevel(profile, level);
4136
4137 if (err != OK) {
4138 return err;
4139 }
4140
4141 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
4142 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
4143 }
4144
4145 h263type.bPLUSPTYPEAllowed = OMX_FALSE;
4146 h263type.bForceRoundingTypeToZero = OMX_FALSE;
4147 h263type.nPictureHeaderRepetition = 0;
4148 h263type.nGOBHeaderInterval = 0;
4149
4150 err = mOMX->setParameter(
4151 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
4152
4153 if (err != OK) {
4154 return err;
4155 }
4156
4157 err = configureBitrate(bitrate, bitrateMode);
4158
4159 if (err != OK) {
4160 return err;
4161 }
4162
4163 return setupErrorCorrectionParameters();
4164 }
4165
4166 // static
4167 int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
4168 int width, int height, int rate, int bitrate,
4169 OMX_VIDEO_AVCPROFILETYPE profile) {
4170 // convert bitrate to main/baseline profile kbps equivalent
4171 switch (profile) {
4172 case OMX_VIDEO_AVCProfileHigh10:
4173 bitrate = divUp(bitrate, 3000); break;
4174 case OMX_VIDEO_AVCProfileHigh:
4175 bitrate = divUp(bitrate, 1250); break;
4176 default:
4177 bitrate = divUp(bitrate, 1000); break;
4178 }
4179
4180 // convert size and rate to MBs
4181 width = divUp(width, 16);
4182 height = divUp(height, 16);
4183 int mbs = width * height;
4184 rate *= mbs;
4185 int maxDimension = max(width, height);
4186
4187 static const int limits[][5] = {
4188 /* MBps MB dim bitrate level */
4189 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 },
4190 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b },
4191 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 },
4192 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 },
4193 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 },
4194 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 },
4195 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 },
4196 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 },
4197 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 },
4198 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 },
4199 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 },
4200 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 },
4201 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 },
4202 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 },
4203 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 },
4204 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
4205 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
4206 };
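// Worked example (added for clarity): 1920x1080 at 30 fps and 10 Mbps Baseline gives
// 120x68 = 8160 MBs, 244800 MB/s and 10000 kbps; the first row that fits is
// { 245760, 8192, 256, 20000 }, i.e. OMX_VIDEO_AVCLevel4.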
4207
4208 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
4209 const int (&limit)[5] = limits[i];
4210 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
4211 && bitrate <= limit[3]) {
4212 return limit[4];
4213 }
4214 }
4215 return 0;
4216 }
4217
4218 status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
4219 int32_t bitrate;
4220 float iFrameInterval;
4221 if (!msg->findInt32("bitrate", &bitrate)
4222 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
4223 return INVALID_OPERATION;
4224 }
4225
4226 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4227
4228 float frameRate;
4229 if (!msg->findFloat("frame-rate", &frameRate)) {
4230 int32_t tmp;
4231 if (!msg->findInt32("frame-rate", &tmp)) {
4232 return INVALID_OPERATION;
4233 }
4234 frameRate = (float)tmp;
4235 }
4236
4237 status_t err = OK;
4238 int32_t intraRefreshMode = 0;
4239 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
4240 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
4241 if (err != OK) {
4242 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x",
4243 intraRefreshMode, err);
4244 return err;
4245 }
4246 }
4247
4248 OMX_VIDEO_PARAM_AVCTYPE h264type;
4249 InitOMXParams(&h264type);
4250 h264type.nPortIndex = kPortIndexOutput;
4251
4252 err = mOMX->getParameter(
4253 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
4254
4255 if (err != OK) {
4256 return err;
4257 }
4258
4259 h264type.nAllowedPictureTypes =
4260 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
4261
4262 int32_t profile;
4263 if (msg->findInt32("profile", &profile)) {
4264 int32_t level;
4265 if (!msg->findInt32("level", &level)) {
4266 return INVALID_OPERATION;
4267 }
4268
4269 err = verifySupportForProfileAndLevel(profile, level);
4270
4271 if (err != OK) {
4272 return err;
4273 }
4274
4275 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
4276 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
4277 } else {
4278 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
4279 #if 0 /* DON'T YET DEFAULT TO HIGHEST PROFILE */
4280 // Use largest supported profile for AVC recording if profile is not specified.
4281 for (OMX_VIDEO_AVCPROFILETYPE profile : {
4282 OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) {
4283 if (verifySupportForProfileAndLevel(profile, 0) == OK) {
4284 h264type.eProfile = profile;
4285 break;
4286 }
4287 }
4288 #endif
4289 }
4290
4291 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]",
4292 asString(h264type.eProfile), asString(h264type.eLevel));
4293
4294 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
4295 h264type.nSliceHeaderSpacing = 0;
4296 h264type.bUseHadamard = OMX_TRUE;
4297 h264type.nRefFrames = 1;
4298 h264type.nBFrames = 0;
4299 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
4300 if (h264type.nPFrames == 0) {
4301 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
4302 }
4303 h264type.nRefIdx10ActiveMinus1 = 0;
4304 h264type.nRefIdx11ActiveMinus1 = 0;
4305 h264type.bEntropyCodingCABAC = OMX_FALSE;
4306 h264type.bWeightedPPrediction = OMX_FALSE;
4307 h264type.bconstIpred = OMX_FALSE;
4308 h264type.bDirect8x8Inference = OMX_FALSE;
4309 h264type.bDirectSpatialTemporal = OMX_FALSE;
4310 h264type.nCabacInitIdc = 0;
4311 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain ||
4312 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) {
4313 h264type.nSliceHeaderSpacing = 0;
4314 h264type.bUseHadamard = OMX_TRUE;
4315 h264type.nRefFrames = 2;
4316 h264type.nBFrames = 1;
4317 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
4318 h264type.nAllowedPictureTypes =
4319 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB;
4320 h264type.nRefIdx10ActiveMinus1 = 0;
4321 h264type.nRefIdx11ActiveMinus1 = 0;
4322 h264type.bEntropyCodingCABAC = OMX_TRUE;
4323 h264type.bWeightedPPrediction = OMX_TRUE;
4324 h264type.bconstIpred = OMX_TRUE;
4325 h264type.bDirect8x8Inference = OMX_TRUE;
4326 h264type.bDirectSpatialTemporal = OMX_TRUE;
4327 h264type.nCabacInitIdc = 1;
4328 }
4329
4330 if (h264type.nBFrames != 0) {
4331 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
4332 }
4333
4334 h264type.bEnableUEP = OMX_FALSE;
4335 h264type.bEnableFMO = OMX_FALSE;
4336 h264type.bEnableASO = OMX_FALSE;
4337 h264type.bEnableRS = OMX_FALSE;
4338 h264type.bFrameMBsOnly = OMX_TRUE;
4339 h264type.bMBAFF = OMX_FALSE;
4340 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
4341
4342 err = mOMX->setParameter(
4343 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
4344
4345 if (err != OK) {
4346 return err;
4347 }
4348
4349 // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering
4350 // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering
4351 // is preferred.
4352 AString tsSchema;
4353 int32_t preferBFrames = (int32_t)false;
4354 if (msg->findString("ts-schema", &tsSchema)
4355 && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) {
4356 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering;
4357 InitOMXParams(&layering);
4358 layering.nPortIndex = kPortIndexOutput;
4359 if (mOMX->getParameter(
4360 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
4361 &layering, sizeof(layering)) == OK
4362 && layering.eSupportedPatterns
4363 && layering.nBLayerCountMax == 0) {
4364 h264type.nBFrames = 0;
4365 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
4366 h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB;
4367 ALOGI("disabling B-frames");
4368 err = mOMX->setParameter(
4369 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
4370
4371 if (err != OK) {
4372 return err;
4373 }
4374 }
4375 }
4376
4377 return configureBitrate(bitrate, bitrateMode);
4378 }
4379
4380 status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
4381 int32_t bitrate;
4382 float iFrameInterval;
4383 if (!msg->findInt32("bitrate", &bitrate)
4384 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
4385 return INVALID_OPERATION;
4386 }
4387
4388 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4389
4390 float frameRate;
4391 if (!msg->findFloat("frame-rate", &frameRate)) {
4392 int32_t tmp;
4393 if (!msg->findInt32("frame-rate", &tmp)) {
4394 return INVALID_OPERATION;
4395 }
4396 frameRate = (float)tmp;
4397 }
4398
4399 OMX_VIDEO_PARAM_HEVCTYPE hevcType;
4400 InitOMXParams(&hevcType);
4401 hevcType.nPortIndex = kPortIndexOutput;
4402
4403 status_t err = OK;
4404 err = mOMX->getParameter(
4405 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
4406 if (err != OK) {
4407 return err;
4408 }
4409
4410 int32_t profile;
4411 if (msg->findInt32("profile", &profile)) {
4412 int32_t level;
4413 if (!msg->findInt32("level", &level)) {
4414 return INVALID_OPERATION;
4415 }
4416
4417 err = verifySupportForProfileAndLevel(profile, level);
4418 if (err != OK) {
4419 return err;
4420 }
4421
4422 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
4423 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
4424 }
4425 // TODO: finer control?
4426 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
4427
4428 err = mOMX->setParameter(
4429 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
4430 if (err != OK) {
4431 return err;
4432 }
4433
4434 return configureBitrate(bitrate, bitrateMode);
4435 }
4436
4437 status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
4438 int32_t bitrate;
4439 float iFrameInterval = 0;
4440 size_t tsLayers = 0;
4441 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
4442 OMX_VIDEO_VPXTemporalLayerPatternNone;
4443 static const uint32_t kVp8LayerRateAlloction
4444 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
4445 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
4446 {100, 100, 100}, // 1 layer
4447 { 60, 100, 100}, // 2 layers {60%, 40%}
4448 { 40, 60, 100}, // 3 layers {40%, 20%, 40%}
4449 };
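// Added note: these appear to be cumulative bitrate percentages per temporal layer,
// e.g. the two-layer row {60, 100, 100} gives the base layer 60% of the target
// bitrate and layers 0..1 together 100%; entries past the last layer stay at 100.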
4450 if (!msg->findInt32("bitrate", &bitrate)) {
4451 return INVALID_OPERATION;
4452 }
4453 msg->findAsFloat("i-frame-interval", &iFrameInterval);
4454
4455 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4456
4457 float frameRate;
4458 if (!msg->findFloat("frame-rate", &frameRate)) {
4459 int32_t tmp;
4460 if (!msg->findInt32("frame-rate", &tmp)) {
4461 return INVALID_OPERATION;
4462 }
4463 frameRate = (float)tmp;
4464 }
4465
4466 AString tsSchema;
4467 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE tsType =
4468 OMX_VIDEO_AndroidTemporalLayeringPatternNone;
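// Accepted ts-schema forms (examples, inferred from the parsing below):
//   "webrtc.vp8.2-layer"  -> 2 WebRTC-style temporal layers
//   "android.generic.2+1" -> 2 P layers + 1 B layer, counted as 3 layers for VPX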
4469
4470 if (msg->findString("ts-schema", &tsSchema)) {
4471 unsigned int numLayers = 0;
4472 unsigned int numBLayers = 0;
4473 int tags;
4474 char dummy;
4475 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
4476 && numLayers > 0) {
4477 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
4478 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
4479 tsLayers = numLayers;
4480 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
4481 &numLayers, &dummy, &numBLayers, &dummy))
4482 && (tags == 1 || (tags == 3 && dummy == '+'))
4483 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
4484 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
4485 // VPX does not have a concept of B-frames, so just count all layers
4486 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
4487 tsLayers = numLayers + numBLayers;
4488 } else {
4489 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
4490 }
4491 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS);
4492 }
4493
4494 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
4495 InitOMXParams(&vp8type);
4496 vp8type.nPortIndex = kPortIndexOutput;
4497 status_t err = mOMX->getParameter(
4498 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
4499 &vp8type, sizeof(vp8type));
4500
4501 if (err == OK) {
4502 if (iFrameInterval > 0) {
4503 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
4504 }
4505 vp8type.eTemporalPattern = pattern;
4506 vp8type.nTemporalLayerCount = tsLayers;
4507 if (tsLayers > 0) {
4508 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
4509 vp8type.nTemporalLayerBitrateRatio[i] =
4510 kVp8LayerRateAlloction[tsLayers - 1][i];
4511 }
4512 }
4513 if (bitrateMode == OMX_Video_ControlRateConstant) {
4514 vp8type.nMinQuantizer = 2;
4515 vp8type.nMaxQuantizer = 63;
4516 }
4517
4518 err = mOMX->setParameter(
4519 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
4520 &vp8type, sizeof(vp8type));
4521 if (err != OK) {
4522 ALOGW("Extended VP8 parameters set failed: %d", err);
4523 } else if (tsType == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
4524 // advertise even single layer WebRTC layering, as it is defined
4525 outputFormat->setString("ts-schema", AStringPrintf("webrtc.vp8.%u-layer", tsLayers));
4526 } else if (tsLayers > 0) {
4527 // tsType == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid
4528 outputFormat->setString("ts-schema", AStringPrintf("android.generic.%u", tsLayers));
4529 }
4530 }
4531
4532 return configureBitrate(bitrate, bitrateMode);
4533 }
4534
4535 status_t ACodec::verifySupportForProfileAndLevel(
4536 int32_t profile, int32_t level) {
4537 OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
4538 InitOMXParams(&params);
4539 params.nPortIndex = kPortIndexOutput;
4540
4541 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
4542 params.nProfileIndex = index;
4543 status_t err = mOMX->getParameter(
4544 mNode,
4545 OMX_IndexParamVideoProfileLevelQuerySupported,
4546 &params,
4547 sizeof(params));
4548
4549 if (err != OK) {
4550 return err;
4551 }
4552
4553 int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
4554 int32_t supportedLevel = static_cast<int32_t>(params.eLevel);
4555
4556 if (profile == supportedProfile && level <= supportedLevel) {
4557 return OK;
4558 }
4559
4560 if (index == kMaxIndicesToCheck) {
4561 ALOGW("[%s] stopping checking profiles after %u: %x/%x",
4562 mComponentName.c_str(), index,
4563 params.eProfile, params.eLevel);
4564 }
4565 }
4566 return ERROR_UNSUPPORTED;
4567 }
4568
4569 status_t ACodec::configureBitrate(
4570 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
4571 OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
4572 InitOMXParams(&bitrateType);
4573 bitrateType.nPortIndex = kPortIndexOutput;
4574
4575 status_t err = mOMX->getParameter(
4576 mNode, OMX_IndexParamVideoBitrate,
4577 &bitrateType, sizeof(bitrateType));
4578
4579 if (err != OK) {
4580 return err;
4581 }
4582
4583 bitrateType.eControlRate = bitrateMode;
4584 bitrateType.nTargetBitrate = bitrate;
4585
4586 return mOMX->setParameter(
4587 mNode, OMX_IndexParamVideoBitrate,
4588 &bitrateType, sizeof(bitrateType));
4589 }
4590
4591 status_t ACodec::setupErrorCorrectionParameters() {
4592 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
4593 InitOMXParams(&errorCorrectionType);
4594 errorCorrectionType.nPortIndex = kPortIndexOutput;
4595
4596 status_t err = mOMX->getParameter(
4597 mNode, OMX_IndexParamVideoErrorCorrection,
4598 &errorCorrectionType, sizeof(errorCorrectionType));
4599
4600 if (err != OK) {
4601 return OK; // Optional feature. Ignore this failure
4602 }
4603
4604 errorCorrectionType.bEnableHEC = OMX_FALSE;
4605 errorCorrectionType.bEnableResync = OMX_TRUE;
4606 errorCorrectionType.nResynchMarkerSpacing = 256;
4607 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
4608 errorCorrectionType.bEnableRVLC = OMX_FALSE;
4609
4610 return mOMX->setParameter(
4611 mNode, OMX_IndexParamVideoErrorCorrection,
4612 &errorCorrectionType, sizeof(errorCorrectionType));
4613 }
4614
4615 status_t ACodec::setVideoFormatOnPort(
4616 OMX_U32 portIndex,
4617 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
4618 float frameRate) {
4619 OMX_PARAM_PORTDEFINITIONTYPE def;
4620 InitOMXParams(&def);
4621 def.nPortIndex = portIndex;
4622
4623 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
4624
4625 status_t err = mOMX->getParameter(
4626 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
4627 if (err != OK) {
4628 return err;
4629 }
4630
4631 if (portIndex == kPortIndexInput) {
4632 // XXX Need a (much) better heuristic to compute input buffer sizes.
4633 const size_t X = 64 * 1024;
4634 if (def.nBufferSize < X) {
4635 def.nBufferSize = X;
4636 }
4637 }
4638
4639 if (def.eDomain != OMX_PortDomainVideo) {
4640 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
4641 return FAILED_TRANSACTION;
4642 }
4643
4644 video_def->nFrameWidth = width;
4645 video_def->nFrameHeight = height;
4646
4647 if (portIndex == kPortIndexInput) {
4648 video_def->eCompressionFormat = compressionFormat;
4649 video_def->eColorFormat = OMX_COLOR_FormatUnused;
4650 if (frameRate >= 0) {
4651 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
4652 }
4653 }
4654
4655 err = mOMX->setParameter(
4656 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
4657
4658 return err;
4659 }
4660
4661 status_t ACodec::initNativeWindow() {
4662 if (mNativeWindow != NULL) {
4663 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
4664 }
4665
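// Added note: with no native window attached, graphic (native) buffers are explicitly
// disabled on the output port so the component falls back to ordinary buffer
// allocation; the result of this call is not treated as fatal.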
4666 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
4667 return OK;
4668 }
4669
4670 size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
4671 size_t n = 0;
4672
4673 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
4674 const BufferInfo &info = mBuffers[portIndex].itemAt(i);
4675
4676 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
4677 ++n;
4678 }
4679 }
4680
4681 return n;
4682 }
4683
4684 size_t ACodec::countBuffersOwnedByNativeWindow() const {
4685 size_t n = 0;
4686
4687 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
4688 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);
4689
4690 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
4691 ++n;
4692 }
4693 }
4694
4695 return n;
4696 }
4697
4698 void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
4699 if (mNativeWindow == NULL) {
4700 return;
4701 }
4702
4703 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
4704 && dequeueBufferFromNativeWindow() != NULL) {
4705 // these buffers will be submitted as regular buffers; account for this
4706 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
4707 --mMetadataBuffersToSubmit;
4708 }
4709 }
4710 }
4711
4712 bool ACodec::allYourBuffersAreBelongToUs(
4713 OMX_U32 portIndex) {
4714 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
4715 BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
4716
4717 if (info->mStatus != BufferInfo::OWNED_BY_US
4718 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
4719 ALOGV("[%s] Buffer %u on port %u still has status %d",
4720 mComponentName.c_str(),
4721 info->mBufferID, portIndex, info->mStatus);
4722 return false;
4723 }
4724 }
4725
4726 return true;
4727 }
4728
4729 bool ACodec::allYourBuffersAreBelongToUs() {
4730 return allYourBuffersAreBelongToUs(kPortIndexInput)
4731 && allYourBuffersAreBelongToUs(kPortIndexOutput);
4732 }
4733
4734 void ACodec::deferMessage(const sp<AMessage> &msg) {
4735 mDeferredQueue.push_back(msg);
4736 }
4737
4738 void ACodec::processDeferredMessages() {
4739 List<sp<AMessage> > queue = mDeferredQueue;
4740 mDeferredQueue.clear();
4741
4742 List<sp<AMessage> >::iterator it = queue.begin();
4743 while (it != queue.end()) {
4744 onMessageReceived(*it++);
4745 }
4746 }
4747
4748 // static
4749 bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
4750 MediaImage2 &image = params.sMediaImage;
4751 memset(&image, 0, sizeof(image));
4752
4753 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
4754 image.mNumPlanes = 0;
4755
4756 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
4757 image.mWidth = params.nFrameWidth;
4758 image.mHeight = params.nFrameHeight;
4759
4760 // only supporting YUV420
4761 if (fmt != OMX_COLOR_FormatYUV420Planar &&
4762 fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
4763 fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
4764 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
4765 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
4766 ALOGW("do not know color format 0x%x = %d", fmt, fmt);
4767 return false;
4768 }
4769
4770 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
4771 if (params.nStride != 0 && params.nSliceHeight == 0) {
4772 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
4773 params.nFrameHeight);
4774 params.nSliceHeight = params.nFrameHeight;
4775 }
4776
4777 // we need stride and slice-height to be non-zero and sensible. These values were chosen to
4778 // prevent integer overflows further down the line, and do not indicate support for
4779 // 32kx32k video.
4780 if (params.nStride == 0 || params.nSliceHeight == 0
4781 || params.nStride > 32768 || params.nSliceHeight > 32768) {
4782 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
4783 fmt, fmt, params.nStride, params.nSliceHeight);
4784 return false;
4785 }
4786
4787 // set-up YUV format
4788 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
4789 image.mNumPlanes = 3;
4790 image.mBitDepth = 8;
4791 image.mBitDepthAllocated = 8;
4792 image.mPlane[image.Y].mOffset = 0;
4793 image.mPlane[image.Y].mColInc = 1;
4794 image.mPlane[image.Y].mRowInc = params.nStride;
4795 image.mPlane[image.Y].mHorizSubsampling = 1;
4796 image.mPlane[image.Y].mVertSubsampling = 1;
4797
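// Worked example (added for clarity): for OMX_COLOR_FormatYUV420Planar with
// stride 320 and sliceHeight 240, the switch below places U at offset
// 320 * 240 = 76800 and V at 76800 + 320 * 240 / 4 = 96000, both with rowInc 160;
// the semi-planar cases instead interleave U and V with colInc 2.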
4798 switch ((int)fmt) {
4799 case HAL_PIXEL_FORMAT_YV12:
4800 if (params.bUsingNativeBuffers) {
4801 size_t ystride = align(params.nStride, 16);
4802 size_t cstride = align(params.nStride / 2, 16);
4803 image.mPlane[image.Y].mRowInc = ystride;
4804
4805 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
4806 image.mPlane[image.V].mColInc = 1;
4807 image.mPlane[image.V].mRowInc = cstride;
4808 image.mPlane[image.V].mHorizSubsampling = 2;
4809 image.mPlane[image.V].mVertSubsampling = 2;
4810
4811 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
4812 + (cstride * params.nSliceHeight / 2);
4813 image.mPlane[image.U].mColInc = 1;
4814 image.mPlane[image.U].mRowInc = cstride;
4815 image.mPlane[image.U].mHorizSubsampling = 2;
4816 image.mPlane[image.U].mVertSubsampling = 2;
4817 break;
4818 } else {
4819 // fall through as YV12 is used for YUV420Planar by some codecs
4820 }
4821
4822 case OMX_COLOR_FormatYUV420Planar:
4823 case OMX_COLOR_FormatYUV420PackedPlanar:
4824 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
4825 image.mPlane[image.U].mColInc = 1;
4826 image.mPlane[image.U].mRowInc = params.nStride / 2;
4827 image.mPlane[image.U].mHorizSubsampling = 2;
4828 image.mPlane[image.U].mVertSubsampling = 2;
4829
4830 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
4831 + (params.nStride * params.nSliceHeight / 4);
4832 image.mPlane[image.V].mColInc = 1;
4833 image.mPlane[image.V].mRowInc = params.nStride / 2;
4834 image.mPlane[image.V].mHorizSubsampling = 2;
4835 image.mPlane[image.V].mVertSubsampling = 2;
4836 break;
4837
4838 case OMX_COLOR_FormatYUV420SemiPlanar:
4839 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
4840 case OMX_COLOR_FormatYUV420PackedSemiPlanar:
4841 // NV12
4842 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
4843 image.mPlane[image.U].mColInc = 2;
4844 image.mPlane[image.U].mRowInc = params.nStride;
4845 image.mPlane[image.U].mHorizSubsampling = 2;
4846 image.mPlane[image.U].mVertSubsampling = 2;
4847
4848 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
4849 image.mPlane[image.V].mColInc = 2;
4850 image.mPlane[image.V].mRowInc = params.nStride;
4851 image.mPlane[image.V].mHorizSubsampling = 2;
4852 image.mPlane[image.V].mVertSubsampling = 2;
4853 break;
4854
4855 default:
4856 TRESPASS();
4857 }
4858 return true;
4859 }
4860
4861 // static
4862 bool ACodec::describeColorFormat(
4863 const sp<IOMX> &omx, IOMX::node_id node,
4864 DescribeColorFormat2Params &describeParams)
4865 {
4866 OMX_INDEXTYPE describeColorFormatIndex;
4867 if (omx->getExtensionIndex(
4868 node, "OMX.google.android.index.describeColorFormat",
4869 &describeColorFormatIndex) == OK) {
4870 DescribeColorFormatParams describeParamsV1(describeParams);
4871 if (omx->getParameter(
4872 node, describeColorFormatIndex,
4873 &describeParamsV1, sizeof(describeParamsV1)) == OK) {
4874 describeParams.initFromV1(describeParamsV1);
4875 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
4876 }
4877 } else if (omx->getExtensionIndex(
4878 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
4879 && omx->getParameter(
4880 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
4881 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
4882 }
4883
4884 return describeDefaultColorFormat(describeParams);
4885 }
4886
4887 // static
4888 bool ACodec::isFlexibleColorFormat(
4889 const sp<IOMX> &omx, IOMX::node_id node,
4890 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
4891 DescribeColorFormat2Params describeParams;
4892 InitOMXParams(&describeParams);
4893 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
4894 // reasonable dummy values
4895 describeParams.nFrameWidth = 128;
4896 describeParams.nFrameHeight = 128;
4897 describeParams.nStride = 128;
4898 describeParams.nSliceHeight = 128;
4899 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
4900
4901 CHECK(flexibleEquivalent != NULL);
4902
4903 if (!describeColorFormat(omx, node, describeParams)) {
4904 return false;
4905 }
4906
4907 const MediaImage2 &img = describeParams.sMediaImage;
4908 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
4909 if (img.mNumPlanes != 3
4910 || img.mPlane[img.Y].mHorizSubsampling != 1
4911 || img.mPlane[img.Y].mVertSubsampling != 1) {
4912 return false;
4913 }
4914
4915 // YUV 420
4916 if (img.mPlane[img.U].mHorizSubsampling == 2
4917 && img.mPlane[img.U].mVertSubsampling == 2
4918 && img.mPlane[img.V].mHorizSubsampling == 2
4919 && img.mPlane[img.V].mVertSubsampling == 2) {
4920 // possible flexible YUV420 format
4921 if (img.mBitDepth <= 8) {
4922 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
4923 return true;
4924 }
4925 }
4926 }
4927 return false;
4928 }
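// Put differently: a format qualifies as "flexible" only when the component
// describes it as 3-plane YUV with full-resolution luma, 2x2-subsampled chroma
// and a bit depth of at most 8; such formats can be advertised to clients as
// OMX_COLOR_FormatYUV420Flexible irrespective of the component's actual layout.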
4929
4930 status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
4931 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
4932 OMX_PARAM_PORTDEFINITIONTYPE def;
4933 InitOMXParams(&def);
4934 def.nPortIndex = portIndex;
4935
4936 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
4937 if (err != OK) {
4938 return err;
4939 }
4940
4941 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
4942 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
4943 return BAD_VALUE;
4944 }
4945
4946 switch (def.eDomain) {
4947 case OMX_PortDomainVideo:
4948 {
4949 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
4950 switch ((int)videoDef->eCompressionFormat) {
4951 case OMX_VIDEO_CodingUnused:
4952 {
4953 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
4954 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
4955
4956 notify->setInt32("stride", videoDef->nStride);
4957 notify->setInt32("slice-height", videoDef->nSliceHeight);
4958 notify->setInt32("color-format", videoDef->eColorFormat);
4959
4960 if (mNativeWindow == NULL) {
4961 DescribeColorFormat2Params describeParams;
4962 InitOMXParams(&describeParams);
4963 describeParams.eColorFormat = videoDef->eColorFormat;
4964 describeParams.nFrameWidth = videoDef->nFrameWidth;
4965 describeParams.nFrameHeight = videoDef->nFrameHeight;
4966 describeParams.nStride = videoDef->nStride;
4967 describeParams.nSliceHeight = videoDef->nSliceHeight;
4968 describeParams.bUsingNativeBuffers = OMX_FALSE;
4969
4970 if (describeColorFormat(mOMX, mNode, describeParams)) {
4971 notify->setBuffer(
4972 "image-data",
4973 ABuffer::CreateAsCopy(
4974 &describeParams.sMediaImage,
4975 sizeof(describeParams.sMediaImage)));
4976
4977 MediaImage2 &img = describeParams.sMediaImage;
4978 MediaImage2::PlaneInfo *plane = img.mPlane;
4979 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
4980 mComponentName.c_str(), img.mWidth, img.mHeight,
4981 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
4982 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
4983 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
4984 }
4985 }
4986
4987 int32_t width = (int32_t)videoDef->nFrameWidth;
4988 int32_t height = (int32_t)videoDef->nFrameHeight;
4989
4990 if (portIndex == kPortIndexOutput) {
4991 OMX_CONFIG_RECTTYPE rect;
4992 InitOMXParams(&rect);
4993 rect.nPortIndex = portIndex;
4994
4995 if (mOMX->getConfig(
4996 mNode,
4997 (portIndex == kPortIndexOutput ?
4998 OMX_IndexConfigCommonOutputCrop :
4999 OMX_IndexConfigCommonInputCrop),
5000 &rect, sizeof(rect)) != OK) {
5001 rect.nLeft = 0;
5002 rect.nTop = 0;
5003 rect.nWidth = videoDef->nFrameWidth;
5004 rect.nHeight = videoDef->nFrameHeight;
5005 }
5006
5007 if (rect.nLeft < 0 ||
5008 rect.nTop < 0 ||
5009 rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
5010 rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
5011 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
5012 rect.nLeft, rect.nTop,
5013 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
5014 videoDef->nFrameWidth, videoDef->nFrameHeight);
5015 return BAD_VALUE;
5016 }
5017
5018 notify->setRect(
5019 "crop",
5020 rect.nLeft,
5021 rect.nTop,
5022 rect.nLeft + rect.nWidth - 1,
5023 rect.nTop + rect.nHeight - 1);
5024
5025 width = rect.nWidth;
5026 height = rect.nHeight;
5027
5028 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
5029 (void)getColorAspectsAndDataSpaceForVideoDecoder(
5030 width, height, mConfigFormat, notify,
5031 mUsingNativeWindow ? &dataSpace : NULL);
5032 if (mUsingNativeWindow) {
5033 notify->setInt32("android._dataspace", dataSpace);
5034 }
5035 (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
5036 } else {
5037 (void)getInputColorAspectsForVideoEncoder(notify);
5038 if (mConfigFormat->contains("hdr-static-info")) {
5039 (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
5040 }
5041 }
5042
5043 break;
5044 }
5045
5046 case OMX_VIDEO_CodingVP8:
5047 case OMX_VIDEO_CodingVP9:
5048 {
5049 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
5050 InitOMXParams(&vp8type);
5051 vp8type.nPortIndex = kPortIndexOutput;
5052 status_t err = mOMX->getParameter(
5053 mNode,
5054 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
5055 &vp8type,
5056 sizeof(vp8type));
5057
5058 if (err == OK) {
5059 if (vp8type.eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC
5060 && vp8type.nTemporalLayerCount > 0
5061 && vp8type.nTemporalLayerCount
5062 <= OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
5063 // advertise as android.generic if we configured for android.generic
5064 AString origSchema;
5065 if (notify->findString("ts-schema", &origSchema)
5066 && origSchema.startsWith("android.generic")) {
5067 notify->setString("ts-schema", AStringPrintf(
5068 "android.generic.%u", vp8type.nTemporalLayerCount));
5069 } else {
5070 notify->setString("ts-schema", AStringPrintf(
5071 "webrtc.vp8.%u-layer", vp8type.nTemporalLayerCount));
5072 }
5073 }
5074 }
5075 // Fall through to set up mime.
5076 }
5077
5078 default:
5079 {
5080 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
5081 // should be CodingUnused
5082 ALOGE("Raw port video compression format is %s(%d)",
5083 asString(videoDef->eCompressionFormat),
5084 videoDef->eCompressionFormat);
5085 return BAD_VALUE;
5086 }
5087 AString mime;
5088 if (GetMimeTypeForVideoCoding(
5089 videoDef->eCompressionFormat, &mime) != OK) {
5090 notify->setString("mime", "application/octet-stream");
5091 } else {
5092 notify->setString("mime", mime.c_str());
5093 }
5094 uint32_t intraRefreshPeriod = 0;
5095 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
5096 && intraRefreshPeriod > 0) {
5097 notify->setInt32("intra-refresh-period", intraRefreshPeriod);
5098 }
5099 break;
5100 }
5101 }
5102 notify->setInt32("width", videoDef->nFrameWidth);
5103 notify->setInt32("height", videoDef->nFrameHeight);
5104 ALOGV("[%s] %s format is %s", mComponentName.c_str(),
5105 portIndex == kPortIndexInput ? "input" : "output",
5106 notify->debugString().c_str());
5107
5108 break;
5109 }
5110
5111 case OMX_PortDomainAudio:
5112 {
5113 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
5114
5115 switch ((int)audioDef->eEncoding) {
5116 case OMX_AUDIO_CodingPCM:
5117 {
5118 OMX_AUDIO_PARAM_PCMMODETYPE params;
5119 InitOMXParams(&params);
5120 params.nPortIndex = portIndex;
5121
5122 err = mOMX->getParameter(
5123 mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
5124 if (err != OK) {
5125 return err;
5126 }
5127
5128 if (params.nChannels <= 0
5129 || (params.nChannels != 1 && !params.bInterleaved)
5130 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
5131 ALOGE("unsupported PCM port: %u channels%s, %u-bit",
5132 params.nChannels,
5133 params.bInterleaved ? " interleaved" : "",
5134 params.nBitPerSample);
5135 return FAILED_TRANSACTION;
5136 }
5137
5138 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
5139 notify->setInt32("channel-count", params.nChannels);
5140 notify->setInt32("sample-rate", params.nSamplingRate);
5141
5142 AudioEncoding encoding = kAudioEncodingPcm16bit;
5143 if (params.eNumData == OMX_NumericalDataUnsigned
5144 && params.nBitPerSample == 8u) {
5145 encoding = kAudioEncodingPcm8bit;
5146 } else if (params.eNumData == OMX_NumericalDataFloat
5147 && params.nBitPerSample == 32u) {
5148 encoding = kAudioEncodingPcmFloat;
5149 } else if (params.nBitPerSample != 16u
5150 || params.eNumData != OMX_NumericalDataSigned) {
5151 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
5152 asString(params.eNumData), params.eNumData,
5153 asString(params.ePCMMode), params.ePCMMode);
5154 return FAILED_TRANSACTION;
5155 }
5156 notify->setInt32("pcm-encoding", encoding);
5157
5158 if (mChannelMaskPresent) {
5159 notify->setInt32("channel-mask", mChannelMask);
5160 }
5161 break;
5162 }
5163
5164 case OMX_AUDIO_CodingAAC:
5165 {
5166 OMX_AUDIO_PARAM_AACPROFILETYPE params;
5167 InitOMXParams(&params);
5168 params.nPortIndex = portIndex;
5169
5170 err = mOMX->getParameter(
5171 mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
5172 if (err != OK) {
5173 return err;
5174 }
5175
5176 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
5177 notify->setInt32("channel-count", params.nChannels);
5178 notify->setInt32("sample-rate", params.nSampleRate);
5179 break;
5180 }
5181
5182 case OMX_AUDIO_CodingAMR:
5183 {
5184 OMX_AUDIO_PARAM_AMRTYPE params;
5185 InitOMXParams(&params);
5186 params.nPortIndex = portIndex;
5187
5188 err = mOMX->getParameter(
5189 mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
5190 if (err != OK) {
5191 return err;
5192 }
5193
5194 notify->setInt32("channel-count", 1);
5195 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
5196 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
5197 notify->setInt32("sample-rate", 16000);
5198 } else {
5199 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
5200 notify->setInt32("sample-rate", 8000);
5201 }
5202 break;
5203 }
5204
5205 case OMX_AUDIO_CodingFLAC:
5206 {
5207 OMX_AUDIO_PARAM_FLACTYPE params;
5208 InitOMXParams(&params);
5209 params.nPortIndex = portIndex;
5210
5211 err = mOMX->getParameter(
5212 mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
5213 if (err != OK) {
5214 return err;
5215 }
5216
5217 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
5218 notify->setInt32("channel-count", params.nChannels);
5219 notify->setInt32("sample-rate", params.nSampleRate);
5220 break;
5221 }
5222
5223 case OMX_AUDIO_CodingMP3:
5224 {
5225 OMX_AUDIO_PARAM_MP3TYPE params;
5226 InitOMXParams(&params);
5227 params.nPortIndex = portIndex;
5228
5229 err = mOMX->getParameter(
5230 mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
5231 if (err != OK) {
5232 return err;
5233 }
5234
5235 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
5236 notify->setInt32("channel-count", params.nChannels);
5237 notify->setInt32("sample-rate", params.nSampleRate);
5238 break;
5239 }
5240
5241 case OMX_AUDIO_CodingVORBIS:
5242 {
5243 OMX_AUDIO_PARAM_VORBISTYPE params;
5244 InitOMXParams(&params);
5245 params.nPortIndex = portIndex;
5246
5247 err = mOMX->getParameter(
5248 mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
5249 if (err != OK) {
5250 return err;
5251 }
5252
5253 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
5254 notify->setInt32("channel-count", params.nChannels);
5255 notify->setInt32("sample-rate", params.nSampleRate);
5256 break;
5257 }
5258
5259 case OMX_AUDIO_CodingAndroidAC3:
5260 {
5261 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
5262 InitOMXParams(&params);
5263 params.nPortIndex = portIndex;
5264
5265 err = mOMX->getParameter(
5266 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
5267 &params, sizeof(params));
5268 if (err != OK) {
5269 return err;
5270 }
5271
5272 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
5273 notify->setInt32("channel-count", params.nChannels);
5274 notify->setInt32("sample-rate", params.nSampleRate);
5275 break;
5276 }
5277
5278 case OMX_AUDIO_CodingAndroidEAC3:
5279 {
5280 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
5281 InitOMXParams(&params);
5282 params.nPortIndex = portIndex;
5283
5284 err = mOMX->getParameter(
5285 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
5286 &params, sizeof(params));
5287 if (err != OK) {
5288 return err;
5289 }
5290
5291 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
5292 notify->setInt32("channel-count", params.nChannels);
5293 notify->setInt32("sample-rate", params.nSampleRate);
5294 break;
5295 }
5296
5297 case OMX_AUDIO_CodingAndroidOPUS:
5298 {
5299 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
5300 InitOMXParams(&params);
5301 params.nPortIndex = portIndex;
5302
5303 err = mOMX->getParameter(
5304 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
5305 &params, sizeof(params));
5306 if (err != OK) {
5307 return err;
5308 }
5309
5310 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
5311 notify->setInt32("channel-count", params.nChannels);
5312 notify->setInt32("sample-rate", params.nSampleRate);
5313 break;
5314 }
5315
5316 case OMX_AUDIO_CodingG711:
5317 {
5318 OMX_AUDIO_PARAM_PCMMODETYPE params;
5319 InitOMXParams(&params);
5320 params.nPortIndex = portIndex;
5321
5322 err = mOMX->getParameter(
5323 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
5324 if (err != OK) {
5325 return err;
5326 }
5327
5328 const char *mime = NULL;
5329 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
5330 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
5331 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
5332 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
5333 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
5334 mime = MEDIA_MIMETYPE_AUDIO_RAW;
5335 }
5336 notify->setString("mime", mime);
5337 notify->setInt32("channel-count", params.nChannels);
5338 notify->setInt32("sample-rate", params.nSamplingRate);
5339 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
5340 break;
5341 }
5342
5343 case OMX_AUDIO_CodingGSMFR:
5344 {
5345 OMX_AUDIO_PARAM_PCMMODETYPE params;
5346 InitOMXParams(&params);
5347 params.nPortIndex = portIndex;
5348
5349 err = mOMX->getParameter(
5350 mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
5351 if (err != OK) {
5352 return err;
5353 }
5354
5355 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
5356 notify->setInt32("channel-count", params.nChannels);
5357 notify->setInt32("sample-rate", params.nSamplingRate);
5358 break;
5359 }
5360
5361 default:
5362 ALOGE("Unsupported audio coding: %s(%d)\n",
5363 asString(audioDef->eEncoding), audioDef->eEncoding);
5364 return BAD_TYPE;
5365 }
5366 break;
5367 }
5368
5369 default:
5370 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain);
5371 return BAD_TYPE;
5372 }
5373
5374 return OK;
5375 }
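// Summary: getPortFormat() flattens the OMX port definition into AMessage keys
// ("mime", "width", "height", "crop", "color-format", "image-data",
// "channel-count", "sample-rate", "pcm-encoding", ...). These keys back the
// input/output format the client observes; the exact consumer (MediaCodec)
// lives outside this file.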
5376
5377 void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) {
5378 // aspects are normally communicated in ColorAspects
5379 int32_t range, standard, transfer;
5380 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
5381
5382 // if some aspects are unspecified, use dataspace fields
5383 if (range == 0) {
5384 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
5385 }
5386 if (standard == 0) {
5387 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
5388 }
5389 if (transfer == 0) {
5390 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
5391 }
5392
5393 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
5394 if (range != 0) {
5395 mOutputFormat->setInt32("color-range", range);
5396 }
5397 if (standard != 0) {
5398 mOutputFormat->setInt32("color-standard", standard);
5399 }
5400 if (transfer != 0) {
5401 mOutputFormat->setInt32("color-transfer", transfer);
5402 }
5403
5404 ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
5405 "(R:%d(%s), S:%d(%s), T:%d(%s))",
5406 dataSpace,
5407 aspects.mRange, asString(aspects.mRange),
5408 aspects.mPrimaries, asString(aspects.mPrimaries),
5409 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs),
5410 aspects.mTransfer, asString(aspects.mTransfer),
5411 range, asString((ColorRange)range),
5412 standard, asString((ColorStandard)standard),
5413 transfer, asString((ColorTransfer)transfer));
5414 }
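// Note: both the codec-reported ColorAspects fields and the corresponding
// dataspace bitfields use 0 to mean "unspecified". That is why the code above
// only falls back to the dataspace-derived value when an aspect is 0, and why
// 0 values are never written into the output format.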
5415
5416 void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) {
5417 // store new output format, at the same time mark that this is no longer the first frame
5418 mOutputFormat = mBaseOutputFormat->dup();
5419
5420 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) {
5421 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str());
5422 return;
5423 }
5424
5425 if (expectedFormat != NULL) {
5426 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat);
5427 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat);
5428 if (changes->countEntries() != 0 || to->countEntries() != 0) {
5429 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s",
5430 mComponentName.c_str(),
5431 changes->debugString(4).c_str(), to->debugString(4).c_str());
5432 }
5433 }
5434
5435 if (!mIsVideo && !mIsEncoder) {
5436 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
5437 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
5438 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
5439 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
5440
5441 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
5442 if (mConverter[kPortIndexOutput] != NULL) {
5443 mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
5444 }
5445 }
5446
5447 if (mTunneled) {
5448 sendFormatChange();
5449 }
5450 }
5451
5452 void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
5453 AString mime;
5454 CHECK(mOutputFormat->findString("mime", &mime));
5455
5456 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
5457 // notify renderer of the crop change and dataspace change
5458 // NOTE: native window uses extended right-bottom coordinate
5459 int32_t left, top, right, bottom;
5460 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
5461 notify->setRect("crop", left, top, right + 1, bottom + 1);
5462 }
5463
5464 int32_t dataSpace;
5465 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
5466 notify->setInt32("dataspace", dataSpace);
5467 }
5468 }
5469 }
5470
5471 void ACodec::sendFormatChange() {
5472 AString mime;
5473 CHECK(mOutputFormat->findString("mime", &mime));
5474
5475 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
5476 int32_t channelCount;
5477 CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
5478 if (mSkipCutBuffer != NULL) {
5479 size_t prevbufsize = mSkipCutBuffer->size();
5480 if (prevbufsize != 0) {
5481 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
5482 }
5483 }
5484 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
5485 }
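// Note: the SkipCutBuffer installed above trims the encoder delay and padding
// (given in frames) from the raw PCM output; replacing a non-empty instance
// (warned about above) discards whatever it is still holding.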
5486
5487 sp<AMessage> notify = mNotify->dup();
5488 notify->setInt32("what", kWhatOutputFormatChanged);
5489 notify->setMessage("format", mOutputFormat);
5490 notify->post();
5491
5492 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
5493 mLastOutputFormat = mOutputFormat;
5494 }
5495
5496 void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
5497 sp<AMessage> notify = mNotify->dup();
5498 notify->setInt32("what", CodecBase::kWhatError);
5499 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);
5500
5501 if (internalError == UNKNOWN_ERROR) { // find better error code
5502 const status_t omxStatus = statusFromOMXError(error);
5503 if (omxStatus != 0) {
5504 internalError = omxStatus;
5505 } else {
5506 ALOGW("Invalid OMX error %#x", error);
5507 }
5508 }
5509
5510 mFatalError = true;
5511
5512 notify->setInt32("err", internalError);
5513 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
5514 notify->post();
5515 }
5516
5517 ////////////////////////////////////////////////////////////////////////////////
5518
5519 ACodec::PortDescription::PortDescription() {
5520 }
5521
5522 status_t ACodec::requestIDRFrame() {
5523 if (!mIsEncoder) {
5524 return ERROR_UNSUPPORTED;
5525 }
5526
5527 OMX_CONFIG_INTRAREFRESHVOPTYPE params;
5528 InitOMXParams(&params);
5529
5530 params.nPortIndex = kPortIndexOutput;
5531 params.IntraRefreshVOP = OMX_TRUE;
5532
5533 return mOMX->setConfig(
5534 mNode,
5535 OMX_IndexConfigVideoIntraVOPRefresh,
5536 &params,
5537 sizeof(params));
5538 }
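// Note: only encoders honor this request. It typically originates from a client
// sync-frame request (e.g. MediaCodec's PARAMETER_KEY_REQUEST_SYNC_FRAME, plumbed
// through code outside this file) and maps to an
// OMX_IndexConfigVideoIntraVOPRefresh config on the encoder's output port.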
5539
5540 void ACodec::PortDescription::addBuffer(
5541 IOMX::buffer_id id, const sp<ABuffer> &buffer,
5542 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
5543 mBufferIDs.push_back(id);
5544 mBuffers.push_back(buffer);
5545 mHandles.push_back(handle);
5546 mMemRefs.push_back(memRef);
5547 }
5548
5549 size_t ACodec::PortDescription::countBuffers() {
5550 return mBufferIDs.size();
5551 }
5552
5553 IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
5554 return mBufferIDs.itemAt(index);
5555 }
5556
5557 sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
5558 return mBuffers.itemAt(index);
5559 }
5560
5561 sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
5562 return mHandles.itemAt(index);
5563 }
5564
5565 sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
5566 return mMemRefs.itemAt(index);
5567 }
5568
5569 ////////////////////////////////////////////////////////////////////////////////
5570
5571 ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
5572 : AState(parentState),
5573 mCodec(codec) {
5574 }
5575
5576 ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
5577 OMX_U32 /* portIndex */) {
5578 return KEEP_BUFFERS;
5579 }
5580
5581 bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
5582 switch (msg->what()) {
5583 case kWhatInputBufferFilled:
5584 {
5585 onInputBufferFilled(msg);
5586 break;
5587 }
5588
5589 case kWhatOutputBufferDrained:
5590 {
5591 onOutputBufferDrained(msg);
5592 break;
5593 }
5594
5595 case ACodec::kWhatOMXMessageList:
5596 {
5597 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
5598 }
5599
5600 case ACodec::kWhatOMXMessageItem:
5601 {
5602 // no need to check as we already did it for kWhatOMXMessageList
5603 return onOMXMessage(msg);
5604 }
5605
5606 case ACodec::kWhatOMXMessage:
5607 {
5608 return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
5609 }
5610
5611 case ACodec::kWhatSetSurface:
5612 {
5613 sp<AReplyToken> replyID;
5614 CHECK(msg->senderAwaitsResponse(&replyID));
5615
5616 sp<RefBase> obj;
5617 CHECK(msg->findObject("surface", &obj));
5618
5619 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
5620
5621 sp<AMessage> response = new AMessage;
5622 response->setInt32("err", err);
5623 response->postReply(replyID);
5624 break;
5625 }
5626
5627 case ACodec::kWhatCreateInputSurface:
5628 case ACodec::kWhatSetInputSurface:
5629 case ACodec::kWhatSignalEndOfInputStream:
5630 {
5631 // This may result in an app illegal state exception.
5632 ALOGE("Message 0x%x was not handled", msg->what());
5633 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION);
5634 return true;
5635 }
5636
5637 case ACodec::kWhatOMXDied:
5638 {
5639 // This will result in kFlagSawMediaServerDie handling in MediaCodec.
5640 ALOGE("OMX/mediaserver died, signalling error!");
5641 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT);
5642 break;
5643 }
5644
5645 case ACodec::kWhatReleaseCodecInstance:
5646 {
5647 ALOGI("[%s] forcing the release of codec",
5648 mCodec->mComponentName.c_str());
5649 status_t err = mCodec->mOMX->freeNode(mCodec->mNode);
5650 ALOGE_IF(err != OK, "[%s] failed to release codec instance: err=%d",
5651 mCodec->mComponentName.c_str(), err);
5652 sp<AMessage> notify = mCodec->mNotify->dup();
5653 notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
5654 notify->post();
5655 break;
5656 }
5657
5658 default:
5659 return false;
5660 }
5661
5662 return true;
5663 }
5664
5665 bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) {
5666 // there is a possibility that this is an outstanding message for a
5667 // codec that we have already destroyed
5668 if (mCodec->mNode == 0) {
5669 ALOGI("ignoring message as already freed component: %s",
5670 msg->debugString().c_str());
5671 return false;
5672 }
5673
5674 IOMX::node_id nodeID;
5675 CHECK(msg->findInt32("node", (int32_t*)&nodeID));
5676 if (nodeID != mCodec->mNode) {
5677 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode);
5678 return false;
5679 }
5680 return true;
5681 }
5682
5683 bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) {
5684 sp<RefBase> obj;
5685 CHECK(msg->findObject("messages", &obj));
5686 sp<MessageList> msgList = static_cast<MessageList *>(obj.get());
5687
5688 bool receivedRenderedEvents = false;
5689 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin();
5690 it != msgList->getList().cend(); ++it) {
5691 (*it)->setWhat(ACodec::kWhatOMXMessageItem);
5692 mCodec->handleMessage(*it);
5693 int32_t type;
5694 CHECK((*it)->findInt32("type", &type));
5695 if (type == omx_message::FRAME_RENDERED) {
5696 receivedRenderedEvents = true;
5697 }
5698 }
5699
5700 if (receivedRenderedEvents) {
5701 // NOTE: all buffers are rendered in this case
5702 mCodec->notifyOfRenderedFrames();
5703 }
5704 return true;
5705 }
5706
5707 bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
5708 int32_t type;
5709 CHECK(msg->findInt32("type", &type));
5710
5711 switch (type) {
5712 case omx_message::EVENT:
5713 {
5714 int32_t event, data1, data2;
5715 CHECK(msg->findInt32("event", &event));
5716 CHECK(msg->findInt32("data1", &data1));
5717 CHECK(msg->findInt32("data2", &data2));
5718
5719 if (event == OMX_EventCmdComplete
5720 && data1 == OMX_CommandFlush
5721 && data2 == (int32_t)OMX_ALL) {
5722 // Use of this notification is not consistent across
5723 // implementations. We'll drop this notification and rely
5724 // on flush-complete notifications on the individual port
5725 // indices instead.
5726
5727 return true;
5728 }
5729
5730 return onOMXEvent(
5731 static_cast<OMX_EVENTTYPE>(event),
5732 static_cast<OMX_U32>(data1),
5733 static_cast<OMX_U32>(data2));
5734 }
5735
5736 case omx_message::EMPTY_BUFFER_DONE:
5737 {
5738 IOMX::buffer_id bufferID;
5739 int32_t fenceFd;
5740
5741 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
5742 CHECK(msg->findInt32("fence_fd", &fenceFd));
5743
5744 return onOMXEmptyBufferDone(bufferID, fenceFd);
5745 }
5746
5747 case omx_message::FILL_BUFFER_DONE:
5748 {
5749 IOMX::buffer_id bufferID;
5750 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
5751
5752 int32_t rangeOffset, rangeLength, flags, fenceFd;
5753 int64_t timeUs;
5754
5755 CHECK(msg->findInt32("range_offset", &rangeOffset));
5756 CHECK(msg->findInt32("range_length", &rangeLength));
5757 CHECK(msg->findInt32("flags", &flags));
5758 CHECK(msg->findInt64("timestamp", &timeUs));
5759 CHECK(msg->findInt32("fence_fd", &fenceFd));
5760
5761 return onOMXFillBufferDone(
5762 bufferID,
5763 (size_t)rangeOffset, (size_t)rangeLength,
5764 (OMX_U32)flags,
5765 timeUs,
5766 fenceFd);
5767 }
5768
5769 case omx_message::FRAME_RENDERED:
5770 {
5771 int64_t mediaTimeUs, systemNano;
5772
5773 CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
5774 CHECK(msg->findInt64("system_nano", &systemNano));
5775
5776 return onOMXFrameRendered(
5777 mediaTimeUs, systemNano);
5778 }
5779
5780 default:
5781 ALOGE("Unexpected message type: %d", type);
5782 return false;
5783 }
5784 }
5785
5786 bool ACodec::BaseState::onOMXFrameRendered(
5787 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
5788 // ignore outside of Executing and PortSettingsChanged states
5789 return true;
5790 }
5791
5792 bool ACodec::BaseState::onOMXEvent(
5793 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
5794 if (event == OMX_EventDataSpaceChanged) {
5795 ColorAspects aspects;
5796 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
5797 aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
5798 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
5799 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);
5800
5801 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
5802 return true;
5803 }
5804
5805 if (event != OMX_EventError) {
5806 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
5807 mCodec->mComponentName.c_str(), event, data1, data2);
5808
5809 return false;
5810 }
5811
5812 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);
5813
5814 // verify OMX component sends back an error we expect.
5815 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
5816 if (!isOMXError(omxError)) {
5817 ALOGW("Invalid OMX error %#x", omxError);
5818 omxError = OMX_ErrorUndefined;
5819 }
5820 mCodec->signalError(omxError);
5821
5822 return true;
5823 }
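// For OMX_EventDataSpaceChanged, data1 carries the new android_dataspace and
// data2 packs the codec-reported color aspects one byte per field, i.e. the
// inverse of the decoding above (a sketch implied by those shifts/masks, not a
// definition taken from the OMX headers):
//
//   data2 = (range << 24) | (primaries << 16) | (matrixCoeffs << 8) | transfer;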
5824
5825 bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
5826 ALOGV("[%s] onOMXEmptyBufferDone %u",
5827 mCodec->mComponentName.c_str(), bufferID);
5828
5829 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
5830 BufferInfo::Status status = BufferInfo::getSafeStatus(info);
5831 if (status != BufferInfo::OWNED_BY_COMPONENT) {
5832 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
5833 mCodec->dumpBuffers(kPortIndexInput);
5834 if (fenceFd >= 0) {
5835 ::close(fenceFd);
5836 }
5837 return false;
5838 }
5839 info->mStatus = BufferInfo::OWNED_BY_US;
5840
5841 // input buffers cannot take fences, so wait for any fence now
5842 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
5843 fenceFd = -1;
5844
5845 // still save fence for completeness
5846 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");
5847
5848 // We're in "store-metadata-in-buffers" mode, the underlying
5849 // OMX component had access to data that's implicitly refcounted
5850 // by this "MediaBuffer" object. Now that the OMX component has
5851 // told us that it's done with the input buffer, we can decrement
5852 // the mediaBuffer's reference count.
5853 info->mData->setMediaBufferBase(NULL);
5854
5855 PortMode mode = getPortMode(kPortIndexInput);
5856
5857 switch (mode) {
5858 case KEEP_BUFFERS:
5859 break;
5860
5861 case RESUBMIT_BUFFERS:
5862 postFillThisBuffer(info);
5863 break;
5864
5865 case FREE_BUFFERS:
5866 default:
5867 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
5868 return false;
5869 }
5870
5871 return true;
5872 }
5873
5874 void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
5875 if (mCodec->mPortEOS[kPortIndexInput]) {
5876 return;
5877 }
5878
5879 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
5880
5881 sp<AMessage> notify = mCodec->mNotify->dup();
5882 notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
5883 notify->setInt32("buffer-id", info->mBufferID);
5884
5885 info->mData->meta()->clear();
5886 notify->setBuffer("buffer", info->mData);
5887
5888 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
5889 reply->setInt32("buffer-id", info->mBufferID);
5890
5891 notify->setMessage("reply", reply);
5892
5893 notify->post();
5894
5895 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
5896 }
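// Input buffer ownership cycle, as driven by this state:
//   OWNED_BY_US        --postFillThisBuffer (kWhatFillThisBuffer)-->  OWNED_BY_UPSTREAM
//   OWNED_BY_UPSTREAM  --onInputBufferFilled-->                       OWNED_BY_US
//   OWNED_BY_US        --IOMX::emptyBuffer-->                         OWNED_BY_COMPONENT
//   OWNED_BY_COMPONENT --onOMXEmptyBufferDone-->                      OWNED_BY_US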
5897
5898 void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
5899 IOMX::buffer_id bufferID;
5900 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
5901 sp<ABuffer> buffer;
5902 int32_t err = OK;
5903 bool eos = false;
5904 PortMode mode = getPortMode(kPortIndexInput);
5905
5906 if (!msg->findBuffer("buffer", &buffer)) {
5907 /* these are unfilled buffers returned by client */
5908 CHECK(msg->findInt32("err", &err));
5909
5910 if (err == OK) {
5911 /* buffers with no errors are returned on MediaCodec.flush */
5912 mode = KEEP_BUFFERS;
5913 } else {
5914 ALOGV("[%s] saw error %d instead of an input buffer",
5915 mCodec->mComponentName.c_str(), err);
5916 eos = true;
5917 }
5918
5919 buffer.clear();
5920 }
5921
5922 int32_t tmp;
5923 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
5924 eos = true;
5925 err = ERROR_END_OF_STREAM;
5926 }
5927
5928 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
5929 BufferInfo::Status status = BufferInfo::getSafeStatus(info);
5930 if (status != BufferInfo::OWNED_BY_UPSTREAM) {
5931 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
5932 mCodec->dumpBuffers(kPortIndexInput);
5933 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
5934 return;
5935 }
5936
5937 info->mStatus = BufferInfo::OWNED_BY_US;
5938
5939 switch (mode) {
5940 case KEEP_BUFFERS:
5941 {
5942 if (eos) {
5943 if (!mCodec->mPortEOS[kPortIndexInput]) {
5944 mCodec->mPortEOS[kPortIndexInput] = true;
5945 mCodec->mInputEOSResult = err;
5946 }
5947 }
5948 break;
5949 }
5950
5951 case RESUBMIT_BUFFERS:
5952 {
5953 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
5954 // Do not send empty input buffer w/o EOS to the component.
5955 if (buffer->size() == 0 && !eos) {
5956 postFillThisBuffer(info);
5957 break;
5958 }
5959
5960 int64_t timeUs;
5961 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
5962
5963 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;
5964
5965 MetadataBufferType metaType = mCodec->mInputMetadataType;
5966 int32_t isCSD = 0;
5967 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
5968 if (mCodec->mIsLegacyVP9Decoder) {
5969 ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
5970 mCodec->mComponentName.c_str(), bufferID);
5971 postFillThisBuffer(info);
5972 break;
5973 }
5974 flags |= OMX_BUFFERFLAG_CODECCONFIG;
5975 metaType = kMetadataBufferTypeInvalid;
5976 }
5977
5978 if (eos) {
5979 flags |= OMX_BUFFERFLAG_EOS;
5980 }
5981
5982 if (buffer != info->mCodecData) {
5983 ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
5984 mCodec->mComponentName.c_str(),
5985 bufferID,
5986 buffer.get(), info->mCodecData.get());
5987
5988 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
5989 if (converter == NULL || isCSD) {
5990 converter = getCopyConverter();
5991 }
5992 status_t err = converter->convert(buffer, info->mCodecData);
5993 if (err != OK) {
5994 mCodec->signalError(OMX_ErrorUndefined, err);
5995 return;
5996 }
5997 }
5998
5999 if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
6000 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
6001 mCodec->mComponentName.c_str(), bufferID);
6002 } else if (flags & OMX_BUFFERFLAG_EOS) {
6003 ALOGV("[%s] calling emptyBuffer %u w/ EOS",
6004 mCodec->mComponentName.c_str(), bufferID);
6005 } else {
6006 #if TRACK_BUFFER_TIMING
6007 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
6008 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
6009 #else
6010 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
6011 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
6012 #endif
6013 }
6014
6015 #if TRACK_BUFFER_TIMING
6016 ACodec::BufferStats stats;
6017 stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
6018 stats.mFillBufferDoneTimeUs = -1ll;
6019 mCodec->mBufferStats.add(timeUs, stats);
6020 #endif
6021
6022 if (mCodec->storingMetadataInDecodedBuffers()) {
6023 // try to submit an output buffer for each input buffer
6024 PortMode outputMode = getPortMode(kPortIndexOutput);
6025
6026 ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
6027 mCodec->mMetadataBuffersToSubmit,
6028 (outputMode == FREE_BUFFERS ? "FREE" :
6029 outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
6030 if (outputMode == RESUBMIT_BUFFERS) {
6031 mCodec->submitOutputMetadataBuffer();
6032 }
6033 }
6034 info->checkReadFence("onInputBufferFilled");
6035
6036 status_t err2 = OK;
6037 switch (metaType) {
6038 case kMetadataBufferTypeInvalid:
6039 break;
6040 #ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
6041 case kMetadataBufferTypeNativeHandleSource:
6042 if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
6043 VideoNativeHandleMetadata *vnhmd =
6044 (VideoNativeHandleMetadata*)info->mCodecData->base();
6045 err2 = mCodec->mOMX->updateNativeHandleInMeta(
6046 mCodec->mNode, kPortIndexInput,
6047 NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
6048 bufferID);
6049 }
6050 break;
6051 case kMetadataBufferTypeANWBuffer:
6052 if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
6053 VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
6054 err2 = mCodec->mOMX->updateGraphicBufferInMeta(
6055 mCodec->mNode, kPortIndexInput,
6056 new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
6057 bufferID);
6058 }
6059 break;
6060 #endif
6061 default:
6062 ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
6063 asString(metaType), info->mCodecData->size(),
6064 sizeof(buffer_handle_t) * 8);
6065 err2 = ERROR_UNSUPPORTED;
6066 break;
6067 }
6068
6069 if (err2 == OK) {
6070 err2 = mCodec->mOMX->emptyBuffer(
6071 mCodec->mNode,
6072 bufferID,
6073 0,
6074 info->mCodecData->size(),
6075 flags,
6076 timeUs,
6077 info->mFenceFd);
6078 }
6079 info->mFenceFd = -1;
6080 if (err2 != OK) {
6081 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
6082 return;
6083 }
6084 info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
6085
6086 if (!eos && err == OK) {
6087 getMoreInputDataIfPossible();
6088 } else {
6089 ALOGV("[%s] Signalled EOS (%d) on the input port",
6090 mCodec->mComponentName.c_str(), err);
6091
6092 mCodec->mPortEOS[kPortIndexInput] = true;
6093 mCodec->mInputEOSResult = err;
6094 }
6095 } else if (!mCodec->mPortEOS[kPortIndexInput]) {
6096 if (err != OK && err != ERROR_END_OF_STREAM) {
6097 ALOGV("[%s] Signalling EOS on the input port due to error %d",
6098 mCodec->mComponentName.c_str(), err);
6099 } else {
6100 ALOGV("[%s] Signalling EOS on the input port",
6101 mCodec->mComponentName.c_str());
6102 }
6103
6104 ALOGV("[%s] calling emptyBuffer %u signalling EOS",
6105 mCodec->mComponentName.c_str(), bufferID);
6106
6107 info->checkReadFence("onInputBufferFilled");
6108 status_t err2 = mCodec->mOMX->emptyBuffer(
6109 mCodec->mNode,
6110 bufferID,
6111 0,
6112 0,
6113 OMX_BUFFERFLAG_EOS,
6114 0,
6115 info->mFenceFd);
6116 info->mFenceFd = -1;
6117 if (err2 != OK) {
6118 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
6119 return;
6120 }
6121 info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
6122
6123 mCodec->mPortEOS[kPortIndexInput] = true;
6124 mCodec->mInputEOSResult = err;
6125 }
6126 break;
6127 }
6128
6129 case FREE_BUFFERS:
6130 break;
6131
6132 default:
6133 ALOGE("invalid port mode: %d", mode);
6134 break;
6135 }
6136 }
6137
6138 void ACodec::BaseState::getMoreInputDataIfPossible() {
6139 if (mCodec->mPortEOS[kPortIndexInput]) {
6140 return;
6141 }
6142
6143 BufferInfo *eligible = NULL;
6144
6145 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
6146 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
6147
6148 #if 0
6149 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
6150 // There's already a "read" pending.
6151 return;
6152 }
6153 #endif
6154
6155 if (info->mStatus == BufferInfo::OWNED_BY_US) {
6156 eligible = info;
6157 }
6158 }
6159
6160 if (eligible == NULL) {
6161 return;
6162 }
6163
6164 postFillThisBuffer(eligible);
6165 }
6166
6167 bool ACodec::BaseState::onOMXFillBufferDone(
6168 IOMX::buffer_id bufferID,
6169 size_t rangeOffset, size_t rangeLength,
6170 OMX_U32 flags,
6171 int64_t timeUs,
6172 int fenceFd) {
6173 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
6174 mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
6175
6176 ssize_t index;
6177 status_t err = OK;
6178
6179 #if TRACK_BUFFER_TIMING
6180 index = mCodec->mBufferStats.indexOfKey(timeUs);
6181 if (index >= 0) {
6182 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
6183 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();
6184
6185 ALOGI("frame PTS %lld: %lld",
6186 timeUs,
6187 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);
6188
6189 mCodec->mBufferStats.removeItemsAt(index);
6190 stats = NULL;
6191 }
6192 #endif
6193
6194 BufferInfo *info =
6195 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
6196 BufferInfo::Status status = BufferInfo::getSafeStatus(info);
6197 if (status != BufferInfo::OWNED_BY_COMPONENT) {
6198 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
6199 mCodec->dumpBuffers(kPortIndexOutput);
6200 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
6201 if (fenceFd >= 0) {
6202 ::close(fenceFd);
6203 }
6204 return true;
6205 }
6206
6207 info->mDequeuedAt = ++mCodec->mDequeueCounter;
6208 info->mStatus = BufferInfo::OWNED_BY_US;
6209
6210 if (info->mRenderInfo != NULL) {
6211 // The fence for an emptied buffer must have signaled, but there still could be queued
6212 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
6213 // as we will soon requeue this buffer to the surface. While in theory we could still keep
6214 // track of buffers that are requeued to the surface, it is better to add support to the
6215 // buffer-queue to notify us of released buffers and their fences (in the future).
6216 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
6217 }
6218
6219 // byte buffers cannot take fences, so wait for any fence now
6220 if (mCodec->mNativeWindow == NULL) {
6221 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
6222 fenceFd = -1;
6223 }
6224 info->setReadFence(fenceFd, "onOMXFillBufferDone");
6225
6226 PortMode mode = getPortMode(kPortIndexOutput);
6227
6228 switch (mode) {
6229 case KEEP_BUFFERS:
6230 break;
6231
6232 case RESUBMIT_BUFFERS:
6233 {
6234 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
6235 || mCodec->mPortEOS[kPortIndexOutput])) {
6236 ALOGV("[%s] calling fillBuffer %u",
6237 mCodec->mComponentName.c_str(), info->mBufferID);
6238
6239 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
6240 info->mFenceFd = -1;
6241 if (err != OK) {
6242 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6243 return true;
6244 }
6245
6246 info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
6247 break;
6248 }
6249
6250 sp<AMessage> reply =
6251 new AMessage(kWhatOutputBufferDrained, mCodec);
6252
6253 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
6254 // pretend that output format has changed on the first frame (we used to do this)
6255 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
6256 mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
6257 }
6258 mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
6259 mCodec->sendFormatChange();
6260 } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
6261 // If potentially rendering onto a surface, always save key format data (crop &
6262 // data space) so that we can set it if and once the buffer is rendered.
6263 mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
6264 }
6265
6266 if (mCodec->usingMetadataOnEncoderOutput()) {
6267 native_handle_t *handle = NULL;
6268 VideoNativeHandleMetadata &nativeMeta =
6269 *(VideoNativeHandleMetadata *)info->mData->data();
6270 if (info->mData->size() >= sizeof(nativeMeta)
6271 && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
6272 #ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
6273 // handle is only valid on 32-bit/mediaserver process
6274 handle = NULL;
6275 #else
6276 handle = (native_handle_t *)nativeMeta.pHandle;
6277 #endif
6278 }
6279 info->mData->meta()->setPointer("handle", handle);
6280 info->mData->meta()->setInt32("rangeOffset", rangeOffset);
6281 info->mData->meta()->setInt32("rangeLength", rangeLength);
6282 } else if (info->mData == info->mCodecData) {
6283 info->mData->setRange(rangeOffset, rangeLength);
6284 } else {
6285 info->mCodecData->setRange(rangeOffset, rangeLength);
6286 // in this case we know that mConverter is not null
6287 status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
6288 info->mCodecData, info->mData);
6289 if (err != OK) {
6290 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6291 return true;
6292 }
6293 }
6294 #if 0
6295 if (mCodec->mNativeWindow == NULL) {
6296 if (IsIDR(info->mData)) {
6297 ALOGI("IDR frame");
6298 }
6299 }
6300 #endif
6301
6302 if (mCodec->mSkipCutBuffer != NULL) {
6303 mCodec->mSkipCutBuffer->submit(info->mData);
6304 }
6305 info->mData->meta()->setInt64("timeUs", timeUs);
6306
6307 sp<AMessage> notify = mCodec->mNotify->dup();
6308 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
6309 notify->setInt32("buffer-id", info->mBufferID);
6310 notify->setBuffer("buffer", info->mData);
6311 notify->setInt32("flags", flags);
6312
6313 reply->setInt32("buffer-id", info->mBufferID);
6314
6315 notify->setMessage("reply", reply);
6316
6317 notify->post();
6318
6319 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
6320
6321 if (flags & OMX_BUFFERFLAG_EOS) {
6322 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());
6323
6324 sp<AMessage> notify = mCodec->mNotify->dup();
6325 notify->setInt32("what", CodecBase::kWhatEOS);
6326 notify->setInt32("err", mCodec->mInputEOSResult);
6327 notify->post();
6328
6329 mCodec->mPortEOS[kPortIndexOutput] = true;
6330 }
6331 break;
6332 }
6333
6334 case FREE_BUFFERS:
6335 err = mCodec->freeBuffer(kPortIndexOutput, index);
6336 if (err != OK) {
6337 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6338 return true;
6339 }
6340 break;
6341
6342 default:
6343 ALOGE("Invalid port mode: %d", mode);
6344 return false;
6345 }
6346
6347 return true;
6348 }
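// Output buffer ownership cycle in the RESUBMIT_BUFFERS case:
//   OWNED_BY_COMPONENT  --onOMXFillBufferDone-->          OWNED_BY_US
//   OWNED_BY_US         --kWhatDrainThisBuffer notify-->  OWNED_BY_DOWNSTREAM
//   OWNED_BY_DOWNSTREAM --onOutputBufferDrained-->        OWNED_BY_US
//                        (or OWNED_BY_NATIVE_WINDOW when the buffer is queued for render)
//   OWNED_BY_US         --IOMX::fillBuffer-->             OWNED_BY_COMPONENT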
6349
6350 void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
6351 IOMX::buffer_id bufferID;
6352 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
6353 ssize_t index;
6354 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
6355 BufferInfo::Status status = BufferInfo::getSafeStatus(info);
6356 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
6357 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
6358 mCodec->dumpBuffers(kPortIndexOutput);
6359 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
6360 return;
6361 }
6362
6363 android_native_rect_t crop;
6364 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
6365 && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
6366 mCodec->mLastNativeWindowCrop = crop;
6367 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
6368 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
6369 }
6370
6371 int32_t dataSpace;
6372 if (msg->findInt32("dataspace", &dataSpace)
6373 && dataSpace != mCodec->mLastNativeWindowDataSpace) {
6374 status_t err = native_window_set_buffers_data_space(
6375 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
6376 mCodec->mLastNativeWindowDataSpace = dataSpace;
6377 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
6378 }
6379
6380 int32_t render;
6381 if (mCodec->mNativeWindow != NULL
6382 && msg->findInt32("render", &render) && render != 0
6383 && info->mData != NULL && info->mData->size() != 0) {
6384 ATRACE_NAME("render");
6385 // The client wants this buffer to be rendered.
6386
6387 // save buffers sent to the surface so we can get render time when they return
6388 int64_t mediaTimeUs = -1;
6389 info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
6390 if (mediaTimeUs >= 0) {
6391 mCodec->mRenderTracker.onFrameQueued(
6392 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
6393 }
6394
6395 int64_t timestampNs = 0;
6396 if (!msg->findInt64("timestampNs", ×tampNs)) {
6397 // use media timestamp if client did not request a specific render timestamp
6398 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) {
6399 ALOGV("using buffer PTS of %lld", (long long)timestampNs);
6400 timestampNs *= 1000;
6401 }
6402 }
6403
6404 status_t err;
6405 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
6406 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
6407
6408 info->checkReadFence("onOutputBufferDrained before queueBuffer");
6409 err = mCodec->mNativeWindow->queueBuffer(
6410 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
6411 info->mFenceFd = -1;
6412 if (err == OK) {
6413 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
6414 } else {
6415 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
6416 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6417 info->mStatus = BufferInfo::OWNED_BY_US;
6418 // keeping read fence as write fence to avoid clobbering
6419 info->mIsReadFence = false;
6420 }
6421 } else {
6422 if (mCodec->mNativeWindow != NULL &&
6423 (info->mData == NULL || info->mData->size() != 0)) {
6424 // move read fence into write fence to avoid clobbering
6425 info->mIsReadFence = false;
6426 ATRACE_NAME("frame-drop");
6427 }
6428 info->mStatus = BufferInfo::OWNED_BY_US;
6429 }
6430
6431 PortMode mode = getPortMode(kPortIndexOutput);
6432
6433 switch (mode) {
6434 case KEEP_BUFFERS:
6435 {
6436 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?
6437
6438 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
6439 // We cannot resubmit the buffer we just rendered, dequeue
6440 // the spare instead.
6441
6442 info = mCodec->dequeueBufferFromNativeWindow();
6443 }
6444 break;
6445 }
6446
6447 case RESUBMIT_BUFFERS:
6448 {
6449 if (!mCodec->mPortEOS[kPortIndexOutput]) {
6450 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
6451 // We cannot resubmit the buffer we just rendered, dequeue
6452 // the spare instead.
6453
6454 info = mCodec->dequeueBufferFromNativeWindow();
6455 }
6456
6457 if (info != NULL) {
6458 ALOGV("[%s] calling fillBuffer %u",
6459 mCodec->mComponentName.c_str(), info->mBufferID);
6460 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
6461 status_t err = mCodec->mOMX->fillBuffer(
6462 mCodec->mNode, info->mBufferID, info->mFenceFd);
6463 info->mFenceFd = -1;
6464 if (err == OK) {
6465 info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
6466 } else {
6467 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6468 }
6469 }
6470 }
6471 break;
6472 }
6473
6474 case FREE_BUFFERS:
6475 {
6476 status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
6477 if (err != OK) {
6478 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6479 }
6480 break;
6481 }
6482
6483 default:
6484 ALOGE("Invalid port mode: %d", mode);
6485 return;
6486 }
6487 }
6488
6489 ////////////////////////////////////////////////////////////////////////////////
6490
6491 ACodec::UninitializedState::UninitializedState(ACodec *codec)
6492 : BaseState(codec) {
6493 }
6494
6495 void ACodec::UninitializedState::stateEntered() {
6496 ALOGV("Now uninitialized");
6497
6498 if (mDeathNotifier != NULL) {
6499 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
6500 mDeathNotifier.clear();
6501 }
6502
6503 mCodec->mUsingNativeWindow = false;
6504 mCodec->mNativeWindow.clear();
6505 mCodec->mNativeWindowUsageBits = 0;
6506 mCodec->mNode = 0;
6507 mCodec->mOMX.clear();
6508 mCodec->mQuirks = 0;
6509 mCodec->mFlags = 0;
6510 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
6511 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
6512 mCodec->mConverter[0].clear();
6513 mCodec->mConverter[1].clear();
6514 mCodec->mComponentName.clear();
6515 }
6516
6517 bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
6518 bool handled = false;
6519
6520 switch (msg->what()) {
6521 case ACodec::kWhatSetup:
6522 {
6523 onSetup(msg);
6524
6525 handled = true;
6526 break;
6527 }
6528
6529 case ACodec::kWhatAllocateComponent:
6530 {
6531 onAllocateComponent(msg);
6532 handled = true;
6533 break;
6534 }
6535
6536 case ACodec::kWhatShutdown:
6537 {
6538 int32_t keepComponentAllocated;
6539 CHECK(msg->findInt32(
6540 "keepComponentAllocated", &keepComponentAllocated));
6541 ALOGW_IF(keepComponentAllocated,
6542 "cannot keep component allocated on shutdown in Uninitialized state");
6543
6544 sp<AMessage> notify = mCodec->mNotify->dup();
6545 notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
6546 notify->post();
6547
6548 handled = true;
6549 break;
6550 }
6551
6552 case ACodec::kWhatFlush:
6553 {
6554 sp<AMessage> notify = mCodec->mNotify->dup();
6555 notify->setInt32("what", CodecBase::kWhatFlushCompleted);
6556 notify->post();
6557
6558 handled = true;
6559 break;
6560 }
6561
6562 case ACodec::kWhatReleaseCodecInstance:
6563 {
6564 // nothing to do, as we have already signaled shutdown
6565 handled = true;
6566 break;
6567 }
6568
6569 default:
6570 return BaseState::onMessageReceived(msg);
6571 }
6572
6573 return handled;
6574 }
6575
6576 void ACodec::UninitializedState::onSetup(
6577 const sp<AMessage> &msg) {
6578 if (onAllocateComponent(msg)
6579 && mCodec->mLoadedState->onConfigureComponent(msg)) {
6580 mCodec->mLoadedState->onStart();
6581 }
6582 }
6583
6584 bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
6585 ALOGV("onAllocateComponent");
6586
6587 CHECK(mCodec->mNode == 0);
6588
6589 OMXClient client;
6590 if (client.connect() != OK) {
6591 mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
6592 return false;
6593 }
6594
6595 sp<IOMX> omx = client.interface();
6596
6597 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);
6598
6599 Vector<AString> matchingCodecs;
6600
6601 AString mime;
6602
6603 AString componentName;
6604 uint32_t quirks = 0;
6605 int32_t encoder = false;
6606 if (msg->findString("componentName", &componentName)) {
6607 sp<IMediaCodecList> list = MediaCodecList::getInstance();
6608 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
6609 matchingCodecs.add(componentName);
6610 }
6611 } else {
6612 CHECK(msg->findString("mime", &mime));
6613
6614 if (!msg->findInt32("encoder", &encoder)) {
6615 encoder = false;
6616 }
6617
6618 MediaCodecList::findMatchingCodecs(
6619 mime.c_str(),
6620 encoder, // createEncoder
6621 0, // flags
6622 &matchingCodecs);
6623 }
6624
6625 sp<CodecObserver> observer = new CodecObserver;
6626 IOMX::node_id node = 0;
6627
6628 status_t err = NAME_NOT_FOUND;
6629 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
6630 ++matchIndex) {
6631 componentName = matchingCodecs[matchIndex];
6632 quirks = MediaCodecList::getQuirksFor(componentName.c_str());
6633
6634 pid_t tid = gettid();
6635 int prevPriority = androidGetThreadPriority(tid);
6636 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
6637 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
6638 androidSetThreadPriority(tid, prevPriority);
6639
6640 if (err == OK) {
6641 break;
6642 } else {
6643 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
6644 }
6645
6646 node = 0;
6647 }
6648
6649 if (node == 0) {
6650 if (!mime.empty()) {
6651 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
6652 encoder ? "en" : "de", mime.c_str(), err);
6653 } else {
6654 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
6655 }
6656
6657 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
6658 return false;
6659 }
6660
6661 mDeathNotifier = new DeathNotifier(notify);
6662 if (mCodec->mNodeBinder == NULL ||
6663 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
6664 // This was a local binder, if it dies so do we, we won't care
6665 // about any notifications in the afterlife.
6666 mDeathNotifier.clear();
6667 }
6668
6669 notify = new AMessage(kWhatOMXMessageList, mCodec);
6670 observer->setNotificationMessage(notify);
6671
6672 mCodec->mComponentName = componentName;
6673 mCodec->mRenderTracker.setComponentName(componentName);
6674 mCodec->mFlags = 0;
6675
6676 if (componentName.endsWith(".secure")) {
6677 mCodec->mFlags |= kFlagIsSecure;
6678 mCodec->mFlags |= kFlagIsGrallocUsageProtected;
6679 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
6680 }
6681
6682 mCodec->mQuirks = quirks;
6683 mCodec->mOMX = omx;
6684 mCodec->mNode = node;
6685
6686 {
6687 sp<AMessage> notify = mCodec->mNotify->dup();
6688 notify->setInt32("what", CodecBase::kWhatComponentAllocated);
6689 notify->setString("componentName", mCodec->mComponentName.c_str());
6690 notify->post();
6691 }
6692
6693 mCodec->changeState(mCodec->mLoadedState);
6694
6695 return true;
6696 }
6697
6698 ////////////////////////////////////////////////////////////////////////////////
6699
6700 ACodec::LoadedState::LoadedState(ACodec *codec)
6701 : BaseState(codec) {
6702 }
6703
6704 void ACodec::LoadedState::stateEntered() {
6705 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());
6706
6707 mCodec->mPortEOS[kPortIndexInput] =
6708 mCodec->mPortEOS[kPortIndexOutput] = false;
6709
6710 mCodec->mInputEOSResult = OK;
6711
6712 mCodec->mDequeueCounter = 0;
6713 mCodec->mMetadataBuffersToSubmit = 0;
6714 mCodec->mRepeatFrameDelayUs = -1ll;
6715 mCodec->mInputFormat.clear();
6716 mCodec->mOutputFormat.clear();
6717 mCodec->mBaseOutputFormat.clear();
6718
6719 if (mCodec->mShutdownInProgress) {
6720 bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
6721
6722 mCodec->mShutdownInProgress = false;
6723 mCodec->mKeepComponentAllocated = false;
6724
6725 onShutdown(keepComponentAllocated);
6726 }
6727 mCodec->mExplicitShutdown = false;
6728
6729 mCodec->processDeferredMessages();
6730 }
6731
6732 void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
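// If the caller asked to keep the component allocated we simply stay in
// LoadedState so a later start can skip allocation; otherwise the OMX
// node is freed and we fall back to UninitializedState. The completion
// notification is only posted for an explicitly requested shutdown.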
6733 if (!keepComponentAllocated) {
6734 (void)mCodec->mOMX->freeNode(mCodec->mNode);
6735
6736 mCodec->changeState(mCodec->mUninitializedState);
6737 }
6738
6739 if (mCodec->mExplicitShutdown) {
6740 sp<AMessage> notify = mCodec->mNotify->dup();
6741 notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
6742 notify->post();
6743 mCodec->mExplicitShutdown = false;
6744 }
6745 }
6746
6747 bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
6748 bool handled = false;
6749
6750 switch (msg->what()) {
6751 case ACodec::kWhatConfigureComponent:
6752 {
6753 onConfigureComponent(msg);
6754 handled = true;
6755 break;
6756 }
6757
6758 case ACodec::kWhatCreateInputSurface:
6759 {
6760 onCreateInputSurface(msg);
6761 handled = true;
6762 break;
6763 }
6764
6765 case ACodec::kWhatSetInputSurface:
6766 {
6767 onSetInputSurface(msg);
6768 handled = true;
6769 break;
6770 }
6771
6772 case ACodec::kWhatStart:
6773 {
6774 onStart();
6775 handled = true;
6776 break;
6777 }
6778
6779 case ACodec::kWhatShutdown:
6780 {
6781 int32_t keepComponentAllocated;
6782 CHECK(msg->findInt32(
6783 "keepComponentAllocated", &keepComponentAllocated));
6784
6785 mCodec->mExplicitShutdown = true;
6786 onShutdown(keepComponentAllocated);
6787
6788 handled = true;
6789 break;
6790 }
6791
6792 case ACodec::kWhatFlush:
6793 {
6794 sp<AMessage> notify = mCodec->mNotify->dup();
6795 notify->setInt32("what", CodecBase::kWhatFlushCompleted);
6796 notify->post();
6797
6798 handled = true;
6799 break;
6800 }
6801
6802 default:
6803 return BaseState::onMessageReceived(msg);
6804 }
6805
6806 return handled;
6807 }
6808
6809 bool ACodec::LoadedState::onConfigureComponent(
6810 const sp<AMessage> &msg) {
6811 ALOGV("onConfigureComponent");
6812
6813 CHECK(mCodec->mNode != 0);
6814
6815 status_t err = OK;
6816 AString mime;
6817 if (!msg->findString("mime", &mime)) {
6818 err = BAD_VALUE;
6819 } else {
6820 err = mCodec->configureCodec(mime.c_str(), msg);
6821 }
6822 if (err != OK) {
6823 ALOGE("[%s] configureCodec returning error %d",
6824 mCodec->mComponentName.c_str(), err);
6825
6826 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6827 return false;
6828 }
6829
6830 {
6831 sp<AMessage> notify = mCodec->mNotify->dup();
6832 notify->setInt32("what", CodecBase::kWhatComponentConfigured);
6833 notify->setMessage("input-format", mCodec->mInputFormat);
6834 notify->setMessage("output-format", mCodec->mOutputFormat);
6835 notify->post();
6836 }
6837
6838 return true;
6839 }
6840
6841 status_t ACodec::LoadedState::setupInputSurface() {
6842 status_t err = OK;
6843
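// Each option below is only pushed to the component when the matching
// configure-time setting was provided; any failure aborts the surface
// setup and is reported to the caller.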
6844 if (mCodec->mRepeatFrameDelayUs > 0ll) {
6845 err = mCodec->mOMX->setInternalOption(
6846 mCodec->mNode,
6847 kPortIndexInput,
6848 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
6849 &mCodec->mRepeatFrameDelayUs,
6850 sizeof(mCodec->mRepeatFrameDelayUs));
6851
6852 if (err != OK) {
6853 ALOGE("[%s] Unable to configure option to repeat previous "
6854 "frames (err %d)",
6855 mCodec->mComponentName.c_str(),
6856 err);
6857 return err;
6858 }
6859 }
6860
6861 if (mCodec->mMaxPtsGapUs > 0ll) {
6862 err = mCodec->mOMX->setInternalOption(
6863 mCodec->mNode,
6864 kPortIndexInput,
6865 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
6866 &mCodec->mMaxPtsGapUs,
6867 sizeof(mCodec->mMaxPtsGapUs));
6868
6869 if (err != OK) {
6870 ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
6871 mCodec->mComponentName.c_str(),
6872 err);
6873 return err;
6874 }
6875 }
6876
6877 if (mCodec->mMaxFps > 0) {
6878 err = mCodec->mOMX->setInternalOption(
6879 mCodec->mNode,
6880 kPortIndexInput,
6881 IOMX::INTERNAL_OPTION_MAX_FPS,
6882 &mCodec->mMaxFps,
6883 sizeof(mCodec->mMaxFps));
6884
6885 if (err != OK) {
6886 ALOGE("[%s] Unable to configure max fps (err %d)",
6887 mCodec->mComponentName.c_str(),
6888 err);
6889 return err;
6890 }
6891 }
6892
6893 if (mCodec->mTimePerCaptureUs > 0ll
6894 && mCodec->mTimePerFrameUs > 0ll) {
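// The option takes two int64s: [0] is the duration of each output frame,
// [1] is the wall-clock interval between captures (time-lapse recording).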
6895 int64_t timeLapse[2];
6896 timeLapse[0] = mCodec->mTimePerFrameUs;
6897 timeLapse[1] = mCodec->mTimePerCaptureUs;
6898 err = mCodec->mOMX->setInternalOption(
6899 mCodec->mNode,
6900 kPortIndexInput,
6901 IOMX::INTERNAL_OPTION_TIME_LAPSE,
6902 &timeLapse[0],
6903 sizeof(timeLapse));
6904
6905 if (err != OK) {
6906 ALOGE("[%s] Unable to configure time lapse (err %d)",
6907 mCodec->mComponentName.c_str(),
6908 err);
6909 return err;
6910 }
6911 }
6912
6913 if (mCodec->mCreateInputBuffersSuspended) {
6914 bool suspend = true;
6915 err = mCodec->mOMX->setInternalOption(
6916 mCodec->mNode,
6917 kPortIndexInput,
6918 IOMX::INTERNAL_OPTION_SUSPEND,
6919 &suspend,
6920 sizeof(suspend));
6921
6922 if (err != OK) {
6923 ALOGE("[%s] Unable to configure option to suspend (err %d)",
6924 mCodec->mComponentName.c_str(),
6925 err);
6926 return err;
6927 }
6928 }
6929
6930 uint32_t usageBits;
6931 if (mCodec->mOMX->getParameter(
6932 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
6933 &usageBits, sizeof(usageBits)) == OK) {
6934 mCodec->mInputFormat->setInt32(
6935 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
6936 }
6937
6938 sp<ABuffer> colorAspectsBuffer;
6939 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
6940 err = mCodec->mOMX->setInternalOption(
6941 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
6942 colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
6943 if (err != OK) {
6944 ALOGE("[%s] Unable to configure color aspects (err %d)",
6945 mCodec->mComponentName.c_str(), err);
6946 return err;
6947 }
6948 }
6949 return OK;
6950 }
6951
6952 void ACodec::LoadedState::onCreateInputSurface(
6953 const sp<AMessage> & /* msg */) {
6954 ALOGV("onCreateInputSurface");
6955
6956 sp<AMessage> notify = mCodec->mNotify->dup();
6957 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
6958
6959 android_dataspace dataSpace;
6960 status_t err =
6961 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
6962 notify->setMessage("input-format", mCodec->mInputFormat);
6963 notify->setMessage("output-format", mCodec->mOutputFormat);
6964
6965 sp<IGraphicBufferProducer> bufferProducer;
6966 if (err == OK) {
6967 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
6968 err = mCodec->mOMX->createInputSurface(
6969 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
6970 &mCodec->mInputMetadataType);
6971 // framework uses ANW buffers internally instead of gralloc handles
6972 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
6973 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
6974 }
6975 }
6976
6977 if (err == OK) {
6978 err = setupInputSurface();
6979 }
6980
6981 if (err == OK) {
6982 notify->setObject("input-surface",
6983 new BufferProducerWrapper(bufferProducer));
6984 } else {
6985 // Can't use mCodec->signalError() here -- MediaCodec won't forward
6986 // the error through because it's in the "configured" state. We
6987 // send a kWhatInputSurfaceCreated with an error value instead.
6988 ALOGE("[%s] onCreateInputSurface returning error %d",
6989 mCodec->mComponentName.c_str(), err);
6990 notify->setInt32("err", err);
6991 }
6992 notify->post();
6993 }
6994
6995 void ACodec::LoadedState::onSetInputSurface(
6996 const sp<AMessage> &msg) {
6997 ALOGV("onSetInputSurface");
6998
6999 sp<AMessage> notify = mCodec->mNotify->dup();
7000 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);
7001
7002 sp<RefBase> obj;
7003 CHECK(msg->findObject("input-surface", &obj));
7004 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
7005
7006 android_dataspace dataSpace;
7007 status_t err =
7008 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
7009 notify->setMessage("input-format", mCodec->mInputFormat);
7010 notify->setMessage("output-format", mCodec->mOutputFormat);
7011
7012 if (err == OK) {
7013 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
7014 err = mCodec->mOMX->setInputSurface(
7015 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
7016 &mCodec->mInputMetadataType);
7017 // framework uses ANW buffers internally instead of gralloc handles
7018 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
7019 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
7020 }
7021 }
7022
7023 if (err == OK) {
7024 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
7025 err = setupInputSurface();
7026 }
7027
7028 if (err != OK) {
7029 // Can't use mCodec->signalError() here -- MediaCodec won't forward
7030 // the error through because it's in the "configured" state. We
7031 // send a kWhatInputSurfaceAccepted with an error value instead.
7032 ALOGE("[%s] onSetInputSurface returning error %d",
7033 mCodec->mComponentName.c_str(), err);
7034 notify->setInt32("err", err);
7035 }
7036 notify->post();
7037 }
7038
7039 void ACodec::LoadedState::onStart() {
7040 ALOGV("onStart");
7041
7042 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
7043 if (err != OK) {
7044 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
7045 } else {
7046 mCodec->changeState(mCodec->mLoadedToIdleState);
7047 }
7048 }
7049
7050 ////////////////////////////////////////////////////////////////////////////////
7051
7052 ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
7053 : BaseState(codec) {
7054 }
7055
7056 void ACodec::LoadedToIdleState::stateEntered() {
7057 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());
7058
7059 status_t err;
7060 if ((err = allocateBuffers()) != OK) {
7061 ALOGE("Failed to allocate buffers after transitioning to IDLE state "
7062 "(error 0x%08x)",
7063 err);
7064
7065 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
7066
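// Roll the component back to Loaded and release whatever buffers were
// allocated so that LoadedState starts from a clean slate.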
7067 mCodec->mOMX->sendCommand(
7068 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
7069 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
7070 mCodec->freeBuffersOnPort(kPortIndexInput);
7071 }
7072 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
7073 mCodec->freeBuffersOnPort(kPortIndexOutput);
7074 }
7075
7076 mCodec->changeState(mCodec->mLoadedState);
7077 }
7078 }
7079
7080 status_t ACodec::LoadedToIdleState::allocateBuffers() {
7081 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);
7082
7083 if (err != OK) {
7084 return err;
7085 }
7086
7087 return mCodec->allocateBuffersOnPort(kPortIndexOutput);
7088 }
7089
7090 bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
7091 switch (msg->what()) {
7092 case kWhatSetParameters:
7093 case kWhatShutdown:
7094 {
7095 mCodec->deferMessage(msg);
7096 return true;
7097 }
7098
7099 case kWhatSignalEndOfInputStream:
7100 {
7101 mCodec->onSignalEndOfInputStream();
7102 return true;
7103 }
7104
7105 case kWhatResume:
7106 {
7107 // We'll be active soon enough.
7108 return true;
7109 }
7110
7111 case kWhatFlush:
7112 {
7113 // We haven't even started yet, so we're flushed alright...
7114 sp<AMessage> notify = mCodec->mNotify->dup();
7115 notify->setInt32("what", CodecBase::kWhatFlushCompleted);
7116 notify->post();
7117 return true;
7118 }
7119
7120 default:
7121 return BaseState::onMessageReceived(msg);
7122 }
7123 }
7124
7125 bool ACodec::LoadedToIdleState::onOMXEvent(
7126 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
7127 switch (event) {
7128 case OMX_EventCmdComplete:
7129 {
7130 status_t err = OK;
7131 if (data1 != (OMX_U32)OMX_CommandStateSet
7132 || data2 != (OMX_U32)OMX_StateIdle) {
7133 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
7134 asString((OMX_COMMANDTYPE)data1), data1,
7135 asString((OMX_STATETYPE)data2), data2);
7136 err = FAILED_TRANSACTION;
7137 }
7138
7139 if (err == OK) {
7140 err = mCodec->mOMX->sendCommand(
7141 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
7142 }
7143
7144 if (err != OK) {
7145 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
7146 } else {
7147 mCodec->changeState(mCodec->mIdleToExecutingState);
7148 }
7149
7150 return true;
7151 }
7152
7153 default:
7154 return BaseState::onOMXEvent(event, data1, data2);
7155 }
7156 }
7157
7158 ////////////////////////////////////////////////////////////////////////////////
7159
7160 ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
7161 : BaseState(codec) {
7162 }
7163
7164 void ACodec::IdleToExecutingState::stateEntered() {
7165 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
7166 }
7167
7168 bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
7169 switch (msg->what()) {
7170 case kWhatSetParameters:
7171 case kWhatShutdown:
7172 {
7173 mCodec->deferMessage(msg);
7174 return true;
7175 }
7176
7177 case kWhatResume:
7178 {
7179 // We'll be active soon enough.
7180 return true;
7181 }
7182
7183 case kWhatFlush:
7184 {
7185 // We haven't even started yet, so we're flushed alright...
7186 sp<AMessage> notify = mCodec->mNotify->dup();
7187 notify->setInt32("what", CodecBase::kWhatFlushCompleted);
7188 notify->post();
7189
7190 return true;
7191 }
7192
7193 case kWhatSignalEndOfInputStream:
7194 {
7195 mCodec->onSignalEndOfInputStream();
7196 return true;
7197 }
7198
7199 default:
7200 return BaseState::onMessageReceived(msg);
7201 }
7202 }
7203
7204 bool ACodec::IdleToExecutingState::onOMXEvent(
7205 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
7206 switch (event) {
7207 case OMX_EventCmdComplete:
7208 {
7209 if (data1 != (OMX_U32)OMX_CommandStateSet
7210 || data2 != (OMX_U32)OMX_StateExecuting) {
7211 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
7212 asString((OMX_COMMANDTYPE)data1), data1,
7213 asString((OMX_STATETYPE)data2), data2);
7214 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7215 return true;
7216 }
7217
7218 mCodec->mExecutingState->resume();
7219 mCodec->changeState(mCodec->mExecutingState);
7220
7221 return true;
7222 }
7223
7224 default:
7225 return BaseState::onOMXEvent(event, data1, data2);
7226 }
7227 }
7228
7229 ////////////////////////////////////////////////////////////////////////////////
7230
7231 ACodec::ExecutingState::ExecutingState(ACodec *codec)
7232 : BaseState(codec),
7233 mActive(false) {
7234 }
7235
7236 ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
7237 OMX_U32 /* portIndex */) {
7238 return RESUBMIT_BUFFERS;
7239 }
7240
7241 void ACodec::ExecutingState::submitOutputMetaBuffers() {
7242 // submit as many buffers as there are input buffers with the codec
7243 // in case we are in port reconfiguring
7244 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
7245 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
7246
7247 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
7248 if (mCodec->submitOutputMetadataBuffer() != OK)
7249 break;
7250 }
7251 }
7252
7253 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
7254 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
7255 }
7256
7257 void ACodec::ExecutingState::submitRegularOutputBuffers() {
7258 bool failed = false;
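// Hand every output buffer we own back to the component via fillBuffer().
// Buffers currently held by the native window are skipped here and
// re-enter circulation once they are dequeued from the surface.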
7259 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
7260 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);
7261
7262 if (mCodec->mNativeWindow != NULL) {
7263 if (info->mStatus != BufferInfo::OWNED_BY_US
7264 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
7265 ALOGE("buffers should be owned by us or the surface");
7266 failed = true;
7267 break;
7268 }
7269
7270 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
7271 continue;
7272 }
7273 } else {
7274 if (info->mStatus != BufferInfo::OWNED_BY_US) {
7275 ALOGE("buffers should be owned by us");
7276 failed = true;
7277 break;
7278 }
7279 }
7280
7281 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);
7282
7283 info->checkWriteFence("submitRegularOutputBuffers");
7284 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
7285 info->mFenceFd = -1;
7286 if (err != OK) {
7287 failed = true;
7288 break;
7289 }
7290
7291 info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
7292 }
7293
7294 if (failed) {
7295 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7296 }
7297 }
7298
7299 void ACodec::ExecutingState::submitOutputBuffers() {
7300 submitRegularOutputBuffers();
7301 if (mCodec->storingMetadataInDecodedBuffers()) {
7302 submitOutputMetaBuffers();
7303 }
7304 }
7305
7306 void ACodec::ExecutingState::resume() {
7307 if (mActive) {
7308 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
7309 return;
7310 }
7311
7312 submitOutputBuffers();
7313
7314 // Post all available input buffers
7315 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
7316 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
7317 }
7318
7319 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
7320 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
7321 if (info->mStatus == BufferInfo::OWNED_BY_US) {
7322 postFillThisBuffer(info);
7323 }
7324 }
7325
7326 mActive = true;
7327 }
7328
7329 void ACodec::ExecutingState::stateEntered() {
7330 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
7331
7332 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
7333 mCodec->processDeferredMessages();
7334 }
7335
7336 bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
7337 bool handled = false;
7338
7339 switch (msg->what()) {
7340 case kWhatShutdown:
7341 {
7342 int32_t keepComponentAllocated;
7343 CHECK(msg->findInt32(
7344 "keepComponentAllocated", &keepComponentAllocated));
7345
7346 mCodec->mShutdownInProgress = true;
7347 mCodec->mExplicitShutdown = true;
7348 mCodec->mKeepComponentAllocated = keepComponentAllocated;
7349
7350 mActive = false;
7351
7352 status_t err = mCodec->mOMX->sendCommand(
7353 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
7354 if (err != OK) {
7355 if (keepComponentAllocated) {
7356 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7357 }
7358 // TODO: do some recovery here.
7359 } else {
7360 mCodec->changeState(mCodec->mExecutingToIdleState);
7361 }
7362
7363 handled = true;
7364 break;
7365 }
7366
7367 case kWhatFlush:
7368 {
7369 ALOGV("[%s] ExecutingState flushing now "
7370 "(codec owns %zu/%zu input, %zu/%zu output).",
7371 mCodec->mComponentName.c_str(),
7372 mCodec->countBuffersOwnedByComponent(kPortIndexInput),
7373 mCodec->mBuffers[kPortIndexInput].size(),
7374 mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
7375 mCodec->mBuffers[kPortIndexOutput].size());
7376
7377 mActive = false;
7378
7379 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
7380 if (err != OK) {
7381 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7382 } else {
7383 mCodec->changeState(mCodec->mFlushingState);
7384 }
7385
7386 handled = true;
7387 break;
7388 }
7389
7390 case kWhatResume:
7391 {
7392 resume();
7393
7394 handled = true;
7395 break;
7396 }
7397
7398 case kWhatRequestIDRFrame:
7399 {
7400 status_t err = mCodec->requestIDRFrame();
7401 if (err != OK) {
7402 ALOGW("Requesting an IDR frame failed.");
7403 }
7404
7405 handled = true;
7406 break;
7407 }
7408
7409 case kWhatSetParameters:
7410 {
7411 sp<AMessage> params;
7412 CHECK(msg->findMessage("params", &params));
7413
7414 status_t err = mCodec->setParameters(params);
7415
7416 sp<AMessage> reply;
7417 if (msg->findMessage("reply", &reply)) {
7418 reply->setInt32("err", err);
7419 reply->post();
7420 }
7421
7422 handled = true;
7423 break;
7424 }
7425
7426 case ACodec::kWhatSignalEndOfInputStream:
7427 {
7428 mCodec->onSignalEndOfInputStream();
7429 handled = true;
7430 break;
7431 }
7432
7433 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
7434 case kWhatSubmitOutputMetadataBufferIfEOS:
7435 {
7436 if (mCodec->mPortEOS[kPortIndexInput] &&
7437 !mCodec->mPortEOS[kPortIndexOutput]) {
7438 status_t err = mCodec->submitOutputMetadataBuffer();
7439 if (err == OK) {
7440 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
7441 }
7442 }
7443 return true;
7444 }
7445
7446 default:
7447 handled = BaseState::onMessageReceived(msg);
7448 break;
7449 }
7450
7451 return handled;
7452 }
7453
7454 status_t ACodec::setParameters(const sp<AMessage> &params) {
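// Runtime parameters arriving via a kWhatSetParameters message. Every key
// is optional and absent keys are simply skipped; the intra-refresh and
// temporal-layer settings additionally tolerate failure, since they are
// best-effort.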
7455 int32_t videoBitrate;
7456 if (params->findInt32("video-bitrate", &videoBitrate)) {
7457 OMX_VIDEO_CONFIG_BITRATETYPE configParams;
7458 InitOMXParams(&configParams);
7459 configParams.nPortIndex = kPortIndexOutput;
7460 configParams.nEncodeBitrate = videoBitrate;
7461
7462 status_t err = mOMX->setConfig(
7463 mNode,
7464 OMX_IndexConfigVideoBitrate,
7465 &configParams,
7466 sizeof(configParams));
7467
7468 if (err != OK) {
7469 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
7470 videoBitrate, err);
7471
7472 return err;
7473 }
7474 }
7475
7476 int64_t timeOffsetUs;
7477 if (params->findInt64("time-offset-us", &timeOffsetUs)) {
7478 status_t err = mOMX->setInternalOption(
7479 mNode,
7480 kPortIndexInput,
7481 IOMX::INTERNAL_OPTION_TIME_OFFSET,
7482 &timeOffsetUs,
7483 sizeof(timeOffsetUs));
7484
7485 if (err != OK) {
7486 ALOGE("[%s] Unable to set input buffer time offset (err %d)",
7487 mComponentName.c_str(),
7488 err);
7489 return err;
7490 }
7491 }
7492
7493 int64_t skipFramesBeforeUs;
7494 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
7495 status_t err =
7496 mOMX->setInternalOption(
7497 mNode,
7498 kPortIndexInput,
7499 IOMX::INTERNAL_OPTION_START_TIME,
7500 &skipFramesBeforeUs,
7501 sizeof(skipFramesBeforeUs));
7502
7503 if (err != OK) {
7504 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
7505 return err;
7506 }
7507 }
7508
7509 int32_t dropInputFrames;
7510 if (params->findInt32("drop-input-frames", &dropInputFrames)) {
7511 bool suspend = dropInputFrames != 0;
7512
7513 status_t err =
7514 mOMX->setInternalOption(
7515 mNode,
7516 kPortIndexInput,
7517 IOMX::INTERNAL_OPTION_SUSPEND,
7518 &suspend,
7519 sizeof(suspend));
7520
7521 if (err != OK) {
7522 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
7523 return err;
7524 }
7525 }
7526
7527 int32_t dummy;
7528 if (params->findInt32("request-sync", &dummy)) {
7529 status_t err = requestIDRFrame();
7530
7531 if (err != OK) {
7532 ALOGE("Requesting a sync frame failed w/ err %d", err);
7533 return err;
7534 }
7535 }
7536
7537 float rate;
7538 if (params->findFloat("operating-rate", &rate) && rate > 0) {
7539 status_t err = setOperatingRate(rate, mIsVideo);
7540 if (err != OK) {
7541 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
7542 return err;
7543 }
7544 }
7545
7546 int32_t intraRefreshPeriod = 0;
7547 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
7548 && intraRefreshPeriod > 0) {
7549 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
7550 if (err != OK) {
7551 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
7552 mComponentName.c_str());
7553 err = OK;
7554 }
7555 }
7556
7557 status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
7558 if (err != OK) {
7559 err = OK; // ignore failure
7560 }
7561
7562 return err;
7563 }
7564
7565 void ACodec::onSignalEndOfInputStream() {
7566 sp<AMessage> notify = mNotify->dup();
7567 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);
7568
7569 status_t err = mOMX->signalEndOfInputStream(mNode);
7570 if (err != OK) {
7571 notify->setInt32("err", err);
7572 }
7573 notify->post();
7574 }
7575
7576 bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
7577 mCodec->onFrameRendered(mediaTimeUs, systemNano);
7578 return true;
7579 }
7580
7581 bool ACodec::ExecutingState::onOMXEvent(
7582 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
7583 switch (event) {
7584 case OMX_EventPortSettingsChanged:
7585 {
7586 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
7587
7588 mCodec->onOutputFormatChanged();
7589
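// Only a change to the port definition itself forces the disable /
// reallocate / re-enable cycle below; crop and intra-refresh updates are
// already reflected by onOutputFormatChanged() above and need no buffer
// churn.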
7590 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
7591 mCodec->mMetadataBuffersToSubmit = 0;
7592 CHECK_EQ(mCodec->mOMX->sendCommand(
7593 mCodec->mNode,
7594 OMX_CommandPortDisable, kPortIndexOutput),
7595 (status_t)OK);
7596
7597 mCodec->freeOutputBuffersNotOwnedByComponent();
7598
7599 mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
7600 } else if (data2 != OMX_IndexConfigCommonOutputCrop
7601 && data2 != OMX_IndexConfigAndroidIntraRefresh) {
7602 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
7603 mCodec->mComponentName.c_str(), data2);
7604 }
7605
7606 return true;
7607 }
7608
7609 case OMX_EventBufferFlag:
7610 {
7611 return true;
7612 }
7613
7614 default:
7615 return BaseState::onOMXEvent(event, data1, data2);
7616 }
7617 }
7618
7619 ////////////////////////////////////////////////////////////////////////////////
7620
7621 ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
7622 ACodec *codec)
7623 : BaseState(codec) {
7624 }
7625
7626 ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
7627 OMX_U32 portIndex) {
7628 if (portIndex == kPortIndexOutput) {
7629 return FREE_BUFFERS;
7630 }
7631
7632 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);
7633
7634 return RESUBMIT_BUFFERS;
7635 }
7636
7637 bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
7638 const sp<AMessage> &msg) {
7639 bool handled = false;
7640
7641 switch (msg->what()) {
7642 case kWhatFlush:
7643 case kWhatShutdown:
7644 case kWhatResume:
7645 case kWhatSetParameters:
7646 {
7647 if (msg->what() == kWhatResume) {
7648 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
7649 }
7650
7651 mCodec->deferMessage(msg);
7652 handled = true;
7653 break;
7654 }
7655
7656 default:
7657 handled = BaseState::onMessageReceived(msg);
7658 break;
7659 }
7660
7661 return handled;
7662 }
7663
7664 void ACodec::OutputPortSettingsChangedState::stateEntered() {
7665 ALOGV("[%s] Now handling output port settings change",
7666 mCodec->mComponentName.c_str());
7667 }
7668
7669 bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
7670 int64_t mediaTimeUs, nsecs_t systemNano) {
7671 mCodec->onFrameRendered(mediaTimeUs, systemNano);
7672 return true;
7673 }
7674
7675 bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
7676 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
7677 switch (event) {
7678 case OMX_EventCmdComplete:
7679 {
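// Output port reconfiguration is a two-step handshake: when the disable
// completes we release the old buffers, re-enable the port and allocate
// buffers for the new format; when the enable completes we resume in
// ExecutingState.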
7680 if (data1 == (OMX_U32)OMX_CommandPortDisable) {
7681 if (data2 != (OMX_U32)kPortIndexOutput) {
7682 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
7683 return false;
7684 }
7685
7686 ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());
7687
7688 status_t err = OK;
7689 if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
7690 ALOGE("disabled port should be empty, but has %zu buffers",
7691 mCodec->mBuffers[kPortIndexOutput].size());
7692 err = FAILED_TRANSACTION;
7693 } else {
7694 mCodec->mDealer[kPortIndexOutput].clear();
7695 }
7696
7697 if (err == OK) {
7698 err = mCodec->mOMX->sendCommand(
7699 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
7700 }
7701
7702 if (err == OK) {
7703 err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
7704 ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
7705 "reconfiguration: (%d)", err);
7706 }
7707
7708 if (err != OK) {
7709 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
7710
7711 // This is technically not correct, but appears to be
7712 // the only way to free the component instance.
7713 // Controlled transitioning from executing->idle
7714 // and idle->loaded seems impossible, probably because
7715 // the output port never finishes re-enabling.
7716 mCodec->mShutdownInProgress = true;
7717 mCodec->mKeepComponentAllocated = false;
7718 mCodec->changeState(mCodec->mLoadedState);
7719 }
7720
7721 return true;
7722 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
7723 if (data2 != (OMX_U32)kPortIndexOutput) {
7724 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
7725 return false;
7726 }
7727
7728 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());
7729
7730 if (mCodec->mExecutingState->active()) {
7731 mCodec->mExecutingState->submitOutputBuffers();
7732 }
7733
7734 mCodec->changeState(mCodec->mExecutingState);
7735
7736 return true;
7737 }
7738
7739 return false;
7740 }
7741
7742 default:
7743 return false;
7744 }
7745 }
7746
7747 ////////////////////////////////////////////////////////////////////////////////
7748
7749 ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
7750 : BaseState(codec),
7751 mComponentNowIdle(false) {
7752 }
7753
7754 bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
7755 bool handled = false;
7756
7757 switch (msg->what()) {
7758 case kWhatFlush:
7759 {
7760 // Don't send me a flush request if you previously wanted me
7761 // to shut down.
7762 ALOGW("Ignoring flush request in ExecutingToIdleState");
7763 break;
7764 }
7765
7766 case kWhatShutdown:
7767 {
7768 // We're already doing that...
7769
7770 handled = true;
7771 break;
7772 }
7773
7774 default:
7775 handled = BaseState::onMessageReceived(msg);
7776 break;
7777 }
7778
7779 return handled;
7780 }
7781
7782 void ACodec::ExecutingToIdleState::stateEntered() {
7783 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());
7784
7785 mComponentNowIdle = false;
7786 mCodec->mLastOutputFormat.clear();
7787 }
7788
7789 bool ACodec::ExecutingToIdleState::onOMXEvent(
7790 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
7791 switch (event) {
7792 case OMX_EventCmdComplete:
7793 {
7794 if (data1 != (OMX_U32)OMX_CommandStateSet
7795 || data2 != (OMX_U32)OMX_StateIdle) {
7796 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
7797 asString((OMX_COMMANDTYPE)data1), data1,
7798 asString((OMX_STATETYPE)data2), data2);
7799 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7800 return true;
7801 }
7802
7803 mComponentNowIdle = true;
7804
7805 changeStateIfWeOwnAllBuffers();
7806
7807 return true;
7808 }
7809
7810 case OMX_EventPortSettingsChanged:
7811 case OMX_EventBufferFlag:
7812 {
7813 // We're shutting down and don't care about this anymore.
7814 return true;
7815 }
7816
7817 default:
7818 return BaseState::onOMXEvent(event, data1, data2);
7819 }
7820 }
7821
7822 void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
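// Two conditions gate the Idle->Loaded transition: the component must
// have acknowledged the Idle state and every buffer must be back with us
// (or with the native window); only then is it safe to free the ports.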
7823 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
7824 status_t err = mCodec->mOMX->sendCommand(
7825 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
7826 if (err == OK) {
7827 err = mCodec->freeBuffersOnPort(kPortIndexInput);
7828 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
7829 if (err == OK) {
7830 err = err2;
7831 }
7832 }
7833
7834 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
7835 && mCodec->mNativeWindow != NULL) {
7836 // We push enough 1x1 blank buffers to ensure that one of
7837 // them has made it to the display. This allows the OMX
7838 // component teardown to zero out any protected buffers
7839 // without the risk of scanning out one of those buffers.
7840 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
7841 }
7842
7843 if (err != OK) {
7844 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7845 return;
7846 }
7847
7848 mCodec->changeState(mCodec->mIdleToLoadedState);
7849 }
7850 }
7851
7852 void ACodec::ExecutingToIdleState::onInputBufferFilled(
7853 const sp<AMessage> &msg) {
7854 BaseState::onInputBufferFilled(msg);
7855
7856 changeStateIfWeOwnAllBuffers();
7857 }
7858
7859 void ACodec::ExecutingToIdleState::onOutputBufferDrained(
7860 const sp<AMessage> &msg) {
7861 BaseState::onOutputBufferDrained(msg);
7862
7863 changeStateIfWeOwnAllBuffers();
7864 }
7865
7866 ////////////////////////////////////////////////////////////////////////////////
7867
7868 ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
7869 : BaseState(codec) {
7870 }
7871
7872 bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
7873 bool handled = false;
7874
7875 switch (msg->what()) {
7876 case kWhatShutdown:
7877 {
7878 // We're already doing that...
7879
7880 handled = true;
7881 break;
7882 }
7883
7884 case kWhatFlush:
7885 {
7886 // Don't send me a flush request if you previously wanted me
7887 // to shut down.
7888 ALOGE("Got flush request in IdleToLoadedState");
7889 break;
7890 }
7891
7892 default:
7893 handled = BaseState::onMessageReceived(msg);
7894 break;
7895 }
7896
7897 return handled;
7898 }
7899
7900 void ACodec::IdleToLoadedState::stateEntered() {
7901 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
7902 }
7903
7904 bool ACodec::IdleToLoadedState::onOMXEvent(
7905 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
7906 switch (event) {
7907 case OMX_EventCmdComplete:
7908 {
7909 if (data1 != (OMX_U32)OMX_CommandStateSet
7910 || data2 != (OMX_U32)OMX_StateLoaded) {
7911 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
7912 asString((OMX_COMMANDTYPE)data1), data1,
7913 asString((OMX_STATETYPE)data2), data2);
7914 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7915 return true;
7916 }
7917
7918 mCodec->changeState(mCodec->mLoadedState);
7919
7920 return true;
7921 }
7922
7923 default:
7924 return BaseState::onOMXEvent(event, data1, data2);
7925 }
7926 }
7927
7928 ////////////////////////////////////////////////////////////////////////////////
7929
7930 ACodec::FlushingState::FlushingState(ACodec *codec)
7931 : BaseState(codec) {
7932 }
7933
7934 void ACodec::FlushingState::stateEntered() {
7935 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());
7936
7937 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
7938 }
7939
7940 bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
7941 bool handled = false;
7942
7943 switch (msg->what()) {
7944 case kWhatShutdown:
7945 {
7946 mCodec->deferMessage(msg);
7947 break;
7948 }
7949
7950 case kWhatFlush:
7951 {
7952 // We're already doing this right now.
7953 handled = true;
7954 break;
7955 }
7956
7957 default:
7958 handled = BaseState::onMessageReceived(msg);
7959 break;
7960 }
7961
7962 return handled;
7963 }
7964
7965 bool ACodec::FlushingState::onOMXEvent(
7966 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
7967 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)",
7968 mCodec->mComponentName.c_str(), event, (OMX_S32)data1);
7969
7970 switch (event) {
7971 case OMX_EventCmdComplete:
7972 {
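// Flush completion may be reported per port (data2 is the port index) or
// for both at once (data2 == OMX_ALL); we only leave FlushingState after
// both ports have completed and all buffers are back in our hands.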
7973 if (data1 != (OMX_U32)OMX_CommandFlush) {
7974 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState",
7975 asString((OMX_COMMANDTYPE)data1), data1, data2);
7976 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
7977 return true;
7978 }
7979
7980 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
7981 if (mFlushComplete[data2]) {
7982 ALOGW("Flush already completed for %s port",
7983 data2 == kPortIndexInput ? "input" : "output");
7984 return true;
7985 }
7986 mFlushComplete[data2] = true;
7987
7988 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) {
7989 changeStateIfWeOwnAllBuffers();
7990 }
7991 } else if (data2 == OMX_ALL) {
7992 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) {
7993 ALOGW("received flush complete event for OMX_ALL before ports have been "
7994 "flushed (%d/%d)",
7995 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]);
7996 return false;
7997 }
7998
7999 changeStateIfWeOwnAllBuffers();
8000 } else {
8001 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2);
8002 }
8003
8004 return true;
8005 }
8006
8007 case OMX_EventPortSettingsChanged:
8008 {
8009 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);
8010 msg->setInt32("type", omx_message::EVENT);
8011 msg->setInt32("node", mCodec->mNode);
8012 msg->setInt32("event", event);
8013 msg->setInt32("data1", data1);
8014 msg->setInt32("data2", data2);
8015
8016 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged",
8017 mCodec->mComponentName.c_str());
8018
8019 mCodec->deferMessage(msg);
8020
8021 return true;
8022 }
8023
8024 default:
8025 return BaseState::onOMXEvent(event, data1, data2);
8026 }
8027
8028 return true;
8029 }
8030
8031 void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
8032 BaseState::onOutputBufferDrained(msg);
8033
8034 changeStateIfWeOwnAllBuffers();
8035 }
8036
8037 void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
8038 BaseState::onInputBufferFilled(msg);
8039
8040 changeStateIfWeOwnAllBuffers();
8041 }
8042
8043 void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
8044 if (mFlushComplete[kPortIndexInput]
8045 && mFlushComplete[kPortIndexOutput]
8046 && mCodec->allYourBuffersAreBelongToUs()) {
8047 // We now own all buffers except possibly those still queued with
8048 // the native window for rendering. Let's get those back as well.
8049 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
8050
8051 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
8052
8053 sp<AMessage> notify = mCodec->mNotify->dup();
8054 notify->setInt32("what", CodecBase::kWhatFlushCompleted);
8055 notify->post();
8056
8057 mCodec->mPortEOS[kPortIndexInput] =
8058 mCodec->mPortEOS[kPortIndexOutput] = false;
8059
8060 mCodec->mInputEOSResult = OK;
8061
8062 if (mCodec->mSkipCutBuffer != NULL) {
8063 mCodec->mSkipCutBuffer->clear();
8064 }
8065
8066 mCodec->changeState(mCodec->mExecutingState);
8067 }
8068 }
8069
8070 status_t ACodec::queryCapabilities(
8071 const AString &name, const AString &mime, bool isEncoder,
8072 sp<MediaCodecInfo::Capabilities> *caps) {
8073 (*caps).clear();
8074 const char *role = getComponentRole(isEncoder, mime.c_str());
8075 if (role == NULL) {
8076 return BAD_VALUE;
8077 }
8078
8079 OMXClient client;
8080 status_t err = client.connect();
8081 if (err != OK) {
8082 return err;
8083 }
8084
8085 sp<IOMX> omx = client.interface();
8086 sp<CodecObserver> observer = new CodecObserver;
8087 IOMX::node_id node = 0;
8088
8089 err = omx->allocateNode(name.c_str(), observer, NULL, &node);
8090 if (err != OK) {
8091 client.disconnect();
8092 return err;
8093 }
8094
8095 err = setComponentRole(omx, node, role);
8096 if (err != OK) {
8097 omx->freeNode(node);
8098 client.disconnect();
8099 return err;
8100 }
8101
8102 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
8103 bool isVideo = mime.startsWithIgnoreCase("video/");
8104
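// Supported profiles/levels and color formats are discovered by probing
// successive indices until the component rejects one; kMaxIndicesToCheck
// caps the loop for components that never report an error.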
8105 if (isVideo) {
8106 OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
8107 InitOMXParams(&param);
8108 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
8109
8110 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
8111 param.nProfileIndex = index;
8112 status_t err = omx->getParameter(
8113 node, OMX_IndexParamVideoProfileLevelQuerySupported,
8114 ¶m, sizeof(param));
8115 if (err != OK) {
8116 break;
8117 }
8118 builder->addProfileLevel(param.eProfile, param.eLevel);
8119
8120 if (index == kMaxIndicesToCheck) {
8121 ALOGW("[%s] stopping checking profiles after %u: %x/%x",
8122 name.c_str(), index,
8123 param.eProfile, param.eLevel);
8124 }
8125 }
8126
8127 // Color format query
8128 // return colors in the order reported by the OMX component
8129 // prefix "flexible" standard ones with the flexible equivalent
8130 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
8131 InitOMXParams(&portFormat);
8132 portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
8133 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
8134 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
8135 portFormat.nIndex = index;
8136 status_t err = omx->getParameter(
8137 node, OMX_IndexParamVideoPortFormat,
8138 &portFormat, sizeof(portFormat));
8139 if (err != OK) {
8140 break;
8141 }
8142
8143 OMX_U32 flexibleEquivalent;
8144 if (isFlexibleColorFormat(
8145 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
8146 &flexibleEquivalent)) {
8147 bool marked = false;
8148 for (size_t i = 0; i < supportedColors.size(); ++i) {
8149 if (supportedColors[i] == flexibleEquivalent) {
8150 marked = true;
8151 break;
8152 }
8153 }
8154 if (!marked) {
8155 supportedColors.push(flexibleEquivalent);
8156 builder->addColorFormat(flexibleEquivalent);
8157 }
8158 }
8159 supportedColors.push(portFormat.eColorFormat);
8160 builder->addColorFormat(portFormat.eColorFormat);
8161
8162 if (index == kMaxIndicesToCheck) {
8163 ALOGW("[%s] stopping checking formats after %u: %s(%x)",
8164 name.c_str(), index,
8165 asString(portFormat.eColorFormat), portFormat.eColorFormat);
8166 }
8167 }
8168 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
8169 // Additional audio codecs can be handled here if they expose profiles.
8170 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
8171 InitOMXParams(&param);
8172 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
8173 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
8174 param.nProfileIndex = index;
8175 status_t err = omx->getParameter(
8176 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
8177 ¶m, sizeof(param));
8178 if (err != OK) {
8179 break;
8180 }
8181 // For audio, level is ignored.
8182 builder->addProfileLevel(param.eProfile, 0 /* level */);
8183
8184 if (index == kMaxIndicesToCheck) {
8185 ALOGW("[%s] stopping checking profiles after %u: %x",
8186 name.c_str(), index,
8187 param.eProfile);
8188 }
8189 }
8190
8191 // NOTE: Without Android extensions, OMX does not provide a way to query
8192 // AAC profile support
8193 if (param.nProfileIndex == 0) {
8194 ALOGW("component %s doesn't support profile query.", name.c_str());
8195 }
8196 }
8197
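// For video decoders, tunneled playback is probed first (it implies
// adaptive playback); failing that, metadata-mode output or
// prepareForAdaptivePlayback() is used to detect plain adaptive support.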
8198 if (isVideo && !isEncoder) {
8199 native_handle_t *sidebandHandle = NULL;
8200 if (omx->configureVideoTunnelMode(
8201 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
8202 // tunneled playback includes adaptive playback
8203 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
8204 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
8205 } else if (omx->storeMetaDataInBuffers(
8206 node, kPortIndexOutput, OMX_TRUE) == OK ||
8207 omx->prepareForAdaptivePlayback(
8208 node, kPortIndexOutput, OMX_TRUE,
8209 1280 /* width */, 720 /* height */) == OK) {
8210 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
8211 }
8212 }
8213
8214 if (isVideo && isEncoder) {
8215 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
8216 InitOMXParams(&params);
8217 params.nPortIndex = kPortIndexOutput;
8218 // TODO: should we verify if fallback is supported?
8219 if (omx->getConfig(
8220 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
8221 ¶ms, sizeof(params)) == OK) {
8222 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
8223 }
8224 }
8225
8226 *caps = builder;
8227 omx->freeNode(node);
8228 client.disconnect();
8229 return OK;
8230 }
8231
8232 // These are supposed to be equivalent to the logic in
8233 // "audio_channel_out_mask_from_count".
8234 //static
8235 status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) {
8236 switch (numChannels) {
8237 case 1:
8238 map[0] = OMX_AUDIO_ChannelCF;
8239 break;
8240 case 2:
8241 map[0] = OMX_AUDIO_ChannelLF;
8242 map[1] = OMX_AUDIO_ChannelRF;
8243 break;
8244 case 3:
8245 map[0] = OMX_AUDIO_ChannelLF;
8246 map[1] = OMX_AUDIO_ChannelRF;
8247 map[2] = OMX_AUDIO_ChannelCF;
8248 break;
8249 case 4:
8250 map[0] = OMX_AUDIO_ChannelLF;
8251 map[1] = OMX_AUDIO_ChannelRF;
8252 map[2] = OMX_AUDIO_ChannelLR;
8253 map[3] = OMX_AUDIO_ChannelRR;
8254 break;
8255 case 5:
8256 map[0] = OMX_AUDIO_ChannelLF;
8257 map[1] = OMX_AUDIO_ChannelRF;
8258 map[2] = OMX_AUDIO_ChannelCF;
8259 map[3] = OMX_AUDIO_ChannelLR;
8260 map[4] = OMX_AUDIO_ChannelRR;
8261 break;
8262 case 6:
8263 map[0] = OMX_AUDIO_ChannelLF;
8264 map[1] = OMX_AUDIO_ChannelRF;
8265 map[2] = OMX_AUDIO_ChannelCF;
8266 map[3] = OMX_AUDIO_ChannelLFE;
8267 map[4] = OMX_AUDIO_ChannelLR;
8268 map[5] = OMX_AUDIO_ChannelRR;
8269 break;
8270 case 7:
8271 map[0] = OMX_AUDIO_ChannelLF;
8272 map[1] = OMX_AUDIO_ChannelRF;
8273 map[2] = OMX_AUDIO_ChannelCF;
8274 map[3] = OMX_AUDIO_ChannelLFE;
8275 map[4] = OMX_AUDIO_ChannelLR;
8276 map[5] = OMX_AUDIO_ChannelRR;
8277 map[6] = OMX_AUDIO_ChannelCS;
8278 break;
8279 case 8:
8280 map[0] = OMX_AUDIO_ChannelLF;
8281 map[1] = OMX_AUDIO_ChannelRF;
8282 map[2] = OMX_AUDIO_ChannelCF;
8283 map[3] = OMX_AUDIO_ChannelLFE;
8284 map[4] = OMX_AUDIO_ChannelLR;
8285 map[5] = OMX_AUDIO_ChannelRR;
8286 map[6] = OMX_AUDIO_ChannelLS;
8287 map[7] = OMX_AUDIO_ChannelRS;
8288 break;
8289 default:
8290 return -EINVAL;
8291 }
8292
8293 return OK;
8294 }
8295
8296 } // namespace android
8297