/*
 * Copyright 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2GoldfishAvcDec"
#include <inttypes.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

#include <C2AllocatorGralloc.h>
#include <C2PlatformSupport.h>
//#include <android/hardware/graphics/common/1.0/types.h>

#include <android/hardware/graphics/allocator/3.0/IAllocator.h>
#include <android/hardware/graphics/mapper/3.0/IMapper.h>
#include <hidl/LegacySupport.h>

#include <C2Debug.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <goldfish_codec2/store/GoldfishComponentStore.h>
#include <gralloc_cb_bp.h>

#include <color_buffer_utils.h>

#include "C2GoldfishAvcDec.h"

#define DEBUG 0
#if DEBUG
#define DDD(...) ALOGD(__VA_ARGS__)
#else
#define DDD(...) ((void)0)
#endif

using ::android::hardware::graphics::common::V1_0::BufferUsage;
using ::android::hardware::graphics::common::V1_2::PixelFormat;

namespace android {

namespace {
constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
constexpr char COMPONENT_NAME[] = "c2.goldfish.h264.decoder";
constexpr uint32_t kDefaultOutputDelay = 8;
/* The AVC specification allows a maximum reorder delay of 16 frames.
   Because the decoder supports interlaced content, that delay can amount to
   32 fields, and the decoder implementation adds a further delay of 2 decode
   calls, so the total maximum output delay is 34. */
constexpr uint32_t kMaxOutputDelay = 34;
constexpr uint32_t kMinInputBytes = 4;
} // namespace

class C2GoldfishAvcDec::IntfImpl : public SimpleInterface<void>::BaseParams {
  public:
    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
        : SimpleInterface<void>::BaseParams(
              helper, COMPONENT_NAME, C2Component::KIND_DECODER,
              C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AVC) {
        noPrivateBuffers(); // TODO: account for our buffers here
        noInputReferences();
        noOutputReferences();
        noInputLatency();
        noTimeStretch();

        // TODO: Proper support for reorder depth.
        addParameter(
            DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
                .withDefault(
                    new C2PortActualDelayTuning::output(kDefaultOutputDelay))
                .withFields({C2F(mActualOutputDelay, value)
                                 .inRange(0, kMaxOutputDelay)})
                .withSetter(
                    Setter<
                        decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
                .build());

        // TODO: output latency and reordering

        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                         .withConstValue(new C2ComponentAttributesSetting(
                             C2Component::ATTRIB_IS_TEMPORAL))
                         .build());

        // coded and output picture size is the same for this codec
        addParameter(
            DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
                .withFields({
                    C2F(mSize, width).inRange(2, 4080, 2),
                    C2F(mSize, height).inRange(2, 4080, 2),
                })
                .withSetter(SizeSetter)
                .build());

        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
                         .withDefault(new C2StreamMaxPictureSizeTuning::output(
                             0u, 320, 240))
                         .withFields({
                             C2F(mSize, width).inRange(2, 4080, 2),
                             C2F(mSize, height).inRange(2, 4080, 2),
                         })
                         .withSetter(MaxPictureSizeSetter, mSize)
                         .build());

        addParameter(
            DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                .withDefault(new C2StreamProfileLevelInfo::input(
                    0u, C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
                    C2Config::LEVEL_AVC_5_2))
                .withFields(
                    {C2F(mProfileLevel, profile)
                         .oneOf({C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
                                 C2Config::PROFILE_AVC_BASELINE,
                                 C2Config::PROFILE_AVC_MAIN,
                                 C2Config::PROFILE_AVC_CONSTRAINED_HIGH,
                                 C2Config::PROFILE_AVC_PROGRESSIVE_HIGH,
                                 C2Config::PROFILE_AVC_HIGH}),
                     C2F(mProfileLevel, level)
                         .oneOf(
                             {C2Config::LEVEL_AVC_1, C2Config::LEVEL_AVC_1B,
                              C2Config::LEVEL_AVC_1_1, C2Config::LEVEL_AVC_1_2,
                              C2Config::LEVEL_AVC_1_3, C2Config::LEVEL_AVC_2,
                              C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2,
                              C2Config::LEVEL_AVC_3, C2Config::LEVEL_AVC_3_1,
                              C2Config::LEVEL_AVC_3_2, C2Config::LEVEL_AVC_4,
                              C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2,
                              C2Config::LEVEL_AVC_5, C2Config::LEVEL_AVC_5_1,
                              C2Config::LEVEL_AVC_5_2})})
                .withSetter(ProfileLevelSetter, mSize)
                .build());

        addParameter(
            DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                .withDefault(new C2StreamMaxBufferSizeInfo::input(
                    0u, kMinInputBufferSize))
                .withFields({
                    C2F(mMaxInputSize, value).any(),
                })
                .calculatedAs(MaxInputSizeSetter, mMaxSize)
                .build());

        C2ChromaOffsetStruct locations[1] = {
            C2ChromaOffsetStruct::ITU_YUV_420_0()};
        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
            C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
                                                   C2Color::YUV_420);
        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
            {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
            C2Color::YUV_420);
        helper->addStructDescriptors<C2ChromaOffsetStruct>();

        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                         .withConstValue(defaultColorInfo)
                         .build());

        addParameter(
            DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
                .withDefault(new C2StreamColorAspectsTuning::output(
                    0u, C2Color::RANGE_UNSPECIFIED,
                    C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                .withFields({C2F(mDefaultColorAspects, range)
                                 .inRange(C2Color::RANGE_UNSPECIFIED,
                                          C2Color::RANGE_OTHER),
                             C2F(mDefaultColorAspects, primaries)
                                 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                          C2Color::PRIMARIES_OTHER),
                             C2F(mDefaultColorAspects, transfer)
                                 .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                          C2Color::TRANSFER_OTHER),
                             C2F(mDefaultColorAspects, matrix)
                                 .inRange(C2Color::MATRIX_UNSPECIFIED,
                                          C2Color::MATRIX_OTHER)})
                .withSetter(DefaultColorAspectsSetter)
                .build());

        addParameter(
            DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
                .withDefault(new C2StreamColorAspectsInfo::input(
                    0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                .withFields({C2F(mCodedColorAspects, range)
                                 .inRange(C2Color::RANGE_UNSPECIFIED,
                                          C2Color::RANGE_OTHER),
                             C2F(mCodedColorAspects, primaries)
                                 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                          C2Color::PRIMARIES_OTHER),
                             C2F(mCodedColorAspects, transfer)
                                 .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                          C2Color::TRANSFER_OTHER),
                             C2F(mCodedColorAspects, matrix)
                                 .inRange(C2Color::MATRIX_UNSPECIFIED,
                                          C2Color::MATRIX_OTHER)})
                .withSetter(CodedColorAspectsSetter)
                .build());

        addParameter(
            DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
                .withDefault(new C2StreamColorAspectsInfo::output(
                    0u, C2Color::RANGE_UNSPECIFIED,
                    C2Color::PRIMARIES_UNSPECIFIED,
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
                .withFields({C2F(mColorAspects, range)
                                 .inRange(C2Color::RANGE_UNSPECIFIED,
                                          C2Color::RANGE_OTHER),
                             C2F(mColorAspects, primaries)
                                 .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                                          C2Color::PRIMARIES_OTHER),
                             C2F(mColorAspects, transfer)
                                 .inRange(C2Color::TRANSFER_UNSPECIFIED,
                                          C2Color::TRANSFER_OTHER),
                             C2F(mColorAspects, matrix)
                                 .inRange(C2Color::MATRIX_UNSPECIFIED,
                                          C2Color::MATRIX_OTHER)})
                .withSetter(ColorAspectsSetter, mDefaultColorAspects,
                            mCodedColorAspects)
                .build());

        // TODO: support more formats?
        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
                         .withConstValue(new C2StreamPixelFormatInfo::output(
                             0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
                         .build());
    }

    static C2R SizeSetter(bool mayBlock,
                          const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                          C2P<C2StreamPictureSizeInfo::output> &me) {
        (void)mayBlock;
        DDD("calling sizesetter now %d", oldMe.v.height);
        DDD("new calling sizesetter now %d", me.v.height);

        C2R res = C2R::Ok();
        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
            me.set().width = oldMe.v.width;
        }
        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
            me.set().height = oldMe.v.height;
        }
        return res;
    }

    static C2R
    MaxPictureSizeSetter(bool mayBlock,
                         C2P<C2StreamMaxPictureSizeTuning::output> &me,
                         const C2P<C2StreamPictureSizeInfo::output> &size) {
        (void)mayBlock;
        // TODO: get max width/height from the size's field helpers vs.
        // hardcoding
        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4080u);
        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4080u);
        return C2R::Ok();
    }

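    // The max input buffer size below is derived from the max picture size by
    // budgeting 192 bytes per 16x16 macroblock (roughly half of the 384 bytes
    // a raw YUV 4:2:0 macroblock occupies, i.e. the assumed 2:1 compression
    // ratio), and it is never allowed to drop below kMinInputBufferSize.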
    static C2R MaxInputSizeSetter(
        bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
        const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
        (void)mayBlock;
        // assume compression ratio of 2
        me.set().value = c2_max((((maxSize.v.width + 15) / 16) *
                                 ((maxSize.v.height + 15) / 16) * 192),
                                kMinInputBufferSize);
        return C2R::Ok();
    }

    static C2R
    ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
                       const C2P<C2StreamPictureSizeInfo::output> &size) {
        (void)mayBlock;
        (void)size;
        (void)me; // TODO: validate
        return C2R::Ok();
    }

    static C2R
    DefaultColorAspectsSetter(bool mayBlock,
                              C2P<C2StreamColorAspectsTuning::output> &me) {
        (void)mayBlock;
        if (me.v.range > C2Color::RANGE_OTHER) {
            me.set().range = C2Color::RANGE_OTHER;
        }
        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
            me.set().primaries = C2Color::PRIMARIES_OTHER;
        }
        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
            me.set().transfer = C2Color::TRANSFER_OTHER;
        }
        if (me.v.matrix > C2Color::MATRIX_OTHER) {
            me.set().matrix = C2Color::MATRIX_OTHER;
        }
        return C2R::Ok();
    }

    static C2R
    CodedColorAspectsSetter(bool mayBlock,
                            C2P<C2StreamColorAspectsInfo::input> &me) {
        (void)mayBlock;
        if (me.v.range > C2Color::RANGE_OTHER) {
            me.set().range = C2Color::RANGE_OTHER;
        }
        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
            me.set().primaries = C2Color::PRIMARIES_OTHER;
        }
        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
            me.set().transfer = C2Color::TRANSFER_OTHER;
        }
        if (me.v.matrix > C2Color::MATRIX_OTHER) {
            me.set().matrix = C2Color::MATRIX_OTHER;
        }
        return C2R::Ok();
    }

    static C2R
    ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
                       const C2P<C2StreamColorAspectsTuning::output> &def,
                       const C2P<C2StreamColorAspectsInfo::input> &coded) {
        (void)mayBlock;
        // take default values for all unspecified fields, and coded values for
        // specified ones
        me.set().range =
            coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
        me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
                                 ? def.v.primaries
                                 : coded.v.primaries;
        me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
                                ? def.v.transfer
                                : coded.v.transfer;
        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix
                                                               : coded.v.matrix;
        return C2R::Ok();
    }

    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
        return mColorAspects;
    }

    int width() const { return mSize->width; }

    int height() const { return mSize->height; }

  private:
    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
};

static void *ivd_aligned_malloc(void *ctxt, uint32_t alignment, uint32_t size) {
    (void)ctxt;
    return memalign(alignment, size);
}

static void ivd_aligned_free(void *ctxt, void *mem) {
    (void)ctxt;
    free(mem);
}

C2GoldfishAvcDec::C2GoldfishAvcDec(const char *name, c2_node_id_t id,
                                   const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl), mOutBufferFlush(nullptr), mWidth(1920), mHeight(1080),
      mHeaderDecoded(false), mOutIndex(0u) {
    mWidth = mIntf->width();
    mHeight = mIntf->height();
    DDD("creating avc decoder now w %d h %d", mWidth, mHeight);
}

C2GoldfishAvcDec::~C2GoldfishAvcDec() { onRelease(); }

c2_status_t C2GoldfishAvcDec::onInit() {
    status_t err = initDecoder();
    return err == OK ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2GoldfishAvcDec::onStop() {
    if (OK != resetDecoder())
        return C2_CORRUPTED;
    resetPlugin();
    return C2_OK;
}

void C2GoldfishAvcDec::onReset() { (void)onStop(); }

void C2GoldfishAvcDec::onRelease() {
    deleteContext();
    if (mOutBlock) {
        mOutBlock.reset();
    }
}

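// The host-side decoder context loses its SPS/PPS state when it is destroyed
// on flush, so the cached codec-config buffers (csd0/csd1) are replayed into
// the freshly created context before the next frame is decoded.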
void C2GoldfishAvcDec::decodeHeaderAfterFlush() {
    if (mContext && !mCsd0.empty() && !mCsd1.empty()) {
        mContext->decodeFrame(&(mCsd0[0]), mCsd0.size(), 0);
        mContext->decodeFrame(&(mCsd1[0]), mCsd1.size(), 0);
        DDD("resending csd0 and csd1");
    }
}

c2_status_t C2GoldfishAvcDec::onFlush_sm() {
    if (OK != setFlushMode())
        return C2_CORRUPTED;

    if (!mContext) {
        // just ignore if context is not even created
        return C2_OK;
    }

    uint32_t bufferSize = mStride * mHeight * 3 / 2;
    mOutBufferFlush = (uint8_t *)ivd_aligned_malloc(nullptr, 128, bufferSize);
    if (!mOutBufferFlush) {
        ALOGE("could not allocate tmp output buffer (for flush) of size %u ",
              bufferSize);
        return C2_NO_MEMORY;
    }

    while (true) {
        mPts = 0;
        setDecodeArgs(nullptr, nullptr, 0, 0, 0);
        mImg = mContext->getImage();
        if (mImg.data == nullptr) {
            resetPlugin();
            break;
        }
    }

    if (mOutBufferFlush) {
        ivd_aligned_free(nullptr, mOutBufferFlush);
        mOutBufferFlush = nullptr;
    }

    deleteContext();
    return C2_OK;
}

status_t C2GoldfishAvcDec::createDecoder() {

    DDD("creating avc context now w %d h %d", mWidth, mHeight);
    if (mEnableAndroidNativeBuffers) {
        mContext.reset(new MediaH264Decoder(RenderMode::RENDER_BY_HOST_GPU));
    } else {
        mContext.reset(new MediaH264Decoder(RenderMode::RENDER_BY_GUEST_CPU));
    }
    mContext->initH264Context(mWidth, mHeight, mWidth, mHeight,
                              MediaH264Decoder::PixelFormat::YUV420P);
    return OK;
}

status_t C2GoldfishAvcDec::setParams(size_t stride) {
    (void)stride;
    return OK;
}

status_t C2GoldfishAvcDec::initDecoder() {
    // if (OK != createDecoder()) return UNKNOWN_ERROR;
    mStride = ALIGN2(mWidth);
    mSignalledError = false;
    resetPlugin();

    return OK;
}

bool C2GoldfishAvcDec::setDecodeArgs(C2ReadView *inBuffer,
                                     C2GraphicView *outBuffer, size_t inOffset,
                                     size_t inSize, uint32_t tsMarker) {
    uint32_t displayStride = mStride;
    (void)inBuffer;
    (void)inOffset;
    (void)inSize;
    (void)tsMarker;
    if (outBuffer) {
        C2PlanarLayout layout;
        layout = outBuffer->layout();
        displayStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
    }

    if (inBuffer) {
        //= tsMarker;
        mInPBuffer = const_cast<uint8_t *>(inBuffer->data() + inOffset);
        mInPBufferSize = inSize;
        mInTsMarker = tsMarker;
        insertPts(tsMarker, mPts);
    }

    // uint32_t displayHeight = mHeight;
    // size_t lumaSize = displayStride * displayHeight;
    // size_t chromaSize = lumaSize >> 2;

    if (mStride != displayStride) {
        mStride = displayStride;
        if (OK != setParams(mStride))
            return false;
    }

    return true;
}

status_t C2GoldfishAvcDec::setFlushMode() {
    if (mContext) {
        mContext->flush();
    }
    mHeaderDecoded = false;
    return OK;
}

status_t C2GoldfishAvcDec::resetDecoder() {
    mStride = 0;
    mSignalledError = false;
    mHeaderDecoded = false;
    deleteContext();

    return OK;
}

void C2GoldfishAvcDec::resetPlugin() {
    mSignalledOutputEos = false;
    gettimeofday(&mTimeStart, nullptr);
    gettimeofday(&mTimeEnd, nullptr);
}

void C2GoldfishAvcDec::deleteContext() {
    if (mContext) {
        mContext->destroyH264Context();
        mContext.reset(nullptr);
        mPts2Index.clear();
        mOldPts2Index.clear();
        mIndex2Pts.clear();
    }
}

static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
        flags |= C2FrameData::FLAG_END_OF_STREAM;
        DDD("signalling eos");
    }
    DDD("fill empty work");
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
}

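// Attach the decoded graphic block to the work item whose frame index matches
// |index|. In the common case the current work is filled in place; when that
// work carries the end-of-stream flag, the output is cloned and sent as a
// FLAG_INCOMPLETE work first so the EOS work itself can still be completed
// separately. If the index belongs to an earlier work, finish() routes the
// output to that pending work instead.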
void C2GoldfishAvcDec::finishWork(uint64_t index,
                                  const std::unique_ptr<C2Work> &work) {
    std::shared_ptr<C2Buffer> buffer =
        createGraphicBuffer(std::move(mOutBlock), C2Rect(mWidth, mHeight));
    mOutBlock = nullptr;
    {
        IntfImpl::Lock lock = mIntf->lock();
        buffer->setInfo(mIntf->getColorAspects_l());
    }

    class FillWork {
      public:
        FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal,
                 const std::shared_ptr<C2Buffer> &buffer)
            : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) {}
        ~FillWork() = default;

        void operator()(const std::unique_ptr<C2Work> &work) {
            work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags;
            work->worklets.front()->output.buffers.clear();
            work->worklets.front()->output.ordinal = mOrdinal;
            work->workletsProcessed = 1u;
            work->result = C2_OK;
            if (mBuffer) {
                work->worklets.front()->output.buffers.push_back(mBuffer);
            }
            DDD("timestamp = %lld, index = %lld, w/%s buffer",
                mOrdinal.timestamp.peekll(), mOrdinal.frameIndex.peekll(),
                mBuffer ? "" : "o");
        }

      private:
        const uint32_t mFlags;
        const C2WorkOrdinalStruct mOrdinal;
        const std::shared_ptr<C2Buffer> mBuffer;
    };

    auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
        work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
        work->worklets.front()->output.buffers.clear();
        work->worklets.front()->output.buffers.push_back(buffer);
        work->worklets.front()->output.ordinal = work->input.ordinal;
        work->workletsProcessed = 1u;
    };
    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
        bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
        // TODO: Check if cloneAndSend can be avoided by tracking number of
        // frames remaining
        if (eos) {
            if (buffer) {
                mOutIndex = index;
                C2WorkOrdinalStruct outOrdinal = work->input.ordinal;
                DDD("%s %d: cloneAndSend ", __func__, __LINE__);
                cloneAndSend(
                    mOutIndex, work,
                    FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer));
                buffer.reset();
            }
        } else {
            DDD("%s %d: fill", __func__, __LINE__);
            fillWork(work);
        }
    } else {
        DDD("%s %d: finish", __func__, __LINE__);
        finish(index, fillWork);
    }
}

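// Make sure an output graphic block of the right (2-aligned) size is available
// before decoding. When rendering on the host, the gralloc handle of the block
// is unwrapped to recover the host color buffer id the decoder should render
// into.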
c2_status_t
C2GoldfishAvcDec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
    if (mOutBlock && (mOutBlock->width() != ALIGN2(mWidth) ||
                      mOutBlock->height() != mHeight)) {
        mOutBlock.reset();
    }
    if (!mOutBlock) {
        uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
        C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
                               C2MemoryUsage::CPU_WRITE};
        usage.expected = (uint64_t)(BufferUsage::GPU_DATA_BUFFER);
        // C2MemoryUsage usage = {(unsigned
        // int)(BufferUsage::GPU_DATA_BUFFER)};// { C2MemoryUsage::CPU_READ,
        // C2MemoryUsage::CPU_WRITE };
        c2_status_t err = pool->fetchGraphicBlock(ALIGN2(mWidth), mHeight,
                                                  format, usage, &mOutBlock);
        if (err != C2_OK) {
            ALOGE("fetchGraphicBlock for Output failed with status %d", err);
            return err;
        }
        if (mEnableAndroidNativeBuffers) {
            auto c2Handle = mOutBlock->handle();
            native_handle_t *grallocHandle =
                UnwrapNativeCodec2GrallocHandle(c2Handle);
            mHostColorBufferId = getColorBufferHandle(grallocHandle);
            DDD("found handle %d", mHostColorBufferId);
        }
        DDD("provided (%dx%d) required (%dx%d)", mOutBlock->width(),
            mOutBlock->height(), ALIGN2(mWidth), mHeight);
    }

    return C2_OK;
}

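// Probe the block pool with a temporary graphic block: if its gralloc handle
// maps to a host color buffer, frames can be rendered directly by the host
// GPU; otherwise the decoder falls back to copying each frame into a guest
// byte buffer.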
void C2GoldfishAvcDec::checkMode(const std::shared_ptr<C2BlockPool> &pool) {
    mWidth = mIntf->width();
    mHeight = mIntf->height();
    {
        // now get the block
        constexpr uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
        std::shared_ptr<C2GraphicBlock> block;
        C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
                               C2MemoryUsage::CPU_WRITE};
        usage.expected = (uint64_t)(BufferUsage::GPU_DATA_BUFFER);

        c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight,
                                                  format, usage, &block);
        if (err != C2_OK) {
            ALOGE("fetchGraphicBlock for Output failed with status %d", err);
            return;
        }
        auto c2Handle = block->handle();
        native_handle_t *grallocHandle =
            UnwrapNativeCodec2GrallocHandle(c2Handle);
        int hostColorBufferId = getColorBufferHandle(grallocHandle);
        if (hostColorBufferId > 0) {
            DDD("decoding to host color buffer");
            mEnableAndroidNativeBuffers = true;
        } else {
            DDD("decoding to guest byte buffer");
            mEnableAndroidNativeBuffers = false;
        }
    }
}

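// Translate the VUI color description carried in the bitstream (ISO
// primaries/transfer/matrix plus the full-range flag) into Codec2 coded color
// aspects, and push the update to the interface whenever the values change.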
void C2GoldfishAvcDec::getVuiParams(h264_image_t &img) {

    VuiColorAspects vuiColorAspects;
    vuiColorAspects.primaries = img.color_primaries;
    vuiColorAspects.transfer = img.color_trc;
    vuiColorAspects.coeffs = img.colorspace;
    vuiColorAspects.fullRange = img.color_range == 2 ? true : false;

    // convert vui aspects to C2 values if changed
    if (!(vuiColorAspects == mBitstreamColorAspects)) {
        mBitstreamColorAspects = vuiColorAspects;
        ColorAspects sfAspects;
        C2StreamColorAspectsInfo::input codedAspects = {0u};
        ColorUtils::convertIsoColorAspectsToCodecAspects(
            vuiColorAspects.primaries, vuiColorAspects.transfer,
            vuiColorAspects.coeffs, vuiColorAspects.fullRange, sfAspects);
        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
        }
        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
        }
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
    }
}

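// Copy the decoder's contiguous YUV 4:2:0 planar output (a full-size Y plane
// followed by quarter-size U and V planes) into the mapped graphic block,
// honoring the destination row strides. Skipped when the host GPU renders the
// frame directly.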
void C2GoldfishAvcDec::copyImageData(h264_image_t &img) {
    getVuiParams(img);
    if (mEnableAndroidNativeBuffers)
        return;

    auto writeView = mOutBlock->map().get();
    if (writeView.error()) {
        ALOGE("graphic view map failed %d", writeView.error());
        return;
    }
    size_t dstYStride =
        writeView.layout().planes[C2PlanarLayout::PLANE_Y].rowInc;
    size_t dstUVStride =
        writeView.layout().planes[C2PlanarLayout::PLANE_U].rowInc;

    uint8_t *pYBuffer =
        const_cast<uint8_t *>(writeView.data()[C2PlanarLayout::PLANE_Y]);
    uint8_t *pUBuffer =
        const_cast<uint8_t *>(writeView.data()[C2PlanarLayout::PLANE_U]);
    uint8_t *pVBuffer =
        const_cast<uint8_t *>(writeView.data()[C2PlanarLayout::PLANE_V]);

    for (int i = 0; i < mHeight; ++i) {
        memcpy(pYBuffer + i * dstYStride, img.data + i * mWidth, mWidth);
    }
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(pUBuffer + i * dstUVStride,
               img.data + mWidth * mHeight + i * mWidth / 2, mWidth / 2);
    }
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(pVBuffer + i * dstUVStride,
               img.data + mWidth * mHeight * 5 / 4 + i * mWidth / 2,
               mWidth / 2);
    }
}

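// The host decoder is driven by presentation timestamps, while Codec2 work
// items are keyed by frame index. getWorkIndex()/insertPts()/removePts()
// maintain the pts <-> index bookkeeping; when a timestamp from a previous
// session collides with a new one, the old mapping is parked in mOldPts2Index
// until its frame is drained.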
uint64_t C2GoldfishAvcDec::getWorkIndex(uint64_t pts) {
    if (!mOldPts2Index.empty()) {
        auto iter = mOldPts2Index.find(pts);
        if (iter != mOldPts2Index.end()) {
            auto index = iter->second;
            DDD("found index %d for pts %" PRIu64, (int)index, pts);
            return index;
        }
    }
    auto iter = mPts2Index.find(pts);
    if (iter != mPts2Index.end()) {
        auto index = iter->second;
        DDD("found index %d for pts %" PRIu64, (int)index, pts);
        return index;
    }
    DDD("not found index for pts %" PRIu64, pts);
    return 0;
}

void C2GoldfishAvcDec::insertPts(uint32_t work_index, uint64_t pts) {
    auto iter = mPts2Index.find(pts);
    if (iter != mPts2Index.end()) {
        // we have a collision here: the older session is apparently not done
        // yet, so save its mapping before overwriting it
        DDD("inserted to old pts %" PRIu64 " with index %d", pts,
            (int)iter->second);
        mOldPts2Index[iter->first] = iter->second;
    }
    DDD("inserted pts %" PRIu64 " with index %d", pts, (int)work_index);
    mIndex2Pts[work_index] = pts;
    mPts2Index[pts] = work_index;
}

void C2GoldfishAvcDec::removePts(uint64_t pts) {
    bool found = false;
    uint64_t index = 0;
    // note: check the old pts map first in case it still holds
    // leftover entries from a previous session
    if (!mOldPts2Index.empty()) {
        auto iter = mOldPts2Index.find(pts);
        if (iter != mOldPts2Index.end()) {
            // read the index before erase() invalidates the iterator
            index = iter->second;
            mOldPts2Index.erase(iter);
            found = true;
        }
    } else {
        auto iter = mPts2Index.find(pts);
        if (iter != mPts2Index.end()) {
            index = iter->second;
            mPts2Index.erase(iter);
            found = true;
        }
    }

    if (!found)
        return;

    auto iter2 = mIndex2Pts.find(index);
    if (iter2 == mIndex2Pts.end())
        return;
    mIndex2Pts.erase(iter2);
}

// TODO: can overall error checking be improved?
// TODO: allow configuration of color format and usage for graphic buffers
//       instead of hard coding them to HAL_PIXEL_FORMAT_YV12
// TODO: pass coloraspects information to surface
// TODO: test support for dynamic change in resolution
// TODO: verify if the decoder sent back all frames
void C2GoldfishAvcDec::process(const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2BlockPool> &pool) {
    // Initialize output work
    work->result = C2_OK;
    work->workletsProcessed = 0u;
    work->worklets.front()->output.flags = work->input.flags;
    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }

    DDD("process work");
    if (!mContext) {
        DDD("creating decoder context to host in process work");
        checkMode(pool);
        createDecoder();
        decodeHeaderAfterFlush();
    }

    size_t inOffset = 0u;
    size_t inSize = 0u;
    uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF;
    mPts = work->input.ordinal.timestamp.peeku();
    C2ReadView rView = mDummyReadView;
    if (!work->input.buffers.empty()) {
        rView =
            work->input.buffers[0]->data().linearBlocks().front().map().get();
        inSize = rView.capacity();
        if (inSize && rView.error()) {
            ALOGE("read view map failed %d", rView.error());
            work->result = rView.error();
            return;
        }
    }
    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
    bool hasPicture = (inSize > 0);

    DDD("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
    size_t inPos = 0;
    while (inPos < inSize && inSize - inPos >= kMinInputBytes) {
        if (C2_OK != ensureDecoderState(pool)) {
            mSignalledError = true;
            work->workletsProcessed = 1u;
            work->result = C2_CORRUPTED;
            return;
        }

        {
            // C2GraphicView wView;// = mOutBlock->map().get();
            // if (wView.error()) {
            //     ALOGE("graphic view map failed %d", wView.error());
            //     work->result = wView.error();
            //     return;
            // }
            if (!setDecodeArgs(&rView, nullptr, inOffset + inPos,
                               inSize - inPos, workIndex)) {
                mSignalledError = true;
                work->workletsProcessed = 1u;
                work->result = C2_CORRUPTED;
                return;
            }

            if (false == mHeaderDecoded) {
                /* Decode header and get dimensions */
                setParams(mStride);
            }

            DDD("flag is %x", work->input.flags);
            if (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) {
                hasPicture = false;
                if (mCsd0.empty()) {
                    mCsd0.assign(mInPBuffer, mInPBuffer + mInPBufferSize);
                    DDD("assign to csd0 with %d bytes", mInPBufferSize);
                } else if (mCsd1.empty()) {
                    mCsd1.assign(mInPBuffer, mInPBuffer + mInPBufferSize);
                    DDD("assign to csd1 with %d bytes", mInPBufferSize);
                }
                // this is not really a valid pts from config
                removePts(mPts);
            }

            uint32_t delay;
            GETTIME(&mTimeStart, nullptr);
            TIME_DIFF(mTimeEnd, mTimeStart, delay);
            //(void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
            DDD("decoding");
            h264_result_t h264Res = mContext->decodeFrame(
                mInPBuffer, mInPBufferSize, mIndex2Pts[mInTsMarker]);
            mConsumedBytes = h264Res.bytesProcessed;
            DDD("decoding consumed %d", (int)mConsumedBytes);

            if (mHostColorBufferId > 0) {
                mImg = mContext->renderOnHostAndReturnImageMetadata(
                    mHostColorBufferId);
            } else {
                mImg = mContext->getImage();
            }
            uint32_t decodeTime;
            GETTIME(&mTimeEnd, nullptr);
            TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
        }
        // TODO: handle res change
        if (0) {
            DDD("resolution changed");
            drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
            resetDecoder();
            resetPlugin();
            work->workletsProcessed = 0u;

            /* Decode header and get new dimensions */
            setParams(mStride);
            // (void) ivdec_api_function(mDecHandle, &s_decode_ip,
            // &s_decode_op);
        }
        if (mImg.data != nullptr) {
            // check for new width and height
            auto decodedW = mImg.width;
            auto decodedH = mImg.height;
            if (decodedW != mWidth || decodedH != mHeight) {
                mWidth = decodedW;
                mHeight = decodedH;

                C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
                std::vector<std::unique_ptr<C2SettingResult>> failures;
                c2_status_t err =
                    mIntf->config({&size}, C2_MAY_BLOCK, &failures);
                if (err == C2_OK) {
                    work->worklets.front()->output.configUpdate.push_back(
                        C2Param::Copy(size));
                    ensureDecoderState(pool);
                } else {
                    ALOGE("Cannot set width and height");
                    mSignalledError = true;
                    work->workletsProcessed = 1u;
                    work->result = C2_CORRUPTED;
                    return;
                }
            }

            DDD("got data %" PRIu64 " with pts %" PRIu64,
                getWorkIndex(mImg.pts), mImg.pts);
            mHeaderDecoded = true;
            copyImageData(mImg);
            finishWork(getWorkIndex(mImg.pts), work);
            removePts(mImg.pts);
        } else {
            work->workletsProcessed = 0u;
        }

        inPos += mConsumedBytes;
    }
    if (eos) {
        DDD("drain because of eos");
        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
        mSignalledOutputEos = true;
    } else if (!hasPicture) {
        DDD("no picture, fill empty work");
        fillEmptyWork(work);
    }

    work->input.buffers.clear();
}

c2_status_t
C2GoldfishAvcDec::drainInternal(uint32_t drainMode,
                                const std::shared_ptr<C2BlockPool> &pool,
                                const std::unique_ptr<C2Work> &work) {
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    if (OK != setFlushMode())
        return C2_CORRUPTED;
    while (true) {
        if (C2_OK != ensureDecoderState(pool)) {
            mSignalledError = true;
            work->workletsProcessed = 1u;
            work->result = C2_CORRUPTED;
            return C2_CORRUPTED;
        }
        /*
        C2GraphicView wView = mOutBlock->map().get();
        if (wView.error()) {
            ALOGE("graphic view map failed %d", wView.error());
            return C2_CORRUPTED;
        }
        if (!setDecodeArgs(nullptr, &wView, 0, 0, 0)) {
            mSignalledError = true;
            work->workletsProcessed = 1u;
            return C2_CORRUPTED;
        }
        */

        if (mHostColorBufferId > 0) {
            mImg = mContext->renderOnHostAndReturnImageMetadata(
                mHostColorBufferId);
        } else {
            mImg = mContext->getImage();
        }

        // TODO: maybe keep rendering to screen
        // mImg = mContext->getImage();
        if (mImg.data != nullptr) {
            DDD("got data in drain mode %" PRIu64 " with pts %" PRIu64,
                getWorkIndex(mImg.pts), mImg.pts);
            copyImageData(mImg);
            finishWork(getWorkIndex(mImg.pts), work);
            removePts(mImg.pts);
        } else {
            fillEmptyWork(work);
            break;
        }
    }

    return C2_OK;
}

c2_status_t C2GoldfishAvcDec::drain(uint32_t drainMode,
                                    const std::shared_ptr<C2BlockPool> &pool) {
    DDD("drainInternal because of drain");
    return drainInternal(drainMode, pool, nullptr);
}

class C2GoldfishAvcDecFactory : public C2ComponentFactory {
  public:
    C2GoldfishAvcDecFactory()
        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
              GoldfishComponentStore::Create()->getParamReflector())) {}

    virtual c2_status_t
    createComponent(c2_node_id_t id,
                    std::shared_ptr<C2Component> *const component,
                    std::function<void(C2Component *)> deleter) override {
        *component = std::shared_ptr<C2Component>(
            new C2GoldfishAvcDec(
                COMPONENT_NAME, id,
                std::make_shared<C2GoldfishAvcDec::IntfImpl>(mHelper)),
            deleter);
        return C2_OK;
    }

    virtual c2_status_t createInterface(
        c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
        std::function<void(C2ComponentInterface *)> deleter) override {
        *interface = std::shared_ptr<C2ComponentInterface>(
            new SimpleInterface<C2GoldfishAvcDec::IntfImpl>(
                COMPONENT_NAME, id,
                std::make_shared<C2GoldfishAvcDec::IntfImpl>(mHelper)),
            deleter);
        return C2_OK;
    }

    virtual ~C2GoldfishAvcDecFactory() override = default;

  private:
    std::shared_ptr<C2ReflectorHelper> mHelper;
};

} // namespace android

extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
    DDD("in %s", __func__);
    return new ::android::C2GoldfishAvcDecFactory();
}

extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
    DDD("in %s", __func__);
    delete factory;
}