/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftGav1Dec"
#include "C2SoftGav1Dec.h"

#include <android-base/properties.h>
#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <Codec2BufferUtils.h>
#include <Codec2CommonUtils.h>
#include <Codec2Mapper.h>
#include <SimpleC2Interface.h>
#include <libyuv.h>
#include <log/log.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>

// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
#if LIBYUV_VERSION >= 1780
#include <algorithm>
#define HAVE_LIBYUV_I410_I210_TO_AB30 1
#else
#define HAVE_LIBYUV_I410_I210_TO_AB30 0
#endif

namespace android {

// Property used to control the number of threads used in the gav1 decoder.
constexpr char kNumThreadsProperty[] = "debug.c2.gav1.numthreads";
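// For example, the thread count can be overridden on a debug build with:
//   adb shell setprop debug.c2.gav1.numthreads 2
// A value of 0 (the default) lets the decoder use the online CPU core count.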

// The codec name is set and passed in as a compile flag from Android.bp.
constexpr char COMPONENT_NAME[] = CODECNAME;

constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;

class C2SoftGav1Dec::IntfImpl : public SimpleInterface<void>::BaseParams {
 public:
  explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
      : SimpleInterface<void>::BaseParams(
            helper, COMPONENT_NAME, C2Component::KIND_DECODER,
            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
    noPrivateBuffers();  // TODO: account for our buffers here.
    noInputReferences();
    noOutputReferences();
    noInputLatency();
    noTimeStretch();

    addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                     .withConstValue(new C2ComponentAttributesSetting(
                         C2Component::ATTRIB_IS_TEMPORAL))
                     .build());

    addParameter(
        DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
            .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 4096),
                C2F(mSize, height).inRange(2, 4096),
            })
            .withSetter(SizeSetter)
            .build());

    addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                     .withDefault(new C2StreamProfileLevelInfo::input(
                         0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
                     .withFields({C2F(mProfileLevel, profile)
                                      .oneOf({C2Config::PROFILE_AV1_0,
                                              C2Config::PROFILE_AV1_1}),
                                  C2F(mProfileLevel, level)
                                      .oneOf({
                                          C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
                                          C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
                                          C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
                                          C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
                                          C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
                                          C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
                                          C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
                                          C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
                                      })})
                     .withSetter(ProfileLevelSetter, mSize)
                     .build());

    mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoInput)
            .withFields({
                C2F(mHdr10PlusInfoInput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoInputSetter)
            .build());

    mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
    addParameter(
        DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
            .withDefault(mHdr10PlusInfoOutput)
            .withFields({
                C2F(mHdr10PlusInfoOutput, m.value).any(),
            })
            .withSetter(Hdr10PlusInfoOutputSetter)
            .build());

    // default static info
    C2HdrStaticMetadataStruct defaultStaticInfo{};
    helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
    addParameter(
        DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
            .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
            .withFields({
                C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
                C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
                C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
                C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
                C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)
            })
            .withSetter(HdrStaticInfoSetter)
            .build());

    addParameter(
        DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
            .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
            .withFields({
                C2F(mSize, width).inRange(2, 2048, 2),
                C2F(mSize, height).inRange(2, 2048, 2),
            })
            .withSetter(MaxPictureSizeSetter, mSize)
            .build());

    addParameter(DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
                     .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
                     .withFields({
                         C2F(mMaxInputSize, value).any(),
                     })
                     .calculatedAs(MaxInputSizeSetter, mMaxSize)
                     .build());

    C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
    std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
        C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */,
                                               C2Color::YUV_420);
    memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));

    defaultColorInfo = C2StreamColorInfo::output::AllocShared(
        {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */,
        C2Color::YUV_420);
    helper->addStructDescriptors<C2ChromaOffsetStruct>();

    addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
                     .withConstValue(defaultColorInfo)
                     .build());

    addParameter(
        DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsTuning::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields(
                {C2F(mDefaultColorAspects, range)
                     .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                 C2F(mDefaultColorAspects, primaries)
                     .inRange(C2Color::PRIMARIES_UNSPECIFIED,
                              C2Color::PRIMARIES_OTHER),
                 C2F(mDefaultColorAspects, transfer)
                     .inRange(C2Color::TRANSFER_UNSPECIFIED,
                              C2Color::TRANSFER_OTHER),
                 C2F(mDefaultColorAspects, matrix)
                     .inRange(C2Color::MATRIX_UNSPECIFIED,
                              C2Color::MATRIX_OTHER)})
            .withSetter(DefaultColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::input(
                0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mCodedColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mCodedColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mCodedColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mCodedColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(CodedColorAspectsSetter)
            .build());

    addParameter(
        DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
            .withDefault(new C2StreamColorAspectsInfo::output(
                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
            .withFields({
                C2F(mColorAspects, range).inRange(
                    C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
                C2F(mColorAspects, primaries).inRange(
                    C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
                C2F(mColorAspects, transfer).inRange(
                    C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
                C2F(mColorAspects, matrix).inRange(
                    C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
            })
            .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
            .build());

    std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
    if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
      pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
    }
    // If the surface color format (IMPLEMENTATION_DEFINED) is not listed among
    // the supported formats, there is no way to know when the color format is
    // configured for surface output. This is needed to be able to choose a
    // 10-bit format while decoding 10-bit clips in surface mode.
    pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

    // TODO: support more formats?
    addParameter(
        DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
            .withDefault(new C2StreamPixelFormatInfo::output(
                0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
            .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
            .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
            .build());
  }

  static C2R SizeSetter(bool mayBlock,
                        const C2P<C2StreamPictureSizeInfo::output> &oldMe,
                        C2P<C2StreamPictureSizeInfo::output> &me) {
    (void)mayBlock;
    C2R res = C2R::Ok();
    if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
      me.set().width = oldMe.v.width;
    }
    if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
      res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
      me.set().height = oldMe.v.height;
    }
    return res;
  }

  static C2R MaxPictureSizeSetter(
      bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    // TODO: get max width/height from the size's field helpers vs.
    // hardcoding
    me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
    me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
    return C2R::Ok();
  }

  static C2R MaxInputSizeSetter(
      bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
      const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
    (void)mayBlock;
    // assume compression ratio of 2, but enforce a floor
    me.set().value = c2_max((((maxSize.v.width + 63) / 64)
                             * ((maxSize.v.height + 63) / 64) * 3072), kMinInputBufferSize);
    return C2R::Ok();
  }

  static C2R DefaultColorAspectsSetter(
      bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
    (void)mayBlock;
    if (me.v.range > C2Color::RANGE_OTHER) {
      me.set().range = C2Color::RANGE_OTHER;
    }
    if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
      me.set().primaries = C2Color::PRIMARIES_OTHER;
    }
    if (me.v.transfer > C2Color::TRANSFER_OTHER) {
      me.set().transfer = C2Color::TRANSFER_OTHER;
    }
    if (me.v.matrix > C2Color::MATRIX_OTHER) {
      me.set().matrix = C2Color::MATRIX_OTHER;
    }
    return C2R::Ok();
  }

  static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
                                const C2P<C2StreamColorAspectsTuning::output> &def,
                                const C2P<C2StreamColorAspectsInfo::input> &coded) {
    (void)mayBlock;
    // take default values for all unspecified fields, and coded values for specified ones
    me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
    me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
        ? def.v.primaries : coded.v.primaries;
    me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
        ? def.v.transfer : coded.v.transfer;
    me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
    return C2R::Ok();
  }

  static C2R ProfileLevelSetter(
      bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
      const C2P<C2StreamPictureSizeInfo::output> &size) {
    (void)mayBlock;
    (void)size;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  std::shared_ptr<C2StreamColorAspectsTuning::output>
  getDefaultColorAspects_l() {
    return mDefaultColorAspects;
  }

  std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
    return mColorAspects;
  }

  static C2R Hdr10PlusInfoInputSetter(bool mayBlock,
                                      C2P<C2StreamHdr10PlusInfo::input> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  static C2R Hdr10PlusInfoOutputSetter(bool mayBlock,
                                       C2P<C2StreamHdr10PlusInfo::output> &me) {
    (void)mayBlock;
    (void)me;  // TODO: validate
    return C2R::Ok();
  }

  // unsafe getters
  std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const { return mPixelFormat; }

  static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output> &me) {
    (void)mayBlock;
    if (me.v.mastering.red.x > 1) {
      me.set().mastering.red.x = 1;
    }
    if (me.v.mastering.red.y > 1) {
      me.set().mastering.red.y = 1;
    }
    if (me.v.mastering.green.x > 1) {
      me.set().mastering.green.x = 1;
    }
    if (me.v.mastering.green.y > 1) {
      me.set().mastering.green.y = 1;
    }
    if (me.v.mastering.blue.x > 1) {
      me.set().mastering.blue.x = 1;
    }
    if (me.v.mastering.blue.y > 1) {
      me.set().mastering.blue.y = 1;
    }
    if (me.v.mastering.white.x > 1) {
      me.set().mastering.white.x = 1;
    }
    if (me.v.mastering.white.y > 1) {
      me.set().mastering.white.y = 1;
    }
    if (me.v.mastering.maxLuminance > 65535.0) {
      me.set().mastering.maxLuminance = 65535.0;
    }
    if (me.v.mastering.minLuminance > 6.5535) {
      me.set().mastering.minLuminance = 6.5535;
    }
    if (me.v.maxCll > 65535.0) {
      me.set().maxCll = 65535.0;
    }
    if (me.v.maxFall > 65535.0) {
      me.set().maxFall = 65535.0;
    }
    return C2R::Ok();
  }

 private:
  std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
  std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
  std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
  std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
  std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
  std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
  std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
  std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
  std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
  std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
  std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
};

C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<IntfImpl> &intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mCodecCtx(nullptr) {
  mTimeStart = mTimeEnd = systemTime();
}

C2SoftGav1Dec::C2SoftGav1Dec(const char *name, c2_node_id_t id,
                             const std::shared_ptr<C2ReflectorHelper> &helper)
    : C2SoftGav1Dec(name, id, std::make_shared<IntfImpl>(helper)) {
}

C2SoftGav1Dec::~C2SoftGav1Dec() { onRelease(); }

c2_status_t C2SoftGav1Dec::onInit() {
  return initDecoder() ? C2_OK : C2_CORRUPTED;
}

c2_status_t C2SoftGav1Dec::onStop() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  return C2_OK;
}

void C2SoftGav1Dec::onReset() {
  (void)onStop();
  c2_status_t err = onFlush_sm();
  if (err != C2_OK) {
    ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
    destroyDecoder();
    if (!initDecoder()) {
      ALOGE("Hard reset failed.");
    }
  }
}

void C2SoftGav1Dec::onRelease() { destroyDecoder(); }

c2_status_t C2SoftGav1Dec::onFlush_sm() {
  Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  // Dequeue frame (if any) that was enqueued previously.
  const libgav1::DecoderBuffer *buffer;
  status = mCodecCtx->DequeueFrame(&buffer);
  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("Failed to dequeue frame after flushing the av1 decoder. status: %d",
          status);
    return C2_CORRUPTED;
  }

  mSignalledError = false;
  mSignalledOutputEos = false;

  return C2_OK;
}

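// Returns the number of online CPU cores, used as the default thread count
// for the libgav1 decoder.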
static int GetCPUCoreCount() {
  int cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
  cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
#else
  // _SC_NPROC_ONLN must be defined...
  cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
  CHECK(cpuCoreCount >= 1);
  ALOGV("Number of CPU cores: %d", cpuCoreCount);
  return cpuCoreCount;
}

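// Creates and initializes the libgav1 decoder instance. The thread count
// defaults to the online CPU core count and can be lowered through
// kNumThreadsProperty.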
bool C2SoftGav1Dec::initDecoder() {
  mSignalledError = false;
  mSignalledOutputEos = false;
  mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
  {
    IntfImpl::Lock lock = mIntf->lock();
    mPixelFormatInfo = mIntf->getPixelFormat_l();
  }
  mCodecCtx.reset(new libgav1::Decoder());

  if (mCodecCtx == nullptr) {
    ALOGE("mCodecCtx is null");
    return false;
  }

  libgav1::DecoderSettings settings = {};
  settings.threads = GetCPUCoreCount();
  int32_t numThreads = android::base::GetIntProperty(kNumThreadsProperty, 0);
  if (numThreads > 0 && numThreads < settings.threads) {
    settings.threads = numThreads;
  }

  ALOGV("Using libgav1 AV1 software decoder.");
  Libgav1StatusCode status = mCodecCtx->Init(&settings);
  if (status != kLibgav1StatusOk) {
    ALOGE("av1 decoder failed to initialize. status: %d.", status);
    return false;
  }

  return true;
}

void C2SoftGav1Dec::destroyDecoder() { mCodecCtx = nullptr; }

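// Completes |work| with no output buffers, propagating the end-of-stream flag
// from the input if it is set.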
void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
  uint32_t flags = 0;
  if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
    flags |= C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("signalling eos");
  }
  work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
  work->worklets.front()->output.buffers.clear();
  work->worklets.front()->output.ordinal = work->input.ordinal;
  work->workletsProcessed = 1u;
}

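// Attaches |block| (cropped to the current picture size) and the current
// output color aspects to the work item identified by |index|.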
void C2SoftGav1Dec::finishWork(uint64_t index,
                               const std::unique_ptr<C2Work> &work,
                               const std::shared_ptr<C2GraphicBlock> &block) {
  std::shared_ptr<C2Buffer> buffer =
      createGraphicBuffer(block, C2Rect(mWidth, mHeight));
  {
    IntfImpl::Lock lock = mIntf->lock();
    buffer->setInfo(mIntf->getColorAspects_l());
  }
  auto fillWork = [buffer, index](const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
      flags |= C2FrameData::FLAG_END_OF_STREAM;
      ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.buffers.push_back(buffer);
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
  };
  if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
    fillWork(work);
  } else {
    finish(index, fillWork);
  }
}

void C2SoftGav1Dec::process(const std::unique_ptr<C2Work> &work,
                            const std::shared_ptr<C2BlockPool> &pool) {
  work->result = C2_OK;
  work->workletsProcessed = 0u;
  work->worklets.front()->output.configUpdate.clear();
  work->worklets.front()->output.flags = work->input.flags;
  if (mSignalledError || mSignalledOutputEos) {
    work->result = C2_BAD_VALUE;
    return;
  }

  size_t inOffset = 0u;
  size_t inSize = 0u;
  C2ReadView rView = mDummyReadView;
  if (!work->input.buffers.empty()) {
    rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
    inSize = rView.capacity();
    if (inSize && rView.error()) {
      ALOGE("read view map failed %d", rView.error());
      work->result = C2_CORRUPTED;
      return;
    }
  }

  bool codecConfig =
      ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
  bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);

  ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", inSize,
        (int)work->input.ordinal.timestamp.peeku(),
        (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);

  if (codecConfig) {
    fillEmptyWork(work);
    return;
  }

  int64_t frameIndex = work->input.ordinal.frameIndex.peekll();
  if (inSize) {
    uint8_t *bitstream = const_cast<uint8_t *>(rView.data() + inOffset);

    mTimeStart = systemTime();
    nsecs_t delay = mTimeStart - mTimeEnd;

    const Libgav1StatusCode status =
        mCodecCtx->EnqueueFrame(bitstream, inSize, frameIndex,
                                /*buffer_private_data=*/nullptr);

    mTimeEnd = systemTime();
    nsecs_t decodeTime = mTimeEnd - mTimeStart;
    ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);

    if (status != kLibgav1StatusOk) {
      ALOGE("av1 decoder failed to decode frame. status: %d.", status);
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      mSignalledError = true;
      return;
    }

  }

  (void)outputBuffer(pool, work);

  if (eos) {
    drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
    mSignalledOutputEos = true;
  } else if (!inSize) {
    fillEmptyWork(work);
  }
}

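// Converts the fixed-point HDR mastering display and content light level
// metadata from the decoded buffer and emits a config update when it changes.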
void C2SoftGav1Dec::getHDRStaticParams(const libgav1::DecoderBuffer *buffer,
                                       const std::unique_ptr<C2Work> &work) {
  C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
  bool infoPresent = false;
  if (buffer->has_hdr_mdcv) {
    // hdr_mdcv.primary_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.red.x =
        buffer->hdr_mdcv.primary_chromaticity_x[0] / 65536.0;
    hdrStaticMetadataInfo.mastering.red.y =
        buffer->hdr_mdcv.primary_chromaticity_y[0] / 65536.0;

    hdrStaticMetadataInfo.mastering.green.x =
        buffer->hdr_mdcv.primary_chromaticity_x[1] / 65536.0;
    hdrStaticMetadataInfo.mastering.green.y =
        buffer->hdr_mdcv.primary_chromaticity_y[1] / 65536.0;

    hdrStaticMetadataInfo.mastering.blue.x =
        buffer->hdr_mdcv.primary_chromaticity_x[2] / 65536.0;
    hdrStaticMetadataInfo.mastering.blue.y =
        buffer->hdr_mdcv.primary_chromaticity_y[2] / 65536.0;

    // hdr_mdcv.white_point_chromaticity_* values are in 0.16 fixed-point format.
    hdrStaticMetadataInfo.mastering.white.x =
        buffer->hdr_mdcv.white_point_chromaticity_x / 65536.0;
    hdrStaticMetadataInfo.mastering.white.y =
        buffer->hdr_mdcv.white_point_chromaticity_y / 65536.0;

    // hdr_mdcv.luminance_max is in 24.8 fixed-point format.
    hdrStaticMetadataInfo.mastering.maxLuminance = buffer->hdr_mdcv.luminance_max / 256.0;
    // hdr_mdcv.luminance_min is in 18.14 format.
    hdrStaticMetadataInfo.mastering.minLuminance = buffer->hdr_mdcv.luminance_min / 16384.0;
    infoPresent = true;
  }

  if (buffer->has_hdr_cll) {
    hdrStaticMetadataInfo.maxCll = buffer->hdr_cll.max_cll;
    hdrStaticMetadataInfo.maxFall = buffer->hdr_cll.max_fall;
    infoPresent = true;
  }
  // config if static info has changed
  if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
    mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
    work->worklets.front()->output.configUpdate.push_back(
        C2Param::Copy(mHdrStaticMetadataInfo));
  }
}

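// Repackages the ITU-T T.35 payload from the decoded buffer as HDR10+ info
// and emits a config update when it changes.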
void C2SoftGav1Dec::getHDR10PlusInfoData(const libgav1::DecoderBuffer *buffer,
                                         const std::unique_ptr<C2Work> &work) {
  if (buffer->has_itut_t35) {
    std::vector<uint8_t> payload;
    size_t payloadSize = buffer->itut_t35.payload_size;
    if (payloadSize > 0) {
      payload.push_back(buffer->itut_t35.country_code);
      if (buffer->itut_t35.country_code == 0xFF) {
        payload.push_back(buffer->itut_t35.country_code_extension_byte);
      }
      payload.insert(payload.end(), buffer->itut_t35.payload_bytes,
                     buffer->itut_t35.payload_bytes + buffer->itut_t35.payload_size);
    }

    std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
        C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
    if (!hdr10PlusInfo) {
      ALOGE("Hdr10PlusInfo allocation failed");
      mSignalledError = true;
      work->result = C2_NO_MEMORY;
      return;
    }
    memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());

    // config if hdr10Plus info has changed
    if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
      mHdr10PlusInfo = std::move(hdr10PlusInfo);
      work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
    }
  }
}

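// Maps the bitstream color aspects (primaries, transfer, matrix, range) to
// Codec2 values and pushes them to the interface when they change.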
void C2SoftGav1Dec::getVuiParams(const libgav1::DecoderBuffer *buffer) {
  VuiColorAspects vuiColorAspects;
  vuiColorAspects.primaries = buffer->color_primary;
  vuiColorAspects.transfer = buffer->transfer_characteristics;
  vuiColorAspects.coeffs = buffer->matrix_coefficients;
  vuiColorAspects.fullRange = buffer->color_range;

  // convert vui aspects to C2 values if changed
  if (!(vuiColorAspects == mBitstreamColorAspects)) {
    mBitstreamColorAspects = vuiColorAspects;
    ColorAspects sfAspects;
    C2StreamColorAspectsInfo::input codedAspects = { 0u };
    ColorUtils::convertIsoColorAspectsToCodecAspects(
        vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
        vuiColorAspects.fullRange, sfAspects);
    if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
      codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
      codedAspects.range = C2Color::RANGE_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
      codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
    }
    if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
      codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
    }
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
  }
}

void C2SoftGav1Dec::setError(const std::unique_ptr<C2Work> &work, c2_status_t error) {
  mSignalledError = true;
  work->result = error;
  work->workletsProcessed = 1u;
}

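// Ensures the temporary conversion buffer can hold at least |size| 16-bit
// samples; an existing buffer is reused if it is already large enough.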
bool C2SoftGav1Dec::allocTmpFrameBuffer(size_t size) {
  if (size > mTmpFrameBufferSize) {
    mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
    if (mTmpFrameBuffer == nullptr) {
      mTmpFrameBufferSize = 0;
      return false;
    }
    mTmpFrameBufferSize = size;
  }
  return true;
}

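// Prepares one row of neutral chroma samples (|value|) in the temporary
// buffer, used as the U/V source when the decoded image is monochrome.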
bool C2SoftGav1Dec::fillMonochromeRow(int value) {
  const size_t tmpSize = mWidth;
  const bool needFill = tmpSize > mTmpFrameBufferSize;
  if (!allocTmpFrameBuffer(tmpSize)) {
    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
    return false;
  }
  if (needFill) {
    std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
  }
  return true;
}

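// Dequeues one decoded frame from libgav1, updates the picture size, pixel
// format and HDR metadata as needed, converts the frame into the negotiated
// output format, and finishes the corresponding work item. Returns false when
// there is nothing to output or on error.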
bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                 const std::unique_ptr<C2Work> &work) {
  if (!(work && pool)) return false;

  const libgav1::DecoderBuffer *buffer;
  const Libgav1StatusCode status = mCodecCtx->DequeueFrame(&buffer);

  if (status != kLibgav1StatusOk && status != kLibgav1StatusNothingToDequeue) {
    ALOGE("av1 decoder DequeueFrame failed. status: %d.", status);
    return false;
  }

  // |buffer| can be NULL when status is kLibgav1StatusOk or
  // kLibgav1StatusNothingToDequeue. This is not an error and can mean one of
  // two things:
  //  - The EnqueueFrame() call was a flush (called with nullptr).
  //  - The enqueued frame did not produce any displayable frames.
  if (!buffer) {
    return false;
  }

#if LIBYUV_VERSION < 1871
  if (buffer->bitdepth > 10) {
    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
#endif

  const int width = buffer->displayed_width[0];
  const int height = buffer->displayed_height[0];
  if (width != mWidth || height != mHeight) {
    mWidth = width;
    mHeight = height;

    C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(size));
    } else {
      ALOGE("Config update size failed");
      mSignalledError = true;
      work->result = C2_CORRUPTED;
      work->workletsProcessed = 1u;
      return false;
    }
  }

  getVuiParams(buffer);
  getHDRStaticParams(buffer, work);
  getHDR10PlusInfoData(buffer, work);

#if LIBYUV_VERSION < 1779
  if (buffer->bitdepth == 10 &&
      !(buffer->image_format == libgav1::kImageFormatYuv420 ||
        buffer->image_format == libgav1::kImageFormatMonochrome400)) {
    ALOGE("image_format %d not supported for 10bit", buffer->image_format);
    mSignalledError = true;
    work->workletsProcessed = 1u;
    work->result = C2_CORRUPTED;
    return false;
  }
#endif

  const bool isMonochrome =
      buffer->image_format == libgav1::kImageFormatMonochrome400;

  std::shared_ptr<C2GraphicBlock> block;
  uint32_t format = HAL_PIXEL_FORMAT_YV12;
  std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
  if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
    IntfImpl::Lock lock = mIntf->lock();
    codedColorAspects = mIntf->getColorAspects_l();
    bool allowRGBA1010102 = false;
    if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
        codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
        codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
      allowRGBA1010102 = true;
    }
    format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
#if !HAVE_LIBYUV_I410_I210_TO_AB30
    if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
        (buffer->image_format != libgav1::kImageFormatYuv420) &&
        (buffer->bitdepth == 10)) {
      ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
      mSignalledError = true;
      work->result = C2_OMITTED;
      work->workletsProcessed = 1u;
      return false;
    }
#endif
  }
  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
      (buffer->image_format == libgav1::kImageFormatYuv422 ||
       buffer->image_format == libgav1::kImageFormatYuv444)) {
    // There are no 12-bit color conversion functions from YUV422/YUV444 to
    // RGBA_1010102. Use 8-bit YV12 in this case.
    format = HAL_PIXEL_FORMAT_YV12;
  }
  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
    // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
    // in this case.
    format = HAL_PIXEL_FORMAT_YV12;
  }

  if (mHalPixelFormat != format) {
    C2StreamPixelFormatInfo::output pixelFormat(0u, format);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
    if (err == C2_OK) {
      work->worklets.front()->output.configUpdate.push_back(
          C2Param::Copy(pixelFormat));
    } else {
      ALOGE("Config update pixelFormat failed");
      mSignalledError = true;
      work->workletsProcessed = 1u;
      work->result = C2_CORRUPTED;
      return false;
    }
    mHalPixelFormat = format;
  }

  C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

  // We always create a graphic block that is width aligned to 16 and height
  // aligned to 2. We set the correct "crop" value of the image in the call to
  // createGraphicBuffer() by setting the correct image dimensions.
  c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16),
                                            align(mHeight, 2), format, usage,
                                            &block);

  if (err != C2_OK) {
    ALOGE("fetchGraphicBlock for Output failed with status %d", err);
    work->result = err;
    return false;
  }

  C2GraphicView wView = block->map().get();

  if (wView.error()) {
    ALOGE("graphic view map failed %d", wView.error());
    work->result = C2_CORRUPTED;
    return false;
  }

  ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
        block->height(), mWidth, mHeight, (int)buffer->user_private_data);

  uint8_t *dstY = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_Y]);
  uint8_t *dstU = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_U]);
  uint8_t *dstV = const_cast<uint8_t *>(wView.data()[C2PlanarLayout::PLANE_V]);

  C2PlanarLayout layout = wView.layout();
  size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
  size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
  size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;

  if (buffer->bitdepth == 12) {
#if LIBYUV_VERSION >= 1871
    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0] / 2;
    size_t srcUStride = buffer->stride[1] / 2;
    size_t srcVStride = buffer->stride[2] / 2;
    if (isMonochrome) {
      if (!fillMonochromeRow(2048)) {
        setError(work, C2_NO_MEMORY);
        return false;
      }
      srcU = srcV = mTmpFrameBuffer.get();
      srcUStride = srcVStride = 0;
    }
    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
      libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                               dstY, dstYStride, &libyuv::kYuvV2020Constants,
                               mWidth, mHeight);
    } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
      libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
      libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else {
      libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    }
#endif  // LIBYUV_VERSION >= 1871
  } else if (buffer->bitdepth == 10) {
    const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
    const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
    const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0] / 2;
    size_t srcUStride = buffer->stride[1] / 2;
    size_t srcVStride = buffer->stride[2] / 2;

    if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
      bool processed = false;
#if HAVE_LIBYUV_I410_I210_TO_AB30
      if (buffer->image_format == libgav1::kImageFormatYuv444) {
        libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                 dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                 mWidth, mHeight);
        processed = true;
      } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                                 dstY, dstYStride, &libyuv::kYuvV2020Constants,
                                 mWidth, mHeight);
        processed = true;
      }
#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
      if (!processed) {
        if (isMonochrome) {
          if (!fillMonochromeRow(512)) {
            setError(work, C2_NO_MEMORY);
            return false;
          }
          srcU = srcV = mTmpFrameBuffer.get();
          srcUStride = srcVStride = 0;
        }
        convertYUV420Planar16ToY410OrRGBA1010102(
            (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
            srcUStride, srcVStride,
            dstYStride / sizeof(uint32_t), mWidth, mHeight,
            std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
      }
    } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
      dstYStride /= 2;
      dstUStride /= 2;
      dstVStride /= 2;
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444 ||
          buffer->image_format == libgav1::kImageFormatYuv422) {
        // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010 and
        // libyuv::I210ToP010 when they are available.
        // Note it may be safe to alias dstY in I010ToP010, but the libyuv API doesn't make any
        // guarantees.
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        if (buffer->image_format == libgav1::kImageFormatYuv444) {
          libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        } else {
          libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                             tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                             mWidth, mHeight);
        }
        libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                    srcYStride, srcUStride, srcVStride, dstYStride,
                                    dstUStride, mWidth, mHeight, isMonochrome);
      }
#else   // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                  srcYStride, srcUStride, srcVStride, dstYStride,
                                  dstUStride, mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    } else {
#if LIBYUV_VERSION >= 1779
      if (buffer->image_format == libgav1::kImageFormatYuv444) {
        // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420 when
        // it's available.
        const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
        if (!allocTmpFrameBuffer(tmpSize)) {
          ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
          setError(work, C2_NO_MEMORY);
          return false;
        }
        uint16_t *const tmpY = mTmpFrameBuffer.get();
        uint16_t *const tmpU = tmpY + dstYStride * mHeight;
        uint16_t *const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
        libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
                           mWidth, mHeight);
        libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
        libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                           dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                           mWidth, mHeight);
      } else {
        convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                    srcUStride, srcVStride, dstYStride, dstUStride,
                                    mWidth, mHeight, isMonochrome);
      }
#else   // LIBYUV_VERSION < 1779
      convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
                                  srcUStride, srcVStride, dstYStride, dstUStride,
                                  mWidth, mHeight, isMonochrome);
#endif  // LIBYUV_VERSION >= 1779
    }
  } else {
    const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
    const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
    const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
    size_t srcYStride = buffer->stride[0];
    size_t srcUStride = buffer->stride[1];
    size_t srcVStride = buffer->stride[2];

    if (buffer->image_format == libgav1::kImageFormatYuv444) {
      libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else if (buffer->image_format == libgav1::kImageFormatYuv422) {
      libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
                         dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
                         mWidth, mHeight);
    } else {
      convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
                                 srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
                                 isMonochrome);
    }
  }
  finishWork(buffer->user_private_data, work, std::move(block));
  block = nullptr;
  return true;
}

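// Signals EOS to libgav1 and drains all pending output frames. NO_DRAIN is a
// no-op and DRAIN_CHAIN is not supported.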
c2_status_t C2SoftGav1Dec::drainInternal(
    uint32_t drainMode, const std::shared_ptr<C2BlockPool> &pool,
    const std::unique_ptr<C2Work> &work) {
  if (drainMode == NO_DRAIN) {
    ALOGW("drain with NO_DRAIN: no-op");
    return C2_OK;
  }
  if (drainMode == DRAIN_CHAIN) {
    ALOGW("DRAIN_CHAIN not supported");
    return C2_OMITTED;
  }

  const Libgav1StatusCode status = mCodecCtx->SignalEOS();
  if (status != kLibgav1StatusOk) {
    ALOGE("Failed to flush av1 decoder. status: %d.", status);
    return C2_CORRUPTED;
  }

  while (outputBuffer(pool, work)) {
  }

  if (drainMode == DRAIN_COMPONENT_WITH_EOS && work &&
      work->workletsProcessed == 0u) {
    fillEmptyWork(work);
  }

  return C2_OK;
}

c2_status_t C2SoftGav1Dec::drain(uint32_t drainMode,
                                 const std::shared_ptr<C2BlockPool> &pool) {
  return drainInternal(drainMode, pool, nullptr);
}

class C2SoftGav1Factory : public C2ComponentFactory {
 public:
  C2SoftGav1Factory()
      : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
            GetCodec2PlatformComponentStore()->getParamReflector())) {}

  virtual c2_status_t createComponent(
      c2_node_id_t id, std::shared_ptr<C2Component> *const component,
      std::function<void(C2Component *)> deleter) override {
    *component = std::shared_ptr<C2Component>(
        new C2SoftGav1Dec(COMPONENT_NAME, id,
                          std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual c2_status_t createInterface(
      c2_node_id_t id, std::shared_ptr<C2ComponentInterface> *const interface,
      std::function<void(C2ComponentInterface *)> deleter) override {
    *interface = std::shared_ptr<C2ComponentInterface>(
        new SimpleInterface<C2SoftGav1Dec::IntfImpl>(
            COMPONENT_NAME, id,
            std::make_shared<C2SoftGav1Dec::IntfImpl>(mHelper)),
        deleter);
    return C2_OK;
  }

  virtual ~C2SoftGav1Factory() override = default;

 private:
  std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory *CreateCodec2Factory() {
  ALOGV("in %s", __func__);
  return new ::android::C2SoftGav1Factory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory *factory) {
  ALOGV("in %s", __func__);
  delete factory;
}