/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#ifdef MPEG4
#define LOG_TAG "C2SoftMpeg4Enc"
#else
#define LOG_TAG "C2SoftH263Enc"
#endif
#include <log/log.h>

#include <inttypes.h>

#include <media/hardware/VideoAPI.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <utils/misc.h>

#include <C2Debug.h>
#include <C2PlatformSupport.h>
#include <SimpleC2Interface.h>
#include <util/C2InterfaceHelper.h>

#include "C2SoftMpeg4Enc.h"
#include "mp4enc_api.h"

namespace android {

namespace {

#ifdef MPEG4
constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.encoder";
const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_MPEG4;
#else
constexpr char COMPONENT_NAME[] = "c2.android.h263.encoder";
const char *MEDIA_MIMETYPE_VIDEO = MEDIA_MIMETYPE_VIDEO_H263;
#endif

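// Video buffering verifier (VBV) delay in seconds. Besides being passed to the
// encoder directly, it is used below to derive the VBV buffer occupancy from
// the configured bitrate (vbvSize = bitrate * VBV_DELAY).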
constexpr float VBV_DELAY = 5.0f;

}  // namespace

class C2SoftMpeg4Enc::IntfImpl : public SimpleInterface<void>::BaseParams {
  public:
    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
        : SimpleInterface<void>::BaseParams(
                helper,
                COMPONENT_NAME,
                C2Component::KIND_ENCODER,
                C2Component::DOMAIN_VIDEO,
                MEDIA_MIMETYPE_VIDEO) {
        noPrivateBuffers(); // TODO: account for our buffers here
        noInputReferences();
        noOutputReferences();
        noInputLatency();
        noTimeStretch();
        setDerivedInstance(this);

        addParameter(
                DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
                .withConstValue(new C2ComponentAttributesSetting(
                        C2Component::ATTRIB_IS_TEMPORAL))
                .build());

        addParameter(
                DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
                .withConstValue(new C2StreamUsageTuning::input(
                        0u, (uint64_t)C2MemoryUsage::CPU_READ))
                .build());

        addParameter(
                DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
                .withDefault(new C2StreamPictureSizeInfo::input(0u, 176, 144))
                .withFields({
#ifdef MPEG4
                    C2F(mSize, width).inRange(16, 176, 16),
                    C2F(mSize, height).inRange(16, 144, 16),
#else
                    C2F(mSize, width).oneOf({176, 352}),
                    C2F(mSize, height).oneOf({144, 288}),
#endif
                })
                .withSetter(SizeSetter)
                .build());

        addParameter(
                DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
                .withDefault(new C2StreamFrameRateInfo::output(0u, 17.))
                // TODO: More restriction?
                .withFields({C2F(mFrameRate, value).greaterThan(0.)})
                .withSetter(
                        Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
                .build());

        addParameter(
                DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
                .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
                .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
                .withSetter(BitrateSetter)
                .build());

        addParameter(
                DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
                .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
                .withFields({C2F(mSyncFramePeriod, value).any()})
                .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
                .build());

#ifdef MPEG4
        addParameter(
                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                .withDefault(new C2StreamProfileLevelInfo::output(
                        0u, PROFILE_MP4V_SIMPLE, LEVEL_MP4V_2))
                .withFields({
                    C2F(mProfileLevel, profile).equalTo(
                            PROFILE_MP4V_SIMPLE),
                    C2F(mProfileLevel, level).oneOf({
                            C2Config::LEVEL_MP4V_0,
                            C2Config::LEVEL_MP4V_0B,
                            C2Config::LEVEL_MP4V_1,
                            C2Config::LEVEL_MP4V_2})
                })
                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                .build());
#else
        addParameter(
                DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
                .withDefault(new C2StreamProfileLevelInfo::output(
                        0u, PROFILE_H263_BASELINE, LEVEL_H263_45))
                .withFields({
                    C2F(mProfileLevel, profile).equalTo(
                            PROFILE_H263_BASELINE),
                    C2F(mProfileLevel, level).oneOf({
                            C2Config::LEVEL_H263_10,
                            C2Config::LEVEL_H263_20,
                            C2Config::LEVEL_H263_30,
                            C2Config::LEVEL_H263_40,
                            C2Config::LEVEL_H263_45})
                })
                .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
                .build());
#endif
    }

    static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (me.v.value <= 4096) {
            me.set().value = 4096;
        }
        return res;
    }

    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                          C2P<C2StreamPictureSizeInfo::input> &me) {
        (void)mayBlock;
        C2R res = C2R::Ok();
        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
            me.set().width = oldMe.v.width;
        }
        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
            me.set().height = oldMe.v.height;
        }
        return res;
    }

    static C2R ProfileLevelSetter(
            bool mayBlock,
            C2P<C2StreamProfileLevelInfo::output> &me,
            const C2P<C2StreamPictureSizeInfo::input> &size,
            const C2P<C2StreamFrameRateInfo::output> &frameRate,
            const C2P<C2StreamBitrateInfo::output> &bitrate) {
        (void)mayBlock;
        if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
#ifdef MPEG4
            me.set().profile = PROFILE_MP4V_SIMPLE;
#else
            me.set().profile = PROFILE_H263_BASELINE;
#endif
        }

        struct LevelLimits {
            C2Config::level_t level;
            uint32_t sampleRate;
            uint32_t width;
            uint32_t height;
            uint32_t frameRate;
            uint32_t bitrate;
            uint32_t vbvSize;
        };

        constexpr LevelLimits kLimits[] = {
#ifdef MPEG4
            { LEVEL_MP4V_0, 380160, 176, 144, 15, 64000, 163840 },
            // { LEVEL_MP4V_0B, 380160, 176, 144, 15, 128000, 163840 },
            { LEVEL_MP4V_1, 380160, 176, 144, 30, 64000, 163840 },
            { LEVEL_MP4V_2, 1520640, 352, 288, 30, 128000, 655360 },
#else
            // HRD Buffer Size = (B + BPPmaxKb * 1024 bits)
            // where, (BPPmaxKb * 1024) is maximum number of bits per picture
            // that has been negotiated for use in the bitstream Sec 3.6 of T-Rec-H.263
            // and B = 4 * Rmax / PCF. Rmax is max bit rate and PCF is picture
            // clock frequency
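            // For example, for Level 10 (QCIF at 64 kbps) BPPmaxKb is 64, so the
            // negotiated picture budget is 64 * 1024 = 65536 bits, and
            // B = 4 * 64000 / (30000/1001) is roughly 8541 bits, which yields the
            // ~74077-bit HRD buffer listed below; Level 45 doubles Rmax to 128 kbps.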
            { LEVEL_H263_10, 380160, 176, 144, 15, 64000, 74077 },
            { LEVEL_H263_45, 380160, 176, 144, 15, 128000, 82619 },
            { LEVEL_H263_20, 1520640, 352, 288, 30, 128000, 279227 },
            { LEVEL_H263_30, 3041280, 352, 288, 30, 384000, 313395 },
            { LEVEL_H263_40, 3041280, 352, 288, 30, 2048000, 535483 },
            // { LEVEL_H263_50, 5068800, 352, 288, 60, 4096000, 808823 },
#endif
        };

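        // Derive the quantities the level table is keyed on: macroblock count,
        // luma sample rate in pixels per second, and the VBV buffer occupancy
        // implied by the configured bitrate.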
        auto mbs = ((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
        auto sampleRate = mbs * frameRate.v.value * 16 * 16;
        auto vbvSize = bitrate.v.value * VBV_DELAY;

        // Check if the supplied level meets the MB / bitrate requirements. If
        // not, update the level with the lowest level meeting the requirements.
        bool found = false;

        // By default needsUpdate = false in case the supplied level does meet
        // the requirements.
        bool needsUpdate = false;
#ifdef MPEG4
        // For Level 0b, we want to update the level anyway, as library does not
        // seem to accept this value.
        if (me.v.level == LEVEL_MP4V_0B) {
            needsUpdate = true;
        }
#endif
        if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
            needsUpdate = true;
        }
        for (const LevelLimits &limit : kLimits) {
            if (sampleRate <= limit.sampleRate && size.v.width <= limit.width &&
                    vbvSize <= limit.vbvSize && size.v.height <= limit.height &&
                    bitrate.v.value <= limit.bitrate && frameRate.v.value <= limit.frameRate) {
                // This is the lowest level that meets the requirements, and if
                // we haven't seen the supplied level yet, that means we don't
                // need the update.
                if (needsUpdate) {
                    ALOGD("Given level %x does not cover current configuration: "
                          "adjusting to %x", me.v.level, limit.level);
                    me.set().level = limit.level;
                }
                found = true;
                break;
            }
            if (me.v.level == limit.level) {
                // We break out of the loop when the lowest feasible level is
                // found. The fact that we're here means that our level doesn't
                // meet the requirement and needs to be updated.
                needsUpdate = true;
            }
        }
        // If not found or exceeds max level, set to the highest supported level.
#ifdef MPEG4
        if (!found || me.v.level > LEVEL_MP4V_2) {
            me.set().level = LEVEL_MP4V_2;
        }
#else
        if (!found || (me.v.level != LEVEL_H263_45 && me.v.level > LEVEL_H263_40)) {
            me.set().level = LEVEL_H263_40;
        }
#endif
        return C2R::Ok();
    }

    // unsafe getters
    std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
    std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
    std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
    uint32_t getSyncFramePeriod() const {
        if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
            return 0;
        }
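        // Convert the sync-frame interval (in microseconds) into a period
        // expressed in frames at the current frame rate, rounded to the
        // nearest frame and clamped to at least 1.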
        double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
        return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
    }

  private:
    std::shared_ptr<C2StreamUsageTuning::input> mUsage;
    std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
    std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
    std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
    std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
    std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
};

C2SoftMpeg4Enc::C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
                               const std::shared_ptr<IntfImpl>& intfImpl)
    : SimpleC2Component(
          std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
      mIntf(intfImpl),
      mHandle(nullptr),
      mEncParams(nullptr),
      mStarted(false),
      mOutBufferSize(524288) {
}

C2SoftMpeg4Enc::C2SoftMpeg4Enc(const char* name, c2_node_id_t id,
                               const std::shared_ptr<C2ReflectorHelper>& helper)
    : C2SoftMpeg4Enc(name, id, std::make_shared<IntfImpl>(helper)) {
}

C2SoftMpeg4Enc::~C2SoftMpeg4Enc() {
    onRelease();
}

c2_status_t C2SoftMpeg4Enc::onInit() {
#ifdef MPEG4
    mEncodeMode = COMBINE_MODE_WITH_ERR_RES;
#else
    mEncodeMode = H263_MODE;
#endif
    if (!mHandle) {
        mHandle = new tagvideoEncControls;
    }

    if (!mEncParams) {
        mEncParams = new tagvideoEncOptions;
    }

    if (!(mEncParams && mHandle)) return C2_NO_MEMORY;

    mSignalledOutputEos = false;
    mSignalledError = false;

    return initEncoder();
}

c2_status_t C2SoftMpeg4Enc::onStop() {
    if (!mStarted) {
        return C2_OK;
    }
    if (mHandle) {
        (void)PVCleanUpVideoEncoder(mHandle);
    }
    mStarted = false;
    mSignalledOutputEos = false;
    mSignalledError = false;
    return C2_OK;
}

void C2SoftMpeg4Enc::onReset() {
    onStop();
    initEncoder();
}

void C2SoftMpeg4Enc::onRelease() {
    onStop();
    if (mEncParams) {
        delete mEncParams;
        mEncParams = nullptr;
    }
    if (mHandle) {
        delete mHandle;
        mHandle = nullptr;
    }
}

c2_status_t C2SoftMpeg4Enc::onFlush_sm() {
    return C2_OK;
}

static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
    uint32_t flags = 0;
    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
        flags |= C2FrameData::FLAG_END_OF_STREAM;
        ALOGV("signalling eos");
    }
    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;
    work->workletsProcessed = 1u;
}

c2_status_t C2SoftMpeg4Enc::initEncParams() {
    if (mHandle) {
        memset(mHandle, 0, sizeof(tagvideoEncControls));
    } else return C2_CORRUPTED;
    if (mEncParams) {
        memset(mEncParams, 0, sizeof(tagvideoEncOptions));
    } else return C2_CORRUPTED;

    if (!PVGetDefaultEncOption(mEncParams, 0)) {
        ALOGE("Failed to get default encoding parameters");
        return C2_CORRUPTED;
    }

    if (mFrameRate->value == 0) {
        ALOGE("Framerate should not be 0");
        return C2_BAD_VALUE;
    }

    mEncParams->encMode = mEncodeMode;
    mEncParams->encWidth[0] = mSize->width;
    mEncParams->encHeight[0] = mSize->height;
    mEncParams->encFrameRate[0] = mFrameRate->value + 0.5;
    mEncParams->rcType = VBR_1;
    mEncParams->vbvDelay = VBV_DELAY;
    mEncParams->profile_level = CORE_PROFILE_LEVEL2;
    mEncParams->packetSize = 32;
    mEncParams->rvlcEnable = PV_OFF;
    mEncParams->numLayers = 1;
    mEncParams->timeIncRes = 1000;
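    // Number of timeIncRes ticks per source frame at the configured frame rate.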
    mEncParams->tickPerSrc = mEncParams->timeIncRes / (mFrameRate->value + 0.5);
    mEncParams->bitRate[0] = mBitrate->value;
    mEncParams->iQuant[0] = 15;
    mEncParams->pQuant[0] = 12;
    mEncParams->quantType[0] = 0;
    mEncParams->noFrameSkipped = PV_OFF;

    // PV's MPEG4 encoder requires the video dimensions to be a multiple of 16.
    if (mSize->width % 16 != 0 || mSize->height % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
              mSize->width, mSize->height);
        return C2_BAD_VALUE;
    }

    // Set IDR frame refresh interval
    mEncParams->intraPeriod = mIntf->getSyncFramePeriod();
    mEncParams->numIntraMB = 0;
    mEncParams->sceneDetect = PV_ON;
    mEncParams->searchRange = 16;
    mEncParams->mv8x8Enable = PV_OFF;
    mEncParams->gobHeaderInterval = 0;
    mEncParams->useACPred = PV_ON;
    mEncParams->intraDCVlcTh = 0;

    return C2_OK;
}

c2_status_t C2SoftMpeg4Enc::initEncoder() {
    if (mStarted) {
        return C2_OK;
    }
    {
        IntfImpl::Lock lock = mIntf->lock();
        mSize = mIntf->getSize_l();
        mBitrate = mIntf->getBitrate_l();
        mFrameRate = mIntf->getFrameRate_l();
    }
    c2_status_t err = initEncParams();
    if (C2_OK != err) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        return err;
    }
    if (!PVInitVideoEncoder(mHandle, mEncParams)) {
        ALOGE("Failed to initialize the encoder");
        mSignalledError = true;
        return C2_CORRUPTED;
    }

    // 1st buffer for codec specific data
    mNumInputFrames = -1;
    mStarted = true;
    return C2_OK;
}

void C2SoftMpeg4Enc::process(
        const std::unique_ptr<C2Work> &work,
        const std::shared_ptr<C2BlockPool> &pool) {
    // Initialize output work
    work->result = C2_OK;
    work->workletsProcessed = 1u;
    work->worklets.front()->output.flags = work->input.flags;
    if (mSignalledError || mSignalledOutputEos) {
        work->result = C2_BAD_VALUE;
        return;
    }

    // Initialize encoder if not already initialized
    if (!mStarted && C2_OK != initEncoder()) {
        ALOGE("Failed to initialize encoder");
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }

    std::shared_ptr<C2LinearBlock> block;
    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
    c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
    if (err != C2_OK) {
        ALOGE("fetchLinearBlock for Output failed with status %d", err);
        work->result = C2_NO_MEMORY;
        return;
    }

    C2WriteView wView = block->map().get();
    if (wView.error()) {
        ALOGE("write view map failed %d", wView.error());
        work->result = wView.error();
        return;
    }

    uint8_t *outPtr = (uint8_t *)wView.data();
    if (mNumInputFrames < 0) {
        // The very first thing we want to output is the codec specific data.
        int32_t outputSize = mOutBufferSize;
        if (!PVGetVolHeader(mHandle, outPtr, &outputSize, 0)) {
            ALOGE("Failed to get VOL header");
            mSignalledError = true;
            work->result = C2_CORRUPTED;
            return;
        } else {
            ALOGV("Bytes Generated in header %d\n", outputSize);
        }

        ++mNumInputFrames;
        if (outputSize) {
            std::unique_ptr<C2StreamInitDataInfo::output> csd =
                C2StreamInitDataInfo::output::AllocUnique(outputSize, 0u);
            if (!csd) {
                ALOGE("CSD allocation failed");
                mSignalledError = true;
                work->result = C2_NO_MEMORY;
                return;
            }
            memcpy(csd->m.value, outPtr, outputSize);
            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
        }
    }

    // handle dynamic bitrate change
    {
        IntfImpl::Lock lock = mIntf->lock();
        std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
        lock.unlock();

        if (bitrate != mBitrate) {
            mBitrate = bitrate;
            int layerBitrate[2] = {static_cast<int>(mBitrate->value), 0};
            ALOGV("Calling PVUpdateBitRate %d", layerBitrate[0]);
            PVUpdateBitRate(mHandle, layerBitrate);
        }
    }

    std::shared_ptr<C2GraphicView> rView;
    std::shared_ptr<C2Buffer> inputBuffer;
    bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
    if (!work->input.buffers.empty()) {
        inputBuffer = work->input.buffers[0];
        rView = std::make_shared<C2GraphicView>(
                inputBuffer->data().graphicBlocks().front().map().get());
        if (rView->error() != C2_OK) {
            ALOGE("graphic view map err = %d", rView->error());
            work->result = rView->error();
            return;
        }
        //(b/232396154)
        //workaround for incorrect crop size in view when using surface mode
        rView->setCrop_be(C2Rect(mSize->width, mSize->height));
    } else {
        fillEmptyWork(work);
        if (eos) {
            mSignalledOutputEos = true;
            ALOGV("signalled EOS");
        }
        return;
    }

    uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull();
    const C2ConstGraphicBlock inBuffer = inputBuffer->data().graphicBlocks().front();
    if (inBuffer.width() < mSize->width ||
        inBuffer.height() < mSize->height) {
        /* Expect width height to be configured */
        ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", inBuffer.width(),
              mSize->width, inBuffer.height(), mSize->height);
        work->result = C2_BAD_VALUE;
        return;
    }

    const C2PlanarLayout &layout = rView->layout();
    uint8_t *yPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_Y]);
    uint8_t *uPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_U]);
    uint8_t *vPlane = const_cast<uint8_t *>(rView->data()[C2PlanarLayout::PLANE_V]);
    int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
    int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
    int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
    uint32_t width = mSize->width;
    uint32_t height = mSize->height;
    // width and height are always even (as block size is 16x16)
    CHECK_EQ((width & 1u), 0u);
    CHECK_EQ((height & 1u), 0u);
    size_t yPlaneSize = width * height;
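    // In the planar YUV 4:2:0 layouts set up below, each chroma plane holds
    // yPlaneSize / 4 samples and a whole frame occupies yPlaneSize * 3 / 2 bytes.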
    switch (layout.type) {
        case C2PlanarLayout::TYPE_RGB:
            [[fallthrough]];
        case C2PlanarLayout::TYPE_RGBA: {
            MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
            mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
            yPlane = conversionBuffer.data();
            uPlane = yPlane + yPlaneSize;
            vPlane = uPlane + yPlaneSize / 4;
            yStride = width;
            uStride = vStride = width / 2;
            ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *rView.get());
            break;
        }
        case C2PlanarLayout::TYPE_YUV: {
            if (!IsYUV420(*rView)) {
                ALOGE("input is not YUV420");
                work->result = C2_BAD_VALUE;
                break;
            }

            if (layout.planes[layout.PLANE_Y].colInc == 1
                    && layout.planes[layout.PLANE_U].colInc == 1
                    && layout.planes[layout.PLANE_V].colInc == 1
                    && yStride == align(width, 16)
                    && uStride == vStride
                    && yStride == 2 * vStride) {
                // I420 compatible with yStride being equal to aligned width
                // planes are already set up above
                break;
            }

            // copy to I420
            MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
            mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
            MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, width, height);
            status_t err = ImageCopy(conversionBuffer.data(), &img, *rView);
            if (err != OK) {
                ALOGE("Buffer conversion failed: %d", err);
                work->result = C2_BAD_VALUE;
                return;
            }
            yPlane = conversionBuffer.data();
            uPlane = yPlane + yPlaneSize;
            vPlane = uPlane + yPlaneSize / 4;
            yStride = width;
            uStride = vStride = width / 2;
            break;
        }

        case C2PlanarLayout::TYPE_YUVA:
            ALOGE("YUVA plane type is not supported");
            work->result = C2_BAD_VALUE;
            return;

        default:
            ALOGE("Unrecognized plane type: %d", layout.type);
            work->result = C2_BAD_VALUE;
            return;
    }

    CHECK(NULL != yPlane);
    /* Encode frames */
    VideoEncFrameIO vin, vout;
    memset(&vin, 0, sizeof(vin));
    memset(&vout, 0, sizeof(vout));
    vin.yChan = yPlane;
    vin.uChan = uPlane;
    vin.vChan = vPlane;
    vin.timestamp = (inputTimeStamp + 500) / 1000; // in ms
    vin.height = align(height, 16);
    vin.pitch = align(width, 16);

    uint32_t modTimeMs = 0;
    int32_t nLayer = 0;
    MP4HintTrack hintTrack;
    int32_t outputSize = mOutBufferSize;
    if (!PVEncodeVideoFrame(mHandle, &vin, &vout, &modTimeMs, outPtr, &outputSize, &nLayer) ||
        !PVGetHintTrack(mHandle, &hintTrack)) {
        ALOGE("Failed to encode frame or get hint track at frame %" PRId64, mNumInputFrames);
        mSignalledError = true;
        work->result = C2_CORRUPTED;
        return;
    }
    ALOGV("outputSize filled : %d", outputSize);
    ++mNumInputFrames;
    CHECK(NULL == PVGetOverrunBuffer(mHandle));

    fillEmptyWork(work);
    if (outputSize) {
        std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, outputSize);
        work->worklets.front()->output.ordinal.timestamp = inputTimeStamp;
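        // A hint-track CodeType of 0 corresponds to an intra-coded picture, so
        // mark the output buffer as a sync frame.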
        if (hintTrack.CodeType == 0) {
            buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
                    0u /* stream id */, C2Config::SYNC_FRAME));
        }
        work->worklets.front()->output.buffers.push_back(buffer);
    }
    if (eos) {
        mSignalledOutputEos = true;
    }

    mConversionBuffersInUse.erase(yPlane);
}

c2_status_t C2SoftMpeg4Enc::drain(
        uint32_t drainMode,
        const std::shared_ptr<C2BlockPool> &pool) {
    (void)pool;
    if (drainMode == NO_DRAIN) {
        ALOGW("drain with NO_DRAIN: no-op");
        return C2_OK;
    }
    if (drainMode == DRAIN_CHAIN) {
        ALOGW("DRAIN_CHAIN not supported");
        return C2_OMITTED;
    }

    return C2_OK;
}

class C2SoftMpeg4EncFactory : public C2ComponentFactory {
  public:
    C2SoftMpeg4EncFactory()
        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
              GetCodec2PlatformComponentStore()->getParamReflector())) {}

    virtual c2_status_t createComponent(
            c2_node_id_t id,
            std::shared_ptr<C2Component>* const component,
            std::function<void(C2Component*)> deleter) override {
        *component = std::shared_ptr<C2Component>(
            new C2SoftMpeg4Enc(
                COMPONENT_NAME, id,
                std::make_shared<C2SoftMpeg4Enc::IntfImpl>(mHelper)),
            deleter);
        return C2_OK;
    }

    virtual c2_status_t createInterface(
            c2_node_id_t id,
            std::shared_ptr<C2ComponentInterface>* const interface,
            std::function<void(C2ComponentInterface*)> deleter) override {
        *interface = std::shared_ptr<C2ComponentInterface>(
            new SimpleInterface<C2SoftMpeg4Enc::IntfImpl>(
                COMPONENT_NAME, id,
                std::make_shared<C2SoftMpeg4Enc::IntfImpl>(mHelper)),
            deleter);
        return C2_OK;
    }

    virtual ~C2SoftMpeg4EncFactory() override = default;

  private:
    std::shared_ptr<C2ReflectorHelper> mHelper;
};

}  // namespace android

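// Entry points resolved by the Codec2 framework when this component library is
// dynamically loaded.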
__attribute__((cfi_canonical_jump_table))
extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
    ALOGV("in %s", __func__);
    return new ::android::C2SoftMpeg4EncFactory();
}

__attribute__((cfi_canonical_jump_table))
extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
    ALOGV("in %s", __func__);
    delete factory;
}