// Copyright 2023 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//#define LOG_NDEBUG 0
#define LOG_TAG "EncodeComponent"

#include <v4l2_codec2/components/EncodeComponent.h>

#include <inttypes.h>

#include <algorithm>
#include <utility>

#include <C2AllocatorGralloc.h>
#include <C2PlatformSupport.h>
#include <C2Work.h>
#include <android/hardware/graphics/common/1.0/types.h>
#include <base/bind.h>
#include <base/bind_helpers.h>
#include <log/log.h>
#include <media/stagefright/MediaDefs.h>
#include <ui/GraphicBuffer.h>
#include <ui/Size.h>

#include <v4l2_codec2/common/EncodeHelpers.h>
#include <v4l2_codec2/common/FormatConverter.h>
#include <v4l2_codec2/components/BitstreamBuffer.h>
#include <v4l2_codec2/components/EncodeInterface.h>
#include <v4l2_codec2/components/VideoEncoder.h>

using android::hardware::graphics::common::V1_0::BufferUsage;

namespace android {

namespace {
// Create an input frame from the specified graphic block.
std::unique_ptr<VideoEncoder::InputFrame> createInputFrame(
        const C2ConstGraphicBlock& block, VideoPixelFormat format,
        const std::vector<VideoFramePlane>& planes, uint64_t index, int64_t timestamp) {
    std::vector<int> fds;
    const C2Handle* const handle = block.handle();
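    // Collect the buffer file descriptors stored in the block's native handle.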
    for (int i = 0; i < handle->numFds; i++) {
        fds.emplace_back(handle->data[i]);
    }

    return std::make_unique<VideoEncoder::InputFrame>(std::move(fds), planes, format, index,
                                                      timestamp);
}
}  // namespace

// Get the video frame layout from the specified |block|.
// TODO(dstaessens): Clean up code extracting layout from a C2GraphicBlock.
std::optional<std::vector<VideoFramePlane>> getVideoFrameLayout(const C2ConstGraphicBlock& block,
                                                                VideoPixelFormat* format) {
    ALOGV("%s()", __func__);

    // Get the C2PlanarLayout from the graphics block. The C2GraphicView returned by block.map()
    // needs to be released before calling getGraphicBlockInfo(), or the lockYCbCr() call will block
    // indefinitely.
    C2PlanarLayout layout = block.map().get().layout();

    // layout() above cannot fill in the layout information and memsets it to 0 instead if the
    // input format is IMPLEMENTATION_DEFINED and its backing format is RGB. In that case we fill
    // in the layout using ImplDefinedToRGBXMap.
    if (layout.type == C2PlanarLayout::TYPE_UNKNOWN) {
        std::unique_ptr<ImplDefinedToRGBXMap> idMap = ImplDefinedToRGBXMap::create(block);
        if (idMap == nullptr) {
            ALOGE("Unable to parse RGBX_8888 from IMPLEMENTATION_DEFINED");
            return std::nullopt;
        }
        layout.type = C2PlanarLayout::TYPE_RGB;
        // These parameters are used in the TYPE_RGB case below.
        layout.numPlanes = 3;   // same value as in C2AllocationGralloc::map()
        layout.rootPlanes = 1;  // same value as in C2AllocationGralloc::map()
        layout.planes[C2PlanarLayout::PLANE_R].offset = idMap->offset();
        layout.planes[C2PlanarLayout::PLANE_R].rowInc = idMap->rowInc();
    }

    std::vector<uint32_t> offsets(layout.numPlanes, 0u);
    std::vector<uint32_t> strides(layout.numPlanes, 0u);
    switch (layout.type) {
    case C2PlanarLayout::TYPE_YUV: {
        android_ycbcr ycbcr = getGraphicBlockInfo(block);
        offsets[C2PlanarLayout::PLANE_Y] =
                static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.y));
        offsets[C2PlanarLayout::PLANE_U] =
                static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.cb));
        offsets[C2PlanarLayout::PLANE_V] =
                static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.cr));
        strides[C2PlanarLayout::PLANE_Y] = static_cast<uint32_t>(ycbcr.ystride);
        strides[C2PlanarLayout::PLANE_U] = static_cast<uint32_t>(ycbcr.cstride);
        strides[C2PlanarLayout::PLANE_V] = static_cast<uint32_t>(ycbcr.cstride);

        bool crcb = false;
        if (offsets[C2PlanarLayout::PLANE_U] > offsets[C2PlanarLayout::PLANE_V]) {
            // Swap offsets, no need to swap strides as they are identical for both chroma planes.
            std::swap(offsets[C2PlanarLayout::PLANE_U], offsets[C2PlanarLayout::PLANE_V]);
            crcb = true;
        }

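        // For semi-planar layouts (e.g. NV12) the chroma samples are interleaved, so the distance
        // between the U and V plane offsets (1 byte) is smaller than the chroma step (2 bytes).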
        bool semiplanar = false;
        if (ycbcr.chroma_step >
            offsets[C2PlanarLayout::PLANE_V] - offsets[C2PlanarLayout::PLANE_U]) {
            semiplanar = true;
        }

        if (!crcb && !semiplanar) {
            *format = VideoPixelFormat::I420;
        } else if (!crcb && semiplanar) {
            *format = VideoPixelFormat::NV12;
        } else if (crcb && !semiplanar) {
            // HACK: pretend YV12 is I420 now since VEA only accepts I420. (YV12 will be used
            //       for input byte-buffer mode).
            // TODO(dstaessens): Is this hack still necessary now we're not using the VEA directly?
            //format = VideoPixelFormat::YV12;
            *format = VideoPixelFormat::I420;
        } else {
            *format = VideoPixelFormat::NV21;
        }
        break;
    }
    case C2PlanarLayout::TYPE_RGB: {
        offsets[C2PlanarLayout::PLANE_R] = layout.planes[C2PlanarLayout::PLANE_R].offset;
        strides[C2PlanarLayout::PLANE_R] =
                static_cast<uint32_t>(layout.planes[C2PlanarLayout::PLANE_R].rowInc);
        *format = VideoPixelFormat::ARGB;
        break;
    }
    default:
        ALOGW("Unknown layout type: %u", static_cast<uint32_t>(layout.type));
        return std::nullopt;
    }

    std::vector<VideoFramePlane> planes;
    for (uint32_t i = 0; i < layout.rootPlanes; ++i) {
        // The mSize field is not used in our case, so we can safely set it to zero.
        planes.push_back({strides[i], offsets[i], 0});
    }
    return planes;
}

// Get the video frame stride for the specified |format| and |size|.
std::optional<uint32_t> getVideoFrameStride(VideoPixelFormat format, ui::Size size) {
    // Fetch a graphic block from the pool to determine the stride.
    std::shared_ptr<C2BlockPool> pool;
    c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool);
    if (status != C2_OK) {
        ALOGE("Failed to get basic graphic block pool (err=%d)", status);
        return std::nullopt;
    }

    // The Android HAL formats don't include I420, so we use YV12 instead and swap the U and V
    // planes when converting to NV12. YCBCR_420_888 is allocated as NV12 by minigbm.
    HalPixelFormat halFormat = (format == VideoPixelFormat::I420) ? HalPixelFormat::YV12
                                                                  : HalPixelFormat::YCBCR_420_888;

    std::shared_ptr<C2GraphicBlock> block;
    status = pool->fetchGraphicBlock(size.width, size.height, static_cast<uint32_t>(halFormat),
                                     C2MemoryUsage(C2MemoryUsage::CPU_READ), &block);
    if (status != C2_OK) {
        ALOGE("Failed to fetch graphic block (err=%d)", status);
        return std::nullopt;
    }

    const C2ConstGraphicBlock constBlock = block->share(C2Rect(size.width, size.height), C2Fence());
    VideoPixelFormat pixelFormat;
    std::optional<std::vector<VideoFramePlane>> planes =
            getVideoFrameLayout(constBlock, &pixelFormat);
    if (!planes || planes.value().empty()) {
        ALOGE("Failed to get video frame layout from block");
        return std::nullopt;
    }

    return planes.value()[0].mStride;
}

EncodeComponent::EncodeComponent(C2String name, c2_node_id_t id,
                                 std::shared_ptr<EncodeInterface> interface)
      : mName(name),
        mId(id),
        mInterface(std::move(interface)),
        mComponentState(ComponentState::LOADED) {
    ALOGV("%s(%s)", __func__, name.c_str());
}

EncodeComponent::~EncodeComponent() {
    ALOGV("%s()", __func__);

    // Stop encoder thread and invalidate pointers if component wasn't stopped before destroying.
    if (mEncoderThread.IsRunning() && !mEncoderTaskRunner->RunsTasksInCurrentSequence()) {
        mEncoderTaskRunner->PostTask(
                FROM_HERE, ::base::BindOnce(
                                   [](::base::WeakPtrFactory<EncodeComponent>* weakPtrFactory,
                                      std::unique_ptr<VideoEncoder>* encoder) {
                                       weakPtrFactory->InvalidateWeakPtrs();
                                       encoder->reset();
                                   },
                                   &mWeakThisFactory, &mEncoder));
        mEncoderThread.Stop();
    }

    ALOGV("%s(): done", __func__);
}

c2_status_t EncodeComponent::start() {
    ALOGV("%s()", __func__);

    // Lock while starting, to synchronize start/stop/reset/release calls.
    std::lock_guard<std::mutex> lock(mComponentLock);

    // According to the specification start() should only be called in the LOADED state.
    if (mComponentState != ComponentState::LOADED) {
        return C2_BAD_STATE;
    }

    if (!mEncoderThread.Start()) {
        ALOGE("Failed to start encoder thread");
        return C2_CORRUPTED;
    }
    mEncoderTaskRunner = mEncoderThread.task_runner();
    mWeakThis = mWeakThisFactory.GetWeakPtr();

    // Initialize the encoder on the encoder thread.
    ::base::WaitableEvent done;
    bool success = false;
    mEncoderTaskRunner->PostTask(
            FROM_HERE, ::base::Bind(&EncodeComponent::startTask, mWeakThis, &success, &done));
    done.Wait();

    if (!success) {
        ALOGE("Failed to initialize encoder");
        return C2_CORRUPTED;
    }

    setComponentState(ComponentState::RUNNING);
    return C2_OK;
}

c2_status_t EncodeComponent::stop() {
    ALOGV("%s()", __func__);

    // Lock while stopping, to synchronize start/stop/reset/release calls.
    std::lock_guard<std::mutex> lock(mComponentLock);

    if (mComponentState != ComponentState::RUNNING && mComponentState != ComponentState::ERROR) {
        return C2_BAD_STATE;
    }

    // Return immediately if the component is already stopped.
    if (!mEncoderThread.IsRunning()) {
        return C2_OK;
    }

    // Wait for the component to stop.
    ::base::WaitableEvent done;
    mEncoderTaskRunner->PostTask(FROM_HERE,
                                 ::base::BindOnce(&EncodeComponent::stopTask, mWeakThis, &done));
    done.Wait();
    mEncoderThread.Stop();

    setComponentState(ComponentState::LOADED);

    ALOGV("%s() - done", __func__);
    return C2_OK;
}

c2_status_t EncodeComponent::reset() {
    ALOGV("%s()", __func__);

    // The interface specification says: "This method MUST be supported in all (including tripped)
    // states other than released".
    if (mComponentState == ComponentState::UNLOADED) {
        return C2_BAD_STATE;
    }

    // TODO(dstaessens): Reset the component's interface to default values.
    stop();

    return C2_OK;
}

c2_status_t EncodeComponent::release() {
    ALOGV("%s()", __func__);

    // The interface specification says: "This method MUST be supported in stopped state.", but the
    // release method seems to be called in other states as well.
    reset();

    setComponentState(ComponentState::UNLOADED);
    return C2_OK;
}

c2_status_t EncodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* const items) {
    ALOGV("%s()", __func__);

    if (mComponentState != ComponentState::RUNNING) {
        ALOGE("Trying to queue work item while component is not running");
        return C2_BAD_STATE;
    }

    while (!items->empty()) {
        mEncoderTaskRunner->PostTask(FROM_HERE,
                                     ::base::BindOnce(&EncodeComponent::queueTask, mWeakThis,
                                                      std::move(items->front())));
        items->pop_front();
    }

    return C2_OK;
}

c2_status_t EncodeComponent::drain_nb(drain_mode_t mode) {
    ALOGV("%s()", __func__);

    if (mode == DRAIN_CHAIN) {
        return C2_OMITTED;  // Tunneling is not supported for now.
    }

    if (mComponentState != ComponentState::RUNNING) {
        return C2_BAD_STATE;
    }

    mEncoderTaskRunner->PostTask(FROM_HERE,
                                 ::base::BindOnce(&EncodeComponent::drainTask, mWeakThis, mode));
    return C2_OK;
}

c2_status_t EncodeComponent::flush_sm(flush_mode_t mode,
                                      std::list<std::unique_ptr<C2Work>>* const flushedWork) {
    ALOGV("%s()", __func__);

    if (mode != FLUSH_COMPONENT) {
        return C2_OMITTED;  // Tunneling is not supported for now.
    }

    if (mComponentState != ComponentState::RUNNING) {
        return C2_BAD_STATE;
    }

    // Work that can be immediately discarded should be returned in |flushedWork|. This method may
    // be momentarily blocking but must return within 5ms, which should give us enough time to
    // immediately abandon all non-started work on the encoder thread. We can return all work that
    // can't be immediately discarded using onWorkDone() later.
    ::base::WaitableEvent done;
    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&EncodeComponent::flushTask, mWeakThis,
                                                             &done, flushedWork));
    done.Wait();

    return C2_OK;
}

c2_status_t EncodeComponent::announce_nb(const std::vector<C2WorkOutline>& items) {
    return C2_OMITTED;  // Tunneling is not supported for now.
}

c2_status_t EncodeComponent::setListener_vb(const std::shared_ptr<Listener>& listener,
                                            c2_blocking_t mayBlock) {
    ALOG_ASSERT(mComponentState != ComponentState::UNLOADED);

    // Lock so we're sure the component isn't currently starting or stopping.
    std::lock_guard<std::mutex> lock(mComponentLock);

    // If the encoder thread is not running it's safe to update the listener directly.
    if (!mEncoderThread.IsRunning()) {
        mListener = listener;
        return C2_OK;
    }

    // The listener should be updated before exiting this function. If called while the component is
    // currently running we should be allowed to block, as we can only change the listener on the
    // encoder thread.
    ALOG_ASSERT(mayBlock == c2_blocking_t::C2_MAY_BLOCK);

    ::base::WaitableEvent done;
    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&EncodeComponent::setListenerTask,
                                                             mWeakThis, listener, &done));
    done.Wait();

    return C2_OK;
}

std::shared_ptr<C2ComponentInterface> EncodeComponent::intf() {
    return std::make_shared<SimpleInterface<EncodeInterface>>(mName.c_str(), mId, mInterface);
}

void EncodeComponent::startTask(bool* success, ::base::WaitableEvent* done) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    *success = initializeEncoder();
    done->Signal();
}

void EncodeComponent::stopTask(::base::WaitableEvent* done) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // Flushing the encoder will abort all pending work.
    flush();

    mInputFormatConverter.reset();
    mInputPixelFormat = VideoPixelFormat::UNKNOWN;
    mInputLayout.clear();

    mEncoder.reset();
    mOutputBlockPool.reset();

    // Invalidate all weak pointers so no more functions will be executed on the encoder thread.
    mWeakThisFactory.InvalidateWeakPtrs();

    done->Signal();
}

void EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoder);

    // Currently only a single worklet per work item is supported. An input buffer should always be
    // supplied unless this is a drain or CSD request.
    ALOG_ASSERT(work->input.buffers.size() <= 1u && work->worklets.size() == 1u);

    // Set the default values for the output worklet.
    work->worklets.front()->output.flags = static_cast<C2FrameData::flags_t>(0);
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;

    uint64_t index = work->input.ordinal.frameIndex.peeku();
    int64_t timestamp = static_cast<int64_t>(work->input.ordinal.timestamp.peeku());
    bool endOfStream = work->input.flags & C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("Queuing next encode (index: %" PRIu64 ", timestamp: %" PRId64 ", EOS: %d)", index,
          timestamp, endOfStream);

    // The codec 2.0 framework might queue an empty CSD request, but this is currently not
    // supported. We will return the CSD with the first encoded buffer work.
    if (work->input.buffers.empty() && !endOfStream) {
        ALOGV("Discarding empty CSD request");
        reportWork(std::move(work));
        return;
    }

    // By the time we get an input buffer, the output block pool should be configured.
    if (!mOutputBlockPool && !getBlockPool()) {
        reportError(C2_CORRUPTED);
        return;
    }

    // If this is the first input frame, create an input format converter if the V4L2 device doesn't
    // support the requested input format.
    if ((mInputPixelFormat == VideoPixelFormat::UNKNOWN) && !work->input.buffers.empty()) {
        VideoPixelFormat format = VideoPixelFormat::UNKNOWN;
        if (!getVideoFrameLayout(work->input.buffers.front()->data().graphicBlocks().front(),
                                 &format)) {
            ALOGE("Failed to get input block's layout");
            reportError(C2_CORRUPTED);
            return;
        }
        if (mEncoder->inputFormat() != format) {
            ALOG_ASSERT(!mInputFormatConverter);
            ALOGV("Creating input format converter (%s)",
                  videoPixelFormatToString(mEncoder->inputFormat()).c_str());
            mInputFormatConverter =
                    FormatConverter::create(mEncoder->inputFormat(), mEncoder->visibleSize(),
                                            VideoEncoder::kInputBufferCount, mEncoder->codedSize());
            if (!mInputFormatConverter) {
                ALOGE("Failed to create input format converter");
                reportError(C2_CORRUPTED);
                return;
            }
        }
    }

    // If conversion is required but no free buffers are available we queue the work item.
    if (mInputFormatConverter && !mInputFormatConverter->isReady()) {
        ALOGV("Input format converter ran out of buffers");
        mInputConverterQueue.push(std::move(work));
        return;
    }

    // If we have data to encode send it to the encoder. If conversion is required we will first
    // convert the data to the requested pixel format.
    if (!work->input.buffers.empty()) {
        C2ConstGraphicBlock inputBlock =
                work->input.buffers.front()->data().graphicBlocks().front();
        if (mInputFormatConverter) {
            ALOGV("Converting input block (index: %" PRIu64 ")", index);
            c2_status_t status =
                    mInputFormatConverter->convertBlock(index, inputBlock, &inputBlock);
            if (status != C2_OK) {
                ALOGE("Failed to convert input block (index: %" PRIu64 ")", index);
                reportError(status);
                return;
            }
        } else {
            // The Android encoder framework reuses the same GPU buffers as inputs and doesn't call
            // lock/unlock explicitly between writes. If there is format conversion this is fine,
            // since we first read back what was written and copy it into another buffer. When
            // there is no format conversion this causes a sync issue on ARCVM, as host-side
            // buffers never get updated. Fix this by explicitly calling lock/unlock before sending
            // the buffer to the encoder.
            const C2Handle* handle = inputBlock.handle();
            uint32_t width, height, format, stride, generation, igbpSlot;
            uint64_t usage, igbpId;
            _UnwrapNativeCodec2GrallocMetadata(handle, &width, &height, &format, &usage, &stride,
                                               &generation, &igbpId, &igbpSlot);
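            // The do/while(0) only exists so we can bail out early with |break| when the buffer
            // isn't CPU-writable or can't be locked.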
            do {
                if (!(usage & GRALLOC_USAGE_SW_WRITE_MASK)) break;
                native_handle_t* gralloc_handle = UnwrapNativeCodec2GrallocHandle(handle);
                if (nullptr == gralloc_handle) break;
                sp<GraphicBuffer> buffer =
                        new GraphicBuffer(gralloc_handle, GraphicBuffer::CLONE_HANDLE, width,
                                          height, format, 1, usage, stride);
                native_handle_delete(gralloc_handle);
                void* pixels;
                if (buffer->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, &pixels)) break;
                buffer->unlock();
            } while (0);
        }
        if (!encode(inputBlock, index, timestamp)) {
            return;
        }
    }

    mWorkQueue.push_back(std::move(work));
    if (endOfStream) {
        mEncoder->drain();
    }
}

void EncodeComponent::drainTask(drain_mode_t /*drainMode*/) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // We can only start draining if all work has been queued in the encoder, so we mark the last
    // item waiting for conversion as EOS if required.
    if (!mInputConverterQueue.empty()) {
        C2Work* work = mInputConverterQueue.back().get();
        work->input.flags = static_cast<C2FrameData::flags_t>(work->input.flags |
                                                              C2FrameData::FLAG_END_OF_STREAM);
        return;
    }

    // Mark the last item in the output work queue as EOS, so we will only report it as finished
    // after draining has completed.
    if (!mWorkQueue.empty()) {
        ALOGV("Starting drain and marking last item in output work queue as EOS");
        C2Work* work = mWorkQueue.back().get();
        work->input.flags = static_cast<C2FrameData::flags_t>(work->input.flags |
                                                              C2FrameData::FLAG_END_OF_STREAM);
        mEncoder->drain();
    }
}

void EncodeComponent::onDrainDone(bool success) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(!mWorkQueue.empty());

    if (!success) {
        ALOGE("Draining the encoder failed");
        reportError(C2_CORRUPTED);
        return;
    }

    // Find the first work item marked as EOS. This might not be the first item in the queue, as
    // previous buffers in the queue might still be waiting for their associated input buffers.
    auto it = std::find_if(
            mWorkQueue.cbegin(), mWorkQueue.cend(), [](const std::unique_ptr<C2Work>& work) {
                return ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
                        !(work->worklets.back()->output.flags & C2FrameData::FLAG_END_OF_STREAM));
            });
    if (it == mWorkQueue.end()) {
        ALOGW("No EOS work item found in queue");
        return;
    }

    // Mark the item in the output work queue as EOS done.
    C2Work* eosWork = it->get();
    eosWork->worklets.back()->output.flags = C2FrameData::FLAG_END_OF_STREAM;

    // Draining is done which means all buffers on the device output queue have been returned, but
    // not all buffers on the device input queue might have been returned yet.
    if ((eosWork != mWorkQueue.front().get()) || !isWorkDone(*eosWork)) {
        ALOGV("Draining done, waiting for input buffers to be returned");
        return;
    }

    ALOGV("Draining done");
    reportWork(std::move(mWorkQueue.front()));
    mWorkQueue.pop_front();
}

void EncodeComponent::flushTask(::base::WaitableEvent* done,
                                std::list<std::unique_ptr<C2Work>>* const flushedWork) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // Move all work that can immediately be aborted to flushedWork, and notify the caller.
    if (flushedWork) {
        while (!mInputConverterQueue.empty()) {
            std::unique_ptr<C2Work> work = std::move(mInputConverterQueue.front());
            work->input.buffers.clear();
            flushedWork->push_back(std::move(work));
            mInputConverterQueue.pop();
        }
    }
    done->Signal();

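    // The caller has been signaled; work that can't be discarded immediately will be reported
    // through onWorkDone() once the flush below completes.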
    flush();
}

void EncodeComponent::setListenerTask(const std::shared_ptr<Listener>& listener,
                                      ::base::WaitableEvent* done) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    mListener = listener;
    done->Signal();
}

bool EncodeComponent::updateEncodingParameters() {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // Ask device to change bitrate if it's different from the currently configured bitrate. The C2
    // framework doesn't offer a parameter to configure the peak bitrate, so we'll use a multiple of
    // the target bitrate here. The peak bitrate is only used if the bitrate mode is set to VBR.
    uint32_t bitrate = mInterface->getBitrate();
    if (mBitrate != bitrate) {
        ALOG_ASSERT(bitrate > 0u);
        ALOGV("Setting bitrate to %u", bitrate);
        if (!mEncoder->setBitrate(bitrate)) {
            reportError(C2_CORRUPTED);
            return false;
        }
        mBitrate = bitrate;

        if (mBitrateMode == C2Config::BITRATE_VARIABLE) {
            ALOGV("Setting peak bitrate to %u", bitrate * VideoEncoder::kPeakBitrateMultiplier);
            // TODO(b/190336806): Our stack doesn't support dynamic peak bitrate changes yet, ignore
            // errors for now.
            mEncoder->setPeakBitrate(bitrate * VideoEncoder::kPeakBitrateMultiplier);
        }
    }

    // Ask device to change framerate if it's different from the currently configured framerate.
    uint32_t framerate = static_cast<uint32_t>(std::round(mInterface->getFramerate()));
    if (mFramerate != framerate) {
        ALOG_ASSERT(framerate > 0u);
        ALOGV("Setting framerate to %u", framerate);
        if (!mEncoder->setFramerate(framerate)) {
            ALOGE("Requesting framerate change failed");
            reportError(C2_CORRUPTED);
            return false;
        }
        mFramerate = framerate;
    }

    // Check whether an explicit key frame was requested, and if so reset the key frame counter to
    // immediately request a key frame.
    C2StreamRequestSyncFrameTuning::output requestKeyFrame;
    c2_status_t status = mInterface->query({&requestKeyFrame}, {}, C2_DONT_BLOCK, nullptr);
    if (status != C2_OK) {
        ALOGE("Failed to query interface for key frame request (error code: %d)", status);
        reportError(status);
        return false;
    }
    if (requestKeyFrame.value == C2_TRUE) {
        mEncoder->requestKeyframe();
        requestKeyFrame.value = C2_FALSE;
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        status = mInterface->config({&requestKeyFrame}, C2_MAY_BLOCK, &failures);
        if (status != C2_OK) {
            ALOGE("Failed to reset key frame request on interface (error code: %d)", status);
            reportError(status);
            return false;
        }
    }

    return true;
}

bool EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int64_t timestamp) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoder);

    ALOGV("Encoding input block (index: %" PRIu64 ", timestamp: %" PRId64 ", size: %dx%d)", index,
          timestamp, block.width(), block.height());

    // If this is the first input frame, determine the pixel format and layout.
    if (mInputPixelFormat == VideoPixelFormat::UNKNOWN) {
        ALOG_ASSERT(mInputLayout.empty());
        VideoPixelFormat format = VideoPixelFormat::UNKNOWN;
        std::optional<std::vector<VideoFramePlane>> inputLayout =
                getVideoFrameLayout(block, &format);
        if (!inputLayout) {
            ALOGE("Failed to get input block's layout");
            reportError(C2_CORRUPTED);
            return false;
        }
        mInputPixelFormat = format;
        mInputLayout = std::move(*inputLayout);
    }

    // Dynamically adjust framerate based on the frame's timestamp if required.
    constexpr int64_t kMaxFramerateDiff = 5;
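    // Timestamps are expressed in microseconds, so 1000000 divided by the delta between two
    // consecutive frames gives the instantaneous framerate.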
    if (mLastFrameTime && (timestamp > *mLastFrameTime)) {
        int64_t newFramerate = std::max(
                static_cast<int64_t>(std::round(1000000.0 / (timestamp - *mLastFrameTime))),
                static_cast<int64_t>(1LL));
        if (abs(mFramerate - newFramerate) > kMaxFramerateDiff) {
            ALOGV("Adjusting framerate to %" PRId64 " based on frame timestamps", newFramerate);
            mInterface->setFramerate(static_cast<uint32_t>(newFramerate));
        }
    }
    mLastFrameTime = timestamp;

    // Update dynamic encoding parameters (bitrate, framerate, key frame) if requested.
    if (!updateEncodingParameters()) return false;

    // Create an input frame from the graphic block.
    std::unique_ptr<VideoEncoder::InputFrame> frame =
            createInputFrame(block, mInputPixelFormat, mInputLayout, index, timestamp);
    if (!frame) {
        ALOGE("Failed to create video frame from input block (index: %" PRIu64
              ", timestamp: %" PRId64 ")",
              index, timestamp);
        reportError(C2_CORRUPTED);
        return false;
    }

    if (!mEncoder->encode(std::move(frame))) {
        return false;
    }

    return true;
}

void EncodeComponent::flush() {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    mEncoder->flush();

    // Report all queued work items as aborted.
    std::list<std::unique_ptr<C2Work>> abortedWorkItems;
    while (!mInputConverterQueue.empty()) {
        std::unique_ptr<C2Work> work = std::move(mInputConverterQueue.front());
        work->result = C2_NOT_FOUND;
        work->input.buffers.clear();
        abortedWorkItems.push_back(std::move(work));
        mInputConverterQueue.pop();
    }
    while (!mWorkQueue.empty()) {
        std::unique_ptr<C2Work> work = std::move(mWorkQueue.front());
        // Return the buffer to the input format converter if required.
        if (mInputFormatConverter && work->input.buffers.empty()) {
            mInputFormatConverter->returnBlock(work->input.ordinal.frameIndex.peeku());
        }
        work->result = C2_NOT_FOUND;
        work->input.buffers.clear();
        abortedWorkItems.push_back(std::move(work));
        mWorkQueue.pop_front();
    }
    if (!abortedWorkItems.empty()) {
        mListener->onWorkDone_nb(weak_from_this(), std::move(abortedWorkItems));
    }
}

void EncodeComponent::fetchOutputBlock(uint32_t size, std::unique_ptr<BitstreamBuffer>* buffer) {
    ALOGV("Fetching linear block (size: %u)", size);
    std::shared_ptr<C2LinearBlock> block;
    c2_status_t status = mOutputBlockPool->fetchLinearBlock(
            size,
            C2MemoryUsage(C2MemoryUsage::CPU_READ |
                          static_cast<uint64_t>(BufferUsage::VIDEO_ENCODER)),
            &block);
    if (status != C2_OK) {
        ALOGE("Failed to fetch linear block (error: %d)", status);
        reportError(status);
    }

    *buffer = std::make_unique<BitstreamBuffer>(std::move(block), 0, size);
}

void EncodeComponent::onInputBufferDone(uint64_t index) {
    ALOGV("%s(): Input buffer done (index: %" PRIu64 ")", __func__, index);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoder);

    // There are no guarantees the input buffers are returned in order, so we need to find the work
    // item which this buffer belongs to.
    C2Work* work = getWorkByIndex(index);
    if (!work) {
        ALOGE("Failed to find work associated with input buffer %" PRIu64, index);
        reportError(C2_CORRUPTED);
        return;
    }

    // We're done using the input block, release the reference to return the block to the client.
    LOG_ASSERT(!work->input.buffers.empty());
    work->input.buffers.front().reset();

    // Return the block to the converter if required. If we have buffers awaiting conversion, we can
    // now attempt to convert and encode them again.
    if (mInputFormatConverter) {
        c2_status_t status = mInputFormatConverter->returnBlock(index);
        if (status != C2_OK) {
            reportError(status);
            return;
        }
        while (!mInputConverterQueue.empty() && mInputFormatConverter->isReady()) {
            std::unique_ptr<C2Work> work = std::move(mInputConverterQueue.front());
            mInputConverterQueue.pop();
            queueTask(std::move(work));
        }
    }

    // Return all completed work items. The work item might have been waiting for its input buffer
    // to be returned, in which case we can report it as completed now. As input buffers are not
    // necessarily returned in order we might be able to return multiple ready work items now.
    while (!mWorkQueue.empty() && isWorkDone(*mWorkQueue.front())) {
        reportWork(std::move(mWorkQueue.front()));
        mWorkQueue.pop_front();
    }
}

void EncodeComponent::onOutputBufferDone(size_t dataSize, int64_t timestamp, bool keyFrame,
                                         std::unique_ptr<BitstreamBuffer> buffer) {
    ALOGV("%s(): output buffer done (timestamp: %" PRId64 ", size: %zu, keyframe: %d)", __func__,
          timestamp, dataSize, keyFrame);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(buffer->dmabuf);

    C2ConstLinearBlock constBlock =
            buffer->dmabuf->share(buffer->dmabuf->offset(), dataSize, C2Fence());

    // If no CSD (codec-specific data, e.g. SPS for H.264) has been submitted yet, we expect this
    // output block to contain CSD. We only submit the CSD once, even if it's attached to each key
    // frame.
    if (mExtractCSD) {
        ALOGV("No CSD submitted yet, extracting CSD");
        std::unique_ptr<C2StreamInitDataInfo::output> csd;
        C2ReadView view = constBlock.map().get();
        if (!extractCSDInfo(&csd, view.data(), view.capacity())) {
            ALOGE("Failed to extract CSD");
            reportError(C2_CORRUPTED);
            return;
        }

        // Attach the CSD to the first item in our output work queue.
        LOG_ASSERT(!mWorkQueue.empty());
        C2Work* work = mWorkQueue.front().get();
        work->worklets.front()->output.configUpdate.push_back(std::move(csd));
        mExtractCSD = false;
    }

    // Get the work item associated with the timestamp.
    C2Work* work = getWorkByTimestamp(timestamp);
    if (!work) {
        // It's possible we got an empty CSD request with timestamp 0, which we currently just
        // discard.
        if (timestamp != 0) {
            reportError(C2_CORRUPTED);
        }
        return;
    }

    std::shared_ptr<C2Buffer> linearBuffer = C2Buffer::CreateLinearBuffer(std::move(constBlock));
    if (!linearBuffer) {
        ALOGE("Failed to create linear buffer from block");
        reportError(C2_CORRUPTED);
        return;
    }

    if (keyFrame) {
        linearBuffer->setInfo(
                std::make_shared<C2StreamPictureTypeMaskInfo::output>(0u, C2Config::SYNC_FRAME));
    }
    work->worklets.front()->output.buffers.emplace_back(std::move(linearBuffer));

    // We can report the work item as completed if its associated input buffer has also been
    // released. As output buffers are not necessarily returned in order we might be able to return
    // multiple ready work items now.
    while (!mWorkQueue.empty() && isWorkDone(*mWorkQueue.front())) {
        reportWork(std::move(mWorkQueue.front()));
        mWorkQueue.pop_front();
    }
}

C2Work* EncodeComponent::getWorkByIndex(uint64_t index) {
    ALOGV("%s(): getting work item (index: %" PRIu64 ")", __func__, index);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    auto it = std::find_if(mWorkQueue.begin(), mWorkQueue.end(),
                           [index](const std::unique_ptr<C2Work>& w) {
                               return w->input.ordinal.frameIndex.peeku() == index;
                           });
    if (it == mWorkQueue.end()) {
        ALOGE("Failed to find work (index: %" PRIu64 ")", index);
        return nullptr;
    }
    return it->get();
}

C2Work* EncodeComponent::getWorkByTimestamp(int64_t timestamp) {
    ALOGV("%s(): getting work item (timestamp: %" PRId64 ")", __func__, timestamp);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(timestamp >= 0);

    // Find the work with the specified timestamp by looping over the output work queue. This
    // should be very fast as the output work queue will never be longer than a few items. Ignore
    // empty work items that are marked as EOS, as their timestamp might clash with other work
    // items.
    auto it = std::find_if(
            mWorkQueue.begin(), mWorkQueue.end(), [timestamp](const std::unique_ptr<C2Work>& w) {
                return !(w->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
                       w->input.ordinal.timestamp.peeku() == static_cast<uint64_t>(timestamp);
            });
    if (it == mWorkQueue.end()) {
        ALOGE("Failed to find work (timestamp: %" PRId64 ")", timestamp);
        return nullptr;
    }
    return it->get();
}

bool EncodeComponent::isWorkDone(const C2Work& work) const {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    if ((work.input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
        !(work.worklets.front()->output.flags & C2FrameData::FLAG_END_OF_STREAM)) {
        ALOGV("Work item %" PRIu64 " is marked as EOS but draining has not finished yet",
              work.input.ordinal.frameIndex.peeku());
        return false;
    }

    if (!work.input.buffers.empty() && work.input.buffers.front()) {
        ALOGV("Input buffer associated with work item %" PRIu64 " not returned yet",
              work.input.ordinal.frameIndex.peeku());
        return false;
    }

    // If the work item had an input buffer to be encoded, it should have an output buffer set.
    if (!work.input.buffers.empty() && work.worklets.front()->output.buffers.empty()) {
        ALOGV("Output buffer associated with work item %" PRIu64 " not returned yet",
              work.input.ordinal.frameIndex.peeku());
        return false;
    }

    return true;
}

void EncodeComponent::reportWork(std::unique_ptr<C2Work> work) {
    ALOG_ASSERT(work);
    ALOGV("%s(): Reporting work item as finished (index: %llu, timestamp: %llu)", __func__,
          work->input.ordinal.frameIndex.peekull(), work->input.ordinal.timestamp.peekull());
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    work->result = C2_OK;
    work->workletsProcessed = static_cast<uint32_t>(work->worklets.size());

    std::list<std::unique_ptr<C2Work>> finishedWorkList;
    finishedWorkList.emplace_back(std::move(work));
    mListener->onWorkDone_nb(weak_from_this(), std::move(finishedWorkList));
}

bool EncodeComponent::getBlockPool() {
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    auto sharedThis = weak_from_this().lock();
    if (!sharedThis) {
        ALOGI("%s(): EncodeComponent instance is already destroyed", __func__);
        return false;
    }

    C2BlockPool::local_id_t poolId = mInterface->getBlockPoolId();
    if (poolId == C2BlockPool::BASIC_LINEAR) {
        ALOGW("Using unoptimized linear block pool");
    }
    c2_status_t status = GetCodec2BlockPool(poolId, std::move(sharedThis), &mOutputBlockPool);
    if (status != C2_OK || !mOutputBlockPool) {
        ALOGE("Failed to get output block pool, error: %d", status);
        return false;
    }
    return true;
}

void EncodeComponent::reportError(c2_status_t error) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // TODO(dstaessens): Report all pending work items as finished upon failure.
    std::lock_guard<std::mutex> lock(mComponentLock);
    if (mComponentState != ComponentState::ERROR) {
        setComponentState(ComponentState::ERROR);
        mListener->onError_nb(weak_from_this(), static_cast<uint32_t>(error));
    }
}

void EncodeComponent::setComponentState(ComponentState state) {
    // Check whether the state change is valid.
    switch (state) {
    case ComponentState::UNLOADED:
        ALOG_ASSERT(mComponentState == ComponentState::LOADED);
        break;
    case ComponentState::LOADED:
        ALOG_ASSERT(mComponentState == ComponentState::UNLOADED ||
                    mComponentState == ComponentState::RUNNING ||
                    mComponentState == ComponentState::ERROR);
        break;
    case ComponentState::RUNNING:
        ALOG_ASSERT(mComponentState == ComponentState::LOADED);
        break;
    case ComponentState::ERROR:
        break;
    }

    ALOGV("Changed component state from %s to %s", componentStateToString(mComponentState),
          componentStateToString(state));
    mComponentState = state;
}

const char* EncodeComponent::componentStateToString(EncodeComponent::ComponentState state) {
    switch (state) {
    case ComponentState::UNLOADED:
        return "UNLOADED";
    case ComponentState::LOADED:
        return "LOADED";
    case ComponentState::RUNNING:
        return "RUNNING";
    case ComponentState::ERROR:
        return "ERROR";
    }
}

}  // namespace android