• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2020 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 //#define LOG_NDEBUG 0
6 #define LOG_TAG "V4L2EncodeComponent"
7 
8 #include <v4l2_codec2/components/V4L2EncodeComponent.h>
9 
10 #include <inttypes.h>
11 
12 #include <algorithm>
13 #include <utility>
14 
15 #include <C2AllocatorGralloc.h>
16 #include <C2PlatformSupport.h>
17 #include <C2Work.h>
18 #include <android/hardware/graphics/common/1.0/types.h>
19 #include <base/bind.h>
20 #include <base/bind_helpers.h>
21 #include <log/log.h>
22 #include <media/stagefright/MediaDefs.h>
23 #include <ui/GraphicBuffer.h>
24 
25 #include <fourcc.h>
26 #include <h264_parser.h>
27 #include <rect.h>
28 #include <v4l2_codec2/common/Common.h>
29 #include <v4l2_codec2/common/EncodeHelpers.h>
30 #include <v4l2_device.h>
31 #include <video_pixel_format.h>
32 
33 using android::hardware::graphics::common::V1_0::BufferUsage;
34 
35 namespace android {
36 
37 namespace {
38 
// Pixel format requested for raw input frames; passed to configureInputFormat() during encoder
// initialization (the device may negotiate a different format, see configureInputFormat()).
const media::VideoPixelFormat kInputPixelFormat = media::VideoPixelFormat::PIXEL_FORMAT_NV12;
40 
// Get the video frame layout from the specified |block|: per-plane byte offsets and strides,
// plus the pixel format deduced from that layout, which is stored in |*format|. Returns
// std::nullopt if the layout type is unknown or cannot be parsed.
// TODO(dstaessens): Clean up code extracting layout from a C2GraphicBlock.
std::optional<std::vector<VideoFramePlane>> getVideoFrameLayout(const C2ConstGraphicBlock& block,
                                                                media::VideoPixelFormat* format) {
    ALOGV("%s()", __func__);

    // Get the C2PlanarLayout from the graphics block. The C2GraphicView returned by block.map()
    // needs to be released before calling getGraphicBlockInfo(), or the lockYCbCr() call will block
    // indefinitely; the temporary view here is destroyed at the end of the full expression.
    C2PlanarLayout layout = block.map().get().layout();

    // The above layout() cannot fill layout information and memset 0 instead if the input format is
    // IMPLEMENTATION_DEFINED and its backed format is RGB. We fill the layout by using
    // ImplDefinedToRGBXMap in the case.
    if (layout.type == C2PlanarLayout::TYPE_UNKNOWN) {
        std::unique_ptr<ImplDefinedToRGBXMap> idMap = ImplDefinedToRGBXMap::Create(block);
        if (idMap == nullptr) {
            ALOGE("Unable to parse RGBX_8888 from IMPLEMENTATION_DEFINED");
            return std::nullopt;
        }
        layout.type = C2PlanarLayout::TYPE_RGB;
        // These parameters would be used in the TYPE_RGB case below.
        layout.numPlanes = 3;   // same value as in C2AllocationGralloc::map()
        layout.rootPlanes = 1;  // same value as in C2AllocationGralloc::map()
        layout.planes[C2PlanarLayout::PLANE_R].offset = idMap->offset();
        layout.planes[C2PlanarLayout::PLANE_R].rowInc = idMap->rowInc();
    }

    std::vector<uint32_t> offsets(layout.numPlanes, 0u);
    std::vector<uint32_t> strides(layout.numPlanes, 0u);
    switch (layout.type) {
    case C2PlanarLayout::TYPE_YUV: {
        android_ycbcr ycbcr = getGraphicBlockInfo(block);
        // NOTE(review): the plane addresses are truncated into 32-bit "offsets" here; this
        // assumes only their relative values matter downstream — confirm.
        offsets[C2PlanarLayout::PLANE_Y] =
                static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.y));
        offsets[C2PlanarLayout::PLANE_U] =
                static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.cb));
        offsets[C2PlanarLayout::PLANE_V] =
                static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.cr));
        strides[C2PlanarLayout::PLANE_Y] = static_cast<uint32_t>(ycbcr.ystride);
        strides[C2PlanarLayout::PLANE_U] = static_cast<uint32_t>(ycbcr.cstride);
        strides[C2PlanarLayout::PLANE_V] = static_cast<uint32_t>(ycbcr.cstride);

        // If the V (Cr) plane precedes the U (Cb) plane in memory the format is Cr-first
        // (YV12/NV21 family).
        bool crcb = false;
        if (offsets[C2PlanarLayout::PLANE_U] > offsets[C2PlanarLayout::PLANE_V]) {
            // Swap offsets, no need to swap strides as they are identical for both chroma planes.
            std::swap(offsets[C2PlanarLayout::PLANE_U], offsets[C2PlanarLayout::PLANE_V]);
            crcb = true;
        }

        // If the distance between the chroma planes is smaller than the chroma step, the chroma
        // samples are interleaved, i.e. the format is semiplanar (NV12/NV21 family).
        bool semiplanar = false;
        if (ycbcr.chroma_step >
            offsets[C2PlanarLayout::PLANE_V] - offsets[C2PlanarLayout::PLANE_U]) {
            semiplanar = true;
        }

        if (!crcb && !semiplanar) {
            *format = media::VideoPixelFormat::PIXEL_FORMAT_I420;
        } else if (!crcb && semiplanar) {
            *format = media::VideoPixelFormat::PIXEL_FORMAT_NV12;
        } else if (crcb && !semiplanar) {
            // HACK: pretend YV12 is I420 now since VEA only accepts I420. (YV12 will be used
            //       for input byte-buffer mode).
            // TODO(dstaessens): Is this hack still necessary now we're not using the VEA directly?
            //format = media::VideoPixelFormat::PIXEL_FORMAT_YV12;
            *format = media::VideoPixelFormat::PIXEL_FORMAT_I420;
        } else {
            *format = media::VideoPixelFormat::PIXEL_FORMAT_NV21;
        }
        break;
    }
    case C2PlanarLayout::TYPE_RGB: {
        offsets[C2PlanarLayout::PLANE_R] = layout.planes[C2PlanarLayout::PLANE_R].offset;
        strides[C2PlanarLayout::PLANE_R] =
                static_cast<uint32_t>(layout.planes[C2PlanarLayout::PLANE_R].rowInc);
        *format = media::VideoPixelFormat::PIXEL_FORMAT_ARGB;
        break;
    }
    default:
        ALOGW("Unknown layout type: %u", static_cast<uint32_t>(layout.type));
        return std::nullopt;
    }

    // Only the root planes are reported back; presumably interleaved chroma planes share a single
    // root plane for semiplanar formats — verify against VideoFramePlane consumers.
    std::vector<VideoFramePlane> planes;
    for (uint32_t i = 0; i < layout.rootPlanes; ++i) {
        planes.push_back({offsets[i], strides[i]});
    }
    return planes;
}
130 
// The maximum size for output buffer, which is chosen empirically for a 1080p video.
constexpr size_t kMaxBitstreamBufferSizeInBytes = 2 * 1024 * 1024;  // 2MB
// The frame size for 1080p (FHD) video in pixels. Compared against the input frame's area in
// GetMaxOutputBufferSize() below.
constexpr int k1080PSizeInPixels = 1920 * 1080;
// The frame size for 1440p (QHD) video in pixels. Compared against the input frame's area in
// GetMaxOutputBufferSize() below.
constexpr int k1440PSizeInPixels = 2560 * 1440;
137 
138 // Use quadruple size of kMaxBitstreamBufferSizeInBytes when the input frame size is larger than
139 // 1440p, double if larger than 1080p. This is chosen empirically for some 4k encoding use cases and
140 // the Android CTS VideoEncoderTest (crbug.com/927284).
GetMaxOutputBufferSize(const media::Size & size)141 size_t GetMaxOutputBufferSize(const media::Size& size) {
142     if (size.GetArea() > k1440PSizeInPixels) return kMaxBitstreamBufferSizeInBytes * 4;
143     if (size.GetArea() > k1080PSizeInPixels) return kMaxBitstreamBufferSizeInBytes * 2;
144     return kMaxBitstreamBufferSizeInBytes;
145 }
146 
// These are rather subjectively tuned. Presumably used as the number of buffers allocated on the
// V4L2 device input/output queues — confirm in createInputBuffers()/createOutputBuffers().
constexpr size_t kInputBufferCount = 2;
constexpr size_t kOutputBufferCount = 2;

// Define V4L2_CID_MPEG_VIDEO_H264_SPS_PPS_BEFORE_IDR control code if not present in header files.
#ifndef V4L2_CID_MPEG_VIDEO_H264_SPS_PPS_BEFORE_IDR
#define V4L2_CID_MPEG_VIDEO_H264_SPS_PPS_BEFORE_IDR (V4L2_CID_MPEG_BASE + 388)
#endif
155 
156 }  // namespace
157 
158 // static
Create(const C2ConstGraphicBlock & block)159 std::unique_ptr<V4L2EncodeComponent::InputFrame> V4L2EncodeComponent::InputFrame::Create(
160         const C2ConstGraphicBlock& block) {
161     std::vector<int> fds;
162     const C2Handle* const handle = block.handle();
163     for (int i = 0; i < handle->numFds; i++) {
164         fds.emplace_back(handle->data[i]);
165     }
166 
167     return std::unique_ptr<InputFrame>(new InputFrame(std::move(fds)));
168 }
169 
170 // static
create(C2String name,c2_node_id_t id,std::shared_ptr<C2ReflectorHelper> helper,C2ComponentFactory::ComponentDeleter deleter)171 std::shared_ptr<C2Component> V4L2EncodeComponent::create(
172         C2String name, c2_node_id_t id, std::shared_ptr<C2ReflectorHelper> helper,
173         C2ComponentFactory::ComponentDeleter deleter) {
174     ALOGV("%s(%s)", __func__, name.c_str());
175 
176     auto interface = std::make_shared<V4L2EncodeInterface>(name, std::move(helper));
177     if (interface->status() != C2_OK) {
178         ALOGE("Component interface initialization failed (error code %d)", interface->status());
179         return nullptr;
180     }
181 
182     return std::shared_ptr<C2Component>(new V4L2EncodeComponent(name, id, std::move(interface)),
183                                         deleter);
184 }
185 
V4L2EncodeComponent::V4L2EncodeComponent(C2String name, c2_node_id_t id,
                                         std::shared_ptr<V4L2EncodeInterface> interface)
      : mName(name),
        mId(id),
        mInterface(std::move(interface)),
        mComponentState(ComponentState::LOADED) {
    // The component starts out in the LOADED state; start() transitions it to RUNNING.
    ALOGV("%s(%s)", __func__, name.c_str());
}
194 
V4L2EncodeComponent::~V4L2EncodeComponent() {
    ALOGV("%s()", __func__);

    // Stop encoder thread and invalidate pointers if component wasn't stopped before destroying.
    if (mEncoderThread.IsRunning()) {
        // Weak pointers must be invalidated on the encoder thread, where they are dereferenced;
        // this prevents any still-queued task from running against a destroyed component.
        mEncoderTaskRunner->PostTask(
                FROM_HERE, ::base::BindOnce(
                                   [](::base::WeakPtrFactory<V4L2EncodeComponent>* weakPtrFactory) {
                                       weakPtrFactory->InvalidateWeakPtrs();
                                   },
                                   &mWeakThisFactory));
        // Stop() joins the thread, so the invalidation task above is guaranteed to have run.
        mEncoderThread.Stop();
    }
    ALOGV("%s(): done", __func__);
}
210 
c2_status_t V4L2EncodeComponent::start() {
    ALOGV("%s()", __func__);

    // Lock while starting, to synchronize start/stop/reset/release calls.
    std::lock_guard<std::mutex> lock(mComponentLock);

    // According to the specification start() should only be called in the LOADED state.
    if (mComponentState != ComponentState::LOADED) {
        return C2_BAD_STATE;
    }

    if (!mEncoderThread.Start()) {
        ALOGE("Failed to start encoder thread");
        return C2_CORRUPTED;
    }
    mEncoderTaskRunner = mEncoderThread.task_runner();
    mWeakThis = mWeakThisFactory.GetWeakPtr();

    // Initialize the encoder on the encoder thread. The WaitableEvent makes this synchronous:
    // we block until startTask() has reported the initialization result through |success|.
    ::base::WaitableEvent done;
    bool success = false;
    mEncoderTaskRunner->PostTask(
            FROM_HERE, ::base::Bind(&V4L2EncodeComponent::startTask, mWeakThis, &success, &done));
    done.Wait();

    if (!success) {
        ALOGE("Failed to initialize encoder");
        return C2_CORRUPTED;
    }

    setComponentState(ComponentState::RUNNING);
    return C2_OK;
}
244 
c2_status_t V4L2EncodeComponent::stop() {
    ALOGV("%s()", __func__);

    // Lock while stopping, to synchronize start/stop/reset/release calls.
    std::lock_guard<std::mutex> lock(mComponentLock);

    // Stopping is allowed from both the RUNNING and the ERROR state.
    if (mComponentState != ComponentState::RUNNING && mComponentState != ComponentState::ERROR) {
        return C2_BAD_STATE;
    }

    // Return immediately if the component is already stopped.
    if (!mEncoderThread.IsRunning()) {
        return C2_OK;
    }

    // Wait for the component to stop: stopTask() flushes and tears down the encoder, after which
    // the encoder thread itself is joined.
    ::base::WaitableEvent done;
    mEncoderTaskRunner->PostTask(
            FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::stopTask, mWeakThis, &done));
    done.Wait();
    mEncoderThread.Stop();

    setComponentState(ComponentState::LOADED);

    ALOGV("%s() - done", __func__);
    return C2_OK;
}
272 
reset()273 c2_status_t V4L2EncodeComponent::reset() {
274     ALOGV("%s()", __func__);
275 
276     // The interface specification says: "This method MUST be supported in all (including tripped)
277     // states other than released".
278     if (mComponentState == ComponentState::UNLOADED) {
279         return C2_BAD_STATE;
280     }
281 
282     // TODO(dstaessens): Reset the component's interface to default values.
283     stop();
284 
285     return C2_OK;
286 }
287 
release()288 c2_status_t V4L2EncodeComponent::release() {
289     ALOGV("%s()", __func__);
290 
291     // The interface specification says: "This method MUST be supported in stopped state.", but the
292     // release method seems to be called in other states as well.
293     reset();
294 
295     setComponentState(ComponentState::UNLOADED);
296     return C2_OK;
297 }
298 
queue_nb(std::list<std::unique_ptr<C2Work>> * const items)299 c2_status_t V4L2EncodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* const items) {
300     ALOGV("%s()", __func__);
301 
302     if (mComponentState != ComponentState::RUNNING) {
303         ALOGE("Trying to queue work item while component is not running");
304         return C2_BAD_STATE;
305     }
306 
307     while (!items->empty()) {
308         mEncoderTaskRunner->PostTask(FROM_HERE,
309                                      ::base::BindOnce(&V4L2EncodeComponent::queueTask, mWeakThis,
310                                                       std::move(items->front())));
311         items->pop_front();
312     }
313 
314     return C2_OK;
315 }
316 
drain_nb(drain_mode_t mode)317 c2_status_t V4L2EncodeComponent::drain_nb(drain_mode_t mode) {
318     ALOGV("%s()", __func__);
319 
320     if (mode == DRAIN_CHAIN) {
321         return C2_OMITTED;  // Tunneling is not supported for now.
322     }
323 
324     if (mComponentState != ComponentState::RUNNING) {
325         return C2_BAD_STATE;
326     }
327 
328     mEncoderTaskRunner->PostTask(
329             FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::drainTask, mWeakThis, mode));
330     return C2_OK;
331 }
332 
c2_status_t V4L2EncodeComponent::flush_sm(flush_mode_t mode,
                                          std::list<std::unique_ptr<C2Work>>* const flushedWork) {
    ALOGV("%s()", __func__);

    if (mode != FLUSH_COMPONENT) {
        return C2_OMITTED;  // Tunneling is not supported by now
    }

    if (mComponentState != ComponentState::RUNNING) {
        return C2_BAD_STATE;
    }

    // Work that can be immediately discarded should be returned in |flushedWork|. This method may
    // be momentarily blocking but must return within 5ms, which should give us enough time to
    // immediately abandon all non-started work on the encoder thread. We can return all work that
    // can't be immediately discarded using onWorkDone() later.
    // flushTask() signals |done| after moving abandoned work into |flushedWork|, before performing
    // the actual device flush, so this call stays within its time budget.
    ::base::WaitableEvent done;
    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::flushTask,
                                                             mWeakThis, &done, flushedWork));
    done.Wait();

    return C2_OK;
}
356 
c2_status_t V4L2EncodeComponent::announce_nb(const std::vector<C2WorkOutline>& items) {
    // Work announcement is only meaningful for tunneled components, which are not supported.
    return C2_OMITTED;  // Tunneling is not supported by now
}
360 
c2_status_t V4L2EncodeComponent::setListener_vb(const std::shared_ptr<Listener>& listener,
                                                c2_blocking_t mayBlock) {
    ALOG_ASSERT(mComponentState != ComponentState::UNLOADED);

    // Lock so we're sure the component isn't currently starting or stopping.
    std::lock_guard<std::mutex> lock(mComponentLock);

    // If the encoder thread is not running it's safe to update the listener directly.
    if (!mEncoderThread.IsRunning()) {
        mListener = listener;
        return C2_OK;
    }

    // The listener should be updated before exiting this function. If called while the component is
    // currently running we should be allowed to block, as we can only change the listener on the
    // encoder thread.
    ALOG_ASSERT(mayBlock == c2_blocking_t::C2_MAY_BLOCK);

    // Block until setListenerTask() has swapped the listener on the encoder thread.
    ::base::WaitableEvent done;
    mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::setListenerTask,
                                                             mWeakThis, listener, &done));
    done.Wait();

    return C2_OK;
}
386 
std::shared_ptr<C2ComponentInterface> V4L2EncodeComponent::intf() {
    // A fresh SimpleInterface wrapper is created on every call; all wrappers share the same
    // underlying V4L2EncodeInterface instance (mInterface).
    return std::make_shared<SimpleInterface<V4L2EncodeInterface>>(mName.c_str(), mId, mInterface);
}
390 
// Encoder-thread counterpart of start(): initializes the encoder, stores the result in |*success|
// and signals |done| so the waiting caller can proceed.
void V4L2EncodeComponent::startTask(bool* success, ::base::WaitableEvent* done) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoderState == EncoderState::UNINITIALIZED);

    *success = initializeEncoder();
    done->Signal();
}
399 
// Encoder-thread counterpart of stop(): flushes the encoder, releases the device buffers, and
// signals |done| once the encoder is back in the UNINITIALIZED state.
void V4L2EncodeComponent::stopTask(::base::WaitableEvent* done) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // Flushing the encoder will abort all pending work and stop polling and streaming on the V4L2
    // device queues.
    flush();

    // Deallocate all V4L2 device input and output buffers.
    destroyInputBuffers();
    destroyOutputBuffers();

    // Invalidate all weak pointers so no more functions will be executed on the encoder thread.
    mWeakThisFactory.InvalidateWeakPtrs();

    setEncoderState(EncoderState::UNINITIALIZED);
    done->Signal();
}
418 
// Encoder-thread counterpart of queue_nb(): appends |work| to the input work queue and kicks the
// encode loop if it was idle.
void V4L2EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoderState != EncoderState::UNINITIALIZED);

    // If we're in the error state we can immediately return, freeing all buffers in the work item.
    if (mEncoderState == EncoderState::ERROR) {
        return;
    }

    ALOGV("Queued work item (index: %llu, timestamp: %llu, EOS: %d)",
          work->input.ordinal.frameIndex.peekull(), work->input.ordinal.timestamp.peekull(),
          work->input.flags & C2FrameData::FLAG_END_OF_STREAM);

    mInputWorkQueue.push(std::move(work));

    // If we were waiting for work, start encoding again.
    if (mEncoderState == EncoderState::WAITING_FOR_INPUT) {
        setEncoderState(EncoderState::ENCODING);
        mEncoderTaskRunner->PostTask(
                FROM_HERE,
                ::base::BindOnce(&V4L2EncodeComponent::scheduleNextEncodeTask, mWeakThis));
    }
}
443 
// TODO(dstaessens): Investigate improving drain logic after draining the virtio device is fixed.
// Encoder-thread counterpart of drain_nb(): marks the appropriate work item as EOS and, if all
// input has already been queued on the device, starts the actual device drain.
void V4L2EncodeComponent::drainTask(drain_mode_t /*drainMode*/) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // We can only start draining if all the work in our input queue has been queued on the V4L2
    // device input queue, so we mark the last item in the input queue as EOS.
    if (!mInputWorkQueue.empty()) {
        ALOGV("Marking last item in input work queue as EOS");
        mInputWorkQueue.back()->input.flags = static_cast<C2FrameData::flags_t>(
                mInputWorkQueue.back()->input.flags | C2FrameData::FLAG_END_OF_STREAM);
        return;
    }

    // If the input queue is empty and there is only a single empty EOS work item in the output
    // queue we can immediately consider flushing done.
    if ((mOutputWorkQueue.size() == 1) && mOutputWorkQueue.back()->input.buffers.empty()) {
        ALOG_ASSERT(mOutputWorkQueue.back()->input.flags & C2FrameData::FLAG_END_OF_STREAM);
        setEncoderState(EncoderState::DRAINING);
        mEncoderTaskRunner->PostTask(
                FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::onDrainDone, mWeakThis, true));
        return;
    }

    // If the input queue is empty all work that needs to be drained has already been queued in the
    // V4L2 device, so we can immediately request a drain.
    if (!mOutputWorkQueue.empty()) {
        // Mark the last item in the output work queue as EOS, so we will only report it as
        // finished after draining has completed.
        // NOTE(review): this assignment overwrites any other flags, unlike the input-queue path
        // above which ORs the EOS flag in — confirm this is intended.
        ALOGV("Starting drain and marking last item in output work queue as EOS");
        mOutputWorkQueue.back()->input.flags = C2FrameData::FLAG_END_OF_STREAM;
        drain();
    }
}
478 
// Called on the encoder thread when draining completes. |done| is false if the drain failed, in
// which case the component is moved to the error state. On success the EOS work item is reported
// once all associated input buffers have been returned, and encoding resumes if work is pending.
void V4L2EncodeComponent::onDrainDone(bool done) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoderState == EncoderState::DRAINING || mEncoderState == EncoderState::ERROR);

    if (mEncoderState == EncoderState::ERROR) {
        return;
    }

    if (!done) {
        ALOGE("draining the encoder failed");
        reportError(C2_CORRUPTED);
        return;
    }

    // The last work item in the output work queue should be an EOS request.
    if (mOutputWorkQueue.empty() ||
        !(mOutputWorkQueue.back()->input.flags & C2FrameData::FLAG_END_OF_STREAM)) {
        ALOGE("The last item in the output work queue should be marked EOS");
        reportError(C2_CORRUPTED);
        return;
    }

    // Mark the last item in the output work queue as EOS done.
    C2Work* eosWork = mOutputWorkQueue.back().get();
    eosWork->worklets.back()->output.flags = C2FrameData::FLAG_END_OF_STREAM;

    // Draining is done which means all buffers on the device output queue have been returned, but
    // not all buffers on the device input queue might have been returned yet.
    if ((mOutputWorkQueue.size() > 1) || !isWorkDone(*eosWork)) {
        ALOGV("Draining done, waiting for input buffers to be returned");
        return;
    }

    ALOGV("Draining done");
    reportWork(std::move(mOutputWorkQueue.front()));
    mOutputWorkQueue.pop_front();

    // Draining the encoder is now done, we can start encoding again.
    if (!mInputWorkQueue.empty()) {
        setEncoderState(EncoderState::ENCODING);
        mEncoderTaskRunner->PostTask(
                FROM_HERE,
                ::base::BindOnce(&V4L2EncodeComponent::scheduleNextEncodeTask, mWeakThis));
    } else {
        setEncoderState(EncoderState::WAITING_FOR_INPUT);
    }
}
527 
// Encoder-thread counterpart of flush_sm(): moves all not-yet-started work into |flushedWork|,
// then flushes the device.
void V4L2EncodeComponent::flushTask(::base::WaitableEvent* done,
                                    std::list<std::unique_ptr<C2Work>>* const flushedWork) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // Move all work that can immediately be aborted to flushedWork, and notify the caller.
    if (flushedWork) {
        while (!mInputWorkQueue.empty()) {
            std::unique_ptr<C2Work> work = std::move(mInputWorkQueue.front());
            work->input.buffers.clear();
            flushedWork->push_back(std::move(work));
            mInputWorkQueue.pop();
        }
    }
    // Signal |done| before the potentially slow device flush, so flush_sm() can return within its
    // 5ms budget; the remaining work is reported asynchronously.
    done->Signal();

    flush();
}
546 
// Encoder-thread counterpart of setListener_vb(): swaps the listener on the encoder thread so no
// notification can be in flight while the pointer changes, then signals |done|.
void V4L2EncodeComponent::setListenerTask(const std::shared_ptr<Listener>& listener,
                                          ::base::WaitableEvent* done) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    mListener = listener;
    done->Signal();
}
555 
// Performs the full encoder initialization sequence on the encoder thread: opens the V4L2 device
// for the configured output profile, verifies its capabilities, sets up the input/output queues
// and buffers, and configures the required device controls. Returns false on any failure.
bool V4L2EncodeComponent::initializeEncoder() {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoderState == EncoderState::UNINITIALIZED);

    mVisibleSize = mInterface->getInputVisibleSize();
    mKeyFramePeriod = mInterface->getKeyFramePeriod();
    mKeyFrameCounter = 0;
    mCSDSubmitted = false;

    // Open the V4L2 device for encoding to the requested output format.
    // TODO(dstaessens): Do we need to close the device first if already opened?
    // TODO(dstaessens): Avoid conversion to VideoCodecProfile and use C2Config::profile_t directly.
    media::VideoCodecProfile outputProfile =
            c2ProfileToVideoCodecProfile(mInterface->getOutputProfile());
    uint32_t outputPixelFormat =
            media::V4L2Device::VideoCodecProfileToV4L2PixFmt(outputProfile, false);
    if (!outputPixelFormat) {
        ALOGE("Invalid output profile %s", media::GetProfileName(outputProfile).c_str());
        return false;
    }

    mDevice = media::V4L2Device::Create();
    if (!mDevice) {
        ALOGE("Failed to create V4L2 device");
        return false;
    }

    if (!mDevice->Open(media::V4L2Device::Type::kEncoder, outputPixelFormat)) {
        ALOGE("Failed to open device for profile %s (%s)",
              media::GetProfileName(outputProfile).c_str(),
              media::FourccToString(outputPixelFormat).c_str());
        return false;
    }

    // Make sure the device has all required capabilities (multi-planar Memory-To-Memory and
    // streaming I/O), and whether flushing is supported.
    if (!mDevice->HasCapabilities(V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING)) {
        ALOGE("Device doesn't have the required capabilities");
        return false;
    }
    if (!mDevice->IsCommandSupported(V4L2_ENC_CMD_STOP)) {
        ALOGE("Device does not support flushing (V4L2_ENC_CMD_STOP)");
        return false;
    }

    // Get input/output queues so we can send encode request to the device and get back the results.
    // Note: for V4L2 M2M encoders, OUTPUT is the raw-frame (input) queue and CAPTURE is the
    // bitstream (output) queue.
    mInputQueue = mDevice->GetQueue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
    mOutputQueue = mDevice->GetQueue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
    if (!mInputQueue || !mOutputQueue) {
        ALOGE("Failed to get V4L2 device queues");
        return false;
    }

    // First try to configure the specified output format, as changing the output format can affect
    // the configured input format.
    if (!configureOutputFormat(outputProfile)) return false;

    // Configure the input format. If the device doesn't support the specified format we'll use one
    // of the device's preferred formats in combination with an input format convertor.
    if (!configureInputFormat(kInputPixelFormat)) return false;

    // Create input and output buffers.
    // TODO(dstaessens): Avoid allocating output buffers, encode directly into blockpool buffers.
    if (!createInputBuffers() || !createOutputBuffers()) return false;

    // Configure the device, setting all required controls.
    uint8_t level = c2LevelToLevelIDC(mInterface->getOutputLevel());
    if (!configureDevice(outputProfile, level)) return false;

    // We're ready to start encoding now.
    setEncoderState(EncoderState::WAITING_FOR_INPUT);

    // As initialization is asynchronous work might have already be queued.
    if (!mInputWorkQueue.empty()) {
        setEncoderState(EncoderState::ENCODING);
        mEncoderTaskRunner->PostTask(
                FROM_HERE, ::base::Bind(&V4L2EncodeComponent::scheduleNextEncodeTask, mWeakThis));
    }
    return true;
}
637 
configureInputFormat(media::VideoPixelFormat inputFormat)638 bool V4L2EncodeComponent::configureInputFormat(media::VideoPixelFormat inputFormat) {
639     ALOGV("%s()", __func__);
640     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
641     ALOG_ASSERT(mEncoderState == EncoderState::UNINITIALIZED);
642     ALOG_ASSERT(!mInputQueue->IsStreaming());
643     ALOG_ASSERT(!mVisibleSize.IsEmpty());
644     ALOG_ASSERT(!mInputFormatConverter);
645 
646     // First try to use the requested pixel format directly.
647     ::base::Optional<struct v4l2_format> format;
648     auto fourcc = media::Fourcc::FromVideoPixelFormat(inputFormat, false);
649     if (fourcc) {
650         format = mInputQueue->SetFormat(fourcc->ToV4L2PixFmt(), mVisibleSize, 0);
651     }
652 
653     // If the device doesn't support the requested input format we'll try the device's preferred
654     // input pixel formats and use a format convertor. We need to try all formats as some formats
655     // might not be supported for the configured output format.
656     if (!format) {
657         std::vector<uint32_t> preferredFormats =
658                 mDevice->PreferredInputFormat(media::V4L2Device::Type::kEncoder);
659         for (uint32_t i = 0; !format && i < preferredFormats.size(); ++i) {
660             format = mInputQueue->SetFormat(preferredFormats[i], mVisibleSize, 0);
661         }
662     }
663 
664     if (!format) {
665         ALOGE("Failed to set input format to %s",
666               media::VideoPixelFormatToString(inputFormat).c_str());
667         return false;
668     }
669 
670     // Check whether the negotiated input format is valid. The coded size might be adjusted to match
671     // encoder minimums, maximums and alignment requirements of the currently selected formats.
672     auto layout = media::V4L2Device::V4L2FormatToVideoFrameLayout(*format);
673     if (!layout) {
674         ALOGE("Invalid input layout");
675         return false;
676     }
677 
678     mInputLayout = layout.value();
679     if (!media::Rect(mInputLayout->coded_size()).Contains(media::Rect(mVisibleSize))) {
680         ALOGE("Input size %s exceeds encoder capability, encoder can handle %s",
681               mVisibleSize.ToString().c_str(), mInputLayout->coded_size().ToString().c_str());
682         return false;
683     }
684 
685     // Calculate the input coded size from the format.
686     // TODO(dstaessens): How is this different from mInputLayout->coded_size()?
687     mInputCodedSize = media::V4L2Device::AllocatedSizeFromV4L2Format(*format);
688 
689     // Add an input format convertor if the device doesn't support the requested input format.
690     // Note: The amount of input buffers in the convertor should match the amount of buffers on the
691     // device input queue, to simplify logic.
692     // TODO(dstaessens): Currently an input format convertor is always required. Mapping an input
693     // buffer always seems to fail unless we copy it into a new a buffer first. As a temporary
694     // workaround the line below is commented, but this should be undone once the issue is fixed.
695     //if (mInputLayout->format() != inputFormat) {
696     ALOGV("Creating input format convertor (%s)",
697           media::VideoPixelFormatToString(mInputLayout->format()).c_str());
698     mInputFormatConverter =
699             FormatConverter::Create(inputFormat, mVisibleSize, kInputBufferCount, mInputCodedSize);
700     if (!mInputFormatConverter) {
701         ALOGE("Failed to created input format convertor");
702         return false;
703     }
704     //}
705 
706     // The coded input size might be different from the visible size due to alignment requirements,
707     // So we need to specify the visible rectangle. Note that this rectangle might still be adjusted
708     // due to hardware limitations.
709     // TODO(dstaessens): Overwrite mVisibleSize with the adapted visible size here?
710     media::Rect visibleRectangle(mVisibleSize.width(), mVisibleSize.height());
711 
712     struct v4l2_rect rect;
713     rect.left = visibleRectangle.x();
714     rect.top = visibleRectangle.y();
715     rect.width = visibleRectangle.width();
716     rect.height = visibleRectangle.height();
717 
718     // Try to adjust the visible rectangle using the VIDIOC_S_SELECTION command. If this is not
719     // supported we'll try to use the VIDIOC_S_CROP command instead. The visible rectangle might be
720     // adjusted to conform to hardware limitations (e.g. round to closest horizontal and vertical
721     // offsets, width and height).
722     struct v4l2_selection selection_arg;
723     memset(&selection_arg, 0, sizeof(selection_arg));
724     selection_arg.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
725     selection_arg.target = V4L2_SEL_TGT_CROP;
726     selection_arg.r = rect;
727     if (mDevice->Ioctl(VIDIOC_S_SELECTION, &selection_arg) == 0) {
728         visibleRectangle = media::Rect(selection_arg.r.left, selection_arg.r.top,
729                                        selection_arg.r.width, selection_arg.r.height);
730     } else {
731         struct v4l2_crop crop;
732         memset(&crop, 0, sizeof(v4l2_crop));
733         crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
734         crop.c = rect;
735         if (mDevice->Ioctl(VIDIOC_S_CROP, &crop) != 0 ||
736             mDevice->Ioctl(VIDIOC_G_CROP, &crop) != 0) {
737             ALOGE("Failed to crop to specified visible rectangle");
738             return false;
739         }
740         visibleRectangle = media::Rect(crop.c.left, crop.c.top, crop.c.width, crop.c.height);
741     }
742 
743     ALOGV("Input format set to %s (size: %s, adjusted size: %dx%d, coded size: %s)",
744           media::VideoPixelFormatToString(mInputLayout->format()).c_str(),
745           mVisibleSize.ToString().c_str(), visibleRectangle.width(), visibleRectangle.height(),
746           mInputCodedSize.ToString().c_str());
747 
748     mVisibleSize.SetSize(visibleRectangle.width(), visibleRectangle.height());
749     return true;
750 }
751 
configureOutputFormat(media::VideoCodecProfile outputProfile)752 bool V4L2EncodeComponent::configureOutputFormat(media::VideoCodecProfile outputProfile) {
753     ALOGV("%s()", __func__);
754     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
755     ALOG_ASSERT(mEncoderState == EncoderState::UNINITIALIZED);
756     ALOG_ASSERT(!mOutputQueue->IsStreaming());
757     ALOG_ASSERT(!mVisibleSize.IsEmpty());
758 
759     auto format = mOutputQueue->SetFormat(
760             media::V4L2Device::VideoCodecProfileToV4L2PixFmt(outputProfile, false), mVisibleSize,
761             GetMaxOutputBufferSize(mVisibleSize));
762     if (!format) {
763         ALOGE("Failed to set output format to %s", media::GetProfileName(outputProfile).c_str());
764         return false;
765     }
766 
767     // The device might adjust the requested output buffer size to match hardware requirements.
768     mOutputBufferSize = ::base::checked_cast<size_t>(format->fmt.pix_mp.plane_fmt[0].sizeimage);
769 
770     ALOGV("Output format set to %s (buffer size: %u)", media::GetProfileName(outputProfile).c_str(),
771           mOutputBufferSize);
772     return true;
773 }
774 
configureDevice(media::VideoCodecProfile outputProfile,std::optional<const uint8_t> outputH264Level)775 bool V4L2EncodeComponent::configureDevice(media::VideoCodecProfile outputProfile,
776                                           std::optional<const uint8_t> outputH264Level) {
777     ALOGV("%s()", __func__);
778     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
779 
780     // Enable frame-level bitrate control. This is the only mandatory general control.
781     if (!mDevice->SetExtCtrls(V4L2_CTRL_CLASS_MPEG,
782                               {media::V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE, 1)})) {
783         ALOGW("Failed enabling bitrate control");
784         // TODO(b/161508368): V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE is currently not supported yet,
785         // assume the operation was successful for now.
786     }
787 
788     // Additional optional controls:
789     // - Enable macroblock-level bitrate control.
790     // - Set GOP length to 0 to disable periodic key frames.
791     mDevice->SetExtCtrls(V4L2_CTRL_CLASS_MPEG,
792                          {media::V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE, 1),
793                           media::V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_GOP_SIZE, 0)});
794 
795     // All controls below are H.264-specific, so we can return here if the profile is not H.264.
796     if (outputProfile < media::H264PROFILE_MIN || outputProfile > media::H264PROFILE_MAX) {
797         return true;
798     }
799 
800     // When encoding H.264 we want to prepend SPS and PPS to each IDR for resilience. Some
801     // devices support this through the V4L2_CID_MPEG_VIDEO_H264_SPS_PPS_BEFORE_IDR control.
802     // TODO(b/161495502): V4L2_CID_MPEG_VIDEO_H264_SPS_PPS_BEFORE_IDR is currently not supported
803     // yet, just log a warning if the operation was unsuccessful for now.
804     if (mDevice->IsCtrlExposed(V4L2_CID_MPEG_VIDEO_H264_SPS_PPS_BEFORE_IDR)) {
805         if (!mDevice->SetExtCtrls(
806                     V4L2_CTRL_CLASS_MPEG,
807                     {media::V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_H264_SPS_PPS_BEFORE_IDR, 1)})) {
808             ALOGE("Failed to configure device to prepend SPS and PPS to each IDR");
809             return false;
810         }
811         ALOGV("Device supports prepending SPS and PPS to each IDR");
812     } else {
813         ALOGW("Device doesn't support prepending SPS and PPS to IDR");
814     }
815 
816     std::vector<media::V4L2ExtCtrl> h264Ctrls;
817 
818     // No B-frames, for lowest decoding latency.
819     h264Ctrls.emplace_back(V4L2_CID_MPEG_VIDEO_B_FRAMES, 0);
820     // Quantization parameter maximum value (for variable bitrate control).
821     h264Ctrls.emplace_back(V4L2_CID_MPEG_VIDEO_H264_MAX_QP, 51);
822 
823     // Set H.264 profile.
824     int32_t profile = media::V4L2Device::VideoCodecProfileToV4L2H264Profile(outputProfile);
825     if (profile < 0) {
826         ALOGE("Trying to set invalid H.264 profile");
827         return false;
828     }
829     h264Ctrls.emplace_back(V4L2_CID_MPEG_VIDEO_H264_PROFILE, profile);
830 
831     // Set H.264 output level. Use Level 4.0 as fallback default.
832     // TODO(dstaessens): Investigate code added by hiroh@ recently to select level in Chrome VEA.
833     uint8_t h264Level = outputH264Level.value_or(media::H264SPS::kLevelIDC4p0);
834     h264Ctrls.emplace_back(V4L2_CID_MPEG_VIDEO_H264_LEVEL,
835                            media::V4L2Device::H264LevelIdcToV4L2H264Level(h264Level));
836 
837     // Ask not to put SPS and PPS into separate bitstream buffers.
838     h264Ctrls.emplace_back(V4L2_CID_MPEG_VIDEO_HEADER_MODE,
839                            V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME);
840 
841     // Ignore return value as these controls are optional.
842     mDevice->SetExtCtrls(V4L2_CTRL_CLASS_MPEG, std::move(h264Ctrls));
843 
844     return true;
845 }
846 
// Synchronize dynamic encoding parameters (bitrate, framerate, explicit key frame requests)
// requested by the codec 2.0 framework with the V4L2 device. Called on the encoder task runner
// before each frame is encoded. Returns false (and reports the error to the client) only when
// querying or configuring the C2 interface fails; device-side control failures are logged and
// tolerated for now (see the TODOs referencing tracking bugs below).
bool V4L2EncodeComponent::updateEncodingParameters() {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // Query the interface for the encoding parameters requested by the codec 2.0 framework.
    C2StreamBitrateInfo::output bitrateInfo;
    C2StreamFrameRateInfo::output framerateInfo;
    c2_status_t status =
            mInterface->query({&bitrateInfo, &framerateInfo}, {}, C2_DONT_BLOCK, nullptr);
    if (status != C2_OK) {
        ALOGE("Failed to query interface for encoding parameters (error code: %d)", status);
        reportError(status);
        return false;
    }

    // Ask device to change bitrate if it's different from the currently configured bitrate.
    uint32_t bitrate = bitrateInfo.value;
    if (mBitrate != bitrate) {
        ALOG_ASSERT(bitrate > 0u);
        ALOGV("Setting bitrate to %u", bitrate);
        if (!mDevice->SetExtCtrls(V4L2_CTRL_CLASS_MPEG,
                                  {media::V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_BITRATE, bitrate)})) {
            // TODO(b/161495749): V4L2_CID_MPEG_VIDEO_BITRATE is currently not supported yet, assume
            // the operation was successful for now.
            ALOGW("Requesting bitrate change failed");
        }
        // Cache the value so the control is only issued when the framework changes it.
        mBitrate = bitrate;
    }

    // Ask device to change framerate if it's different from the currently configured framerate.
    // TODO(dstaessens): Move IOCTL to device and use helper function.
    uint32_t framerate = static_cast<uint32_t>(std::round(framerateInfo.value));
    if (mFramerate != framerate) {
        ALOG_ASSERT(framerate > 0u);
        ALOGV("Setting framerate to %u", framerate);
        // V4L2 expresses framerate as a frame interval: 1/framerate seconds per frame.
        struct v4l2_streamparm parms;
        memset(&parms, 0, sizeof(v4l2_streamparm));
        parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
        parms.parm.output.timeperframe.numerator = 1;
        parms.parm.output.timeperframe.denominator = framerate;
        if (mDevice->Ioctl(VIDIOC_S_PARM, &parms) != 0) {
            // TODO(b/161499573): VIDIOC_S_PARM is currently not supported yet, assume the operation
            // was successful for now.
            ALOGW("Requesting framerate change failed");
        }
        mFramerate = framerate;
    }

    // Check whether an explicit key frame was requested, if so reset the key frame counter to
    // immediately request a key frame.
    C2StreamRequestSyncFrameTuning::output requestKeyFrame;
    status = mInterface->query({&requestKeyFrame}, {}, C2_DONT_BLOCK, nullptr);
    if (status != C2_OK) {
        ALOGE("Failed to query interface for key frame request (error code: %d)", status);
        reportError(status);
        return false;
    }
    if (requestKeyFrame.value == C2_TRUE) {
        mKeyFrameCounter = 0;
        // Clear the one-shot request on the interface so subsequent frames are not all forced to
        // be key frames.
        requestKeyFrame.value = C2_FALSE;
        std::vector<std::unique_ptr<C2SettingResult>> failures;
        status = mInterface->config({&requestKeyFrame}, C2_MAY_BLOCK, &failures);
        if (status != C2_OK) {
            ALOGE("Failed to reset key frame request on interface (error code: %d)", status);
            reportError(status);
            return false;
        }
    }

    // Request the next frame to be a key frame each time the counter reaches 0.
    if (mKeyFrameCounter == 0) {
        if (!mDevice->SetExtCtrls(V4L2_CTRL_CLASS_MPEG,
                                  {media::V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME)})) {
            // TODO(b/161498590): V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME is currently not supported
            // yet, assume the operation was successful for now.
            ALOGW("Failed requesting key frame");
        }
    }

    return true;
}
928 
// Pop the next work item from the input work queue and start encoding it. Transitions to
// WAITING_FOR_INPUT_BUFFERS when the device has no free input buffers (resumed from
// onInputBufferDone()), and to WAITING_FOR_INPUT when the input work queue runs dry. While more
// work is queued this task re-posts itself on the encoder task runner.
void V4L2EncodeComponent::scheduleNextEncodeTask() {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoderState == EncoderState::ENCODING || mEncoderState == EncoderState::ERROR);

    // If we're in the error state we can immediately return.
    if (mEncoderState == EncoderState::ERROR) {
        return;
    }

    // Get the next work item. Currently only a single worklet per work item is supported. An input
    // buffer should always be supplied unless this is a drain or CSD request.
    ALOG_ASSERT(!mInputWorkQueue.empty());
    C2Work* work = mInputWorkQueue.front().get();
    ALOG_ASSERT(work->input.buffers.size() <= 1u && work->worklets.size() == 1u);

    // Set the default values for the output worklet.
    work->worklets.front()->output.flags = static_cast<C2FrameData::flags_t>(0);
    work->worklets.front()->output.buffers.clear();
    work->worklets.front()->output.ordinal = work->input.ordinal;

    uint64_t index = work->input.ordinal.frameIndex.peeku();
    int64_t timestamp = static_cast<int64_t>(work->input.ordinal.timestamp.peeku());
    bool endOfStream = work->input.flags & C2FrameData::FLAG_END_OF_STREAM;
    ALOGV("Scheduling next encode (index: %" PRIu64 ", timestamp: %" PRId64 ", EOS: %d)", index,
          timestamp, endOfStream);

    if (!work->input.buffers.empty()) {
        // Check if the device has free input buffers available. If not we'll switch to the
        // WAITING_FOR_INPUT_BUFFERS state, and resume encoding once we're notified buffers are
        // available in the onInputBufferDone() task. Note: The input buffers are not copied into
        // the device's input buffers, but rather a memory pointer is imported. We still have to
        // throttle the number of enqueues queued simultaneously on the device however.
        if (mInputQueue->FreeBuffersCount() == 0) {
            ALOGV("Waiting for device to return input buffers");
            setEncoderState(EncoderState::WAITING_FOR_INPUT_BUFFERS);
            return;
        }

        C2ConstGraphicBlock inputBlock =
                work->input.buffers.front()->data().graphicBlocks().front();

        // If encoding fails, we'll wait for an event (e.g. input buffers available) to start
        // encoding again.
        if (!encode(inputBlock, index, timestamp)) {
            return;
        }
    }

    // The codec 2.0 framework might queue an empty CSD request, but this is currently not
    // supported. We will return the CSD with the first encoded buffer work.
    // TODO(dstaessens): Avoid doing this, store CSD request work at start of output queue.
    if (work->input.buffers.empty() && !endOfStream) {
        ALOGV("Discarding empty CSD request");
        reportWork(std::move(mInputWorkQueue.front()));
    } else {
        // Completed work is only reported once the device returns the encoded buffer, so move
        // the item to the output work queue where onInputBufferDone()/onOutputBufferDone() will
        // find it.
        mOutputWorkQueue.push_back(std::move(mInputWorkQueue.front()));
    }
    mInputWorkQueue.pop();

    // Drain the encoder if required.
    if (endOfStream) {
        drainTask(C2Component::DRAIN_COMPONENT_WITH_EOS);
    }

    if (mEncoderState == EncoderState::DRAINING) {
        return;
    } else if (mInputWorkQueue.empty()) {
        setEncoderState(EncoderState::WAITING_FOR_INPUT);
        return;
    }

    // Queue the next work item to be encoded.
    mEncoderTaskRunner->PostTask(
            FROM_HERE, ::base::BindOnce(&V4L2EncodeComponent::scheduleNextEncodeTask, mWeakThis));
}
1005 
// Encode a single input graphic block identified by |index| with presentation |timestamp|.
// Updates dynamic encoding parameters, optionally converts the block to the device's input pixel
// format, imports it into the device input queue, and (on first use) starts streaming and device
// polling. Returns false when encoding could not be started; most failure paths also report an
// error to the client, except the updateEncodingParameters() path which reports internally.
bool V4L2EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int64_t timestamp) {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoderState == EncoderState::ENCODING);

    // Update dynamic encoding parameters (bitrate, framerate, key frame) if requested.
    if (!updateEncodingParameters()) return false;

    // Counter wraps at the key frame period; a value of 0 triggers a key frame request in
    // updateEncodingParameters() on the next frame.
    mKeyFrameCounter = (mKeyFrameCounter + 1) % mKeyFramePeriod;

    // If required convert the data to the V4L2 device's configured input pixel format. We
    // allocate the same amount of buffers on the device input queue and the format convertor,
    // so we should never run out of conversion buffers if there are free buffers in the input
    // queue.
    if (mInputFormatConverter) {
        if (!mInputFormatConverter->isReady()) {
            ALOGE("Input format convertor ran out of buffers");
            reportError(C2_CORRUPTED);
            return false;
        }

        ALOGV("Converting input block (index: %" PRIu64 ")", index);
        c2_status_t status = C2_CORRUPTED;
        block = mInputFormatConverter->convertBlock(index, block, &status);
        if (status != C2_OK) {
            ALOGE("Failed to convert input block (index: %" PRIu64 ")", index);
            reportError(status);
            return false;
        }
    }

    ALOGV("Encoding input block (index: %" PRIu64 ", timestamp: %" PRId64 ", size: %dx%d)", index,
          timestamp, block.width(), block.height());

    // Create a video frame from the graphic block.
    std::unique_ptr<InputFrame> frame = InputFrame::Create(block);
    if (!frame) {
        ALOGE("Failed to create video frame from input block (index: %" PRIu64
              ", timestamp: %" PRId64 ")",
              index, timestamp);
        reportError(C2_CORRUPTED);
        return false;
    }

    // Get the video frame layout and pixel format from the graphic block.
    // TODO(dstaessens) Integrate getVideoFrameLayout() into InputFrame::Create()
    media::VideoPixelFormat format;
    std::optional<std::vector<VideoFramePlane>> planes = getVideoFrameLayout(block, &format);
    if (!planes) {
        ALOGE("Failed to get input block's layout");
        reportError(C2_CORRUPTED);
        return false;
    }

    if (!enqueueInputBuffer(std::move(frame), format, *planes, index, timestamp)) {
        ALOGE("Failed to enqueue video frame (index: %" PRIu64 ", timestamp: %" PRId64 ")", index,
              timestamp);
        reportError(C2_CORRUPTED);
        return false;
    }

    // Start streaming on the input and output queue if required.
    if (!mInputQueue->IsStreaming()) {
        ALOG_ASSERT(!mOutputQueue->IsStreaming());
        // Note: the output queue is started first so encoded buffers can be received as soon as
        // input is processed.
        if (!mOutputQueue->Streamon() || !mInputQueue->Streamon()) {
            ALOGE("Failed to start streaming on input and output queue");
            reportError(C2_CORRUPTED);
            return false;
        }
        // Start polling on the V4L2 device.
        startDevicePoll();
    }

    // Queue all buffers on the output queue. These buffers will be used to store the encoded
    // bitstreams.
    while (mOutputQueue->FreeBuffersCount() > 0) {
        if (!enqueueOutputBuffer()) return false;
    }

    return true;
}
1087 
drain()1088 void V4L2EncodeComponent::drain() {
1089     ALOGV("%s()", __func__);
1090     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1091 
1092     if (mEncoderState == EncoderState::DRAINING || mEncoderState == EncoderState::ERROR) {
1093         return;
1094     }
1095 
1096     ALOG_ASSERT(mInputQueue->IsStreaming() && mOutputQueue->IsStreaming());
1097     ALOG_ASSERT(!mOutputWorkQueue.empty());
1098 
1099     // TODO(dstaessens): Move IOCTL to device class.
1100     struct v4l2_encoder_cmd cmd;
1101     memset(&cmd, 0, sizeof(v4l2_encoder_cmd));
1102     cmd.cmd = V4L2_ENC_CMD_STOP;
1103     if (mDevice->Ioctl(VIDIOC_ENCODER_CMD, &cmd) != 0) {
1104         ALOGE("Failed to stop encoder");
1105         onDrainDone(false);
1106         return;
1107     }
1108     ALOGV("%s(): Sent STOP command to encoder", __func__);
1109 
1110     setEncoderState(EncoderState::DRAINING);
1111 }
1112 
// Abort all in-flight work: stop device polling and streaming, return conversion buffers to the
// input format convertor, and report every queued work item to the listener as C2_NOT_FOUND.
// Streaming and polling are resumed lazily when new encode work arrives.
void V4L2EncodeComponent::flush() {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());

    // Stop the device poll thread.
    stopDevicePoll();

    // Stop streaming on the V4L2 device, which stops all currently queued work and releases all
    // buffers currently in use by the device.
    // TODO(b/160540027): Calling streamoff currently results in a bug.
    for (auto& queue : {mInputQueue, mOutputQueue}) {
        if (queue && queue->IsStreaming() && !queue->Streamoff()) {
            ALOGE("Failed to stop streaming on the device queue");
            reportError(C2_CORRUPTED);
        }
    }

    // Return all buffers to the input format convertor and clear all references to graphic blocks
    // in the input queue. We don't need to clear the output map as those buffers will still be
    // used.
    for (auto& it : mInputBuffersMap) {
        if (mInputFormatConverter && it.second) {
            mInputFormatConverter->returnBlock(it.first);
        }
        it.second = nullptr;
    }

    // Report all queued work items as aborted. Both the not-yet-encoded (input) and the
    // in-flight (output) work queues are drained.
    std::list<std::unique_ptr<C2Work>> abortedWorkItems;
    while (!mInputWorkQueue.empty()) {
        std::unique_ptr<C2Work> work = std::move(mInputWorkQueue.front());
        work->result = C2_NOT_FOUND;
        work->input.buffers.clear();
        abortedWorkItems.push_back(std::move(work));
        mInputWorkQueue.pop();
    }
    while (!mOutputWorkQueue.empty()) {
        std::unique_ptr<C2Work> work = std::move(mOutputWorkQueue.front());
        work->result = C2_NOT_FOUND;
        work->input.buffers.clear();
        abortedWorkItems.push_back(std::move(work));
        mOutputWorkQueue.pop_front();
    }
    if (!abortedWorkItems.empty())
        mListener->onWorkDone_nb(shared_from_this(), std::move(abortedWorkItems));

    // Streaming and polling on the V4L2 device input and output queues will be resumed once new
    // encode work is queued.
}
1162 
fetchOutputBlock()1163 std::shared_ptr<C2LinearBlock> V4L2EncodeComponent::fetchOutputBlock() {
1164     // TODO(dstaessens): fetchLinearBlock() might be blocking.
1165     ALOGV("Fetching linear block (size: %u)", mOutputBufferSize);
1166     std::shared_ptr<C2LinearBlock> outputBlock;
1167     c2_status_t status = mOutputBlockPool->fetchLinearBlock(
1168             mOutputBufferSize,
1169             C2MemoryUsage(C2MemoryUsage::CPU_READ |
1170                           static_cast<uint64_t>(BufferUsage::VIDEO_ENCODER)),
1171             &outputBlock);
1172     if (status != C2_OK) {
1173         ALOGE("Failed to fetch linear block (error: %d)", status);
1174         reportError(status);
1175         return nullptr;
1176     }
1177 
1178     return outputBlock;
1179 }
1180 
onInputBufferDone(uint64_t index)1181 void V4L2EncodeComponent::onInputBufferDone(uint64_t index) {
1182     ALOGV("%s(): Input buffer done (index: %" PRIu64 ")", __func__, index);
1183     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1184     ALOG_ASSERT(mEncoderState != EncoderState::UNINITIALIZED);
1185 
1186     // There are no guarantees the input buffers are returned in order, so we need to find the work
1187     // item which this buffer belongs to.
1188     C2Work* work = getWorkByIndex(index);
1189     if (!work) {
1190         ALOGE("Failed to find work associated with input buffer %" PRIu64, index);
1191         reportError(C2_CORRUPTED);
1192         return;
1193     }
1194 
1195     // We're done using the input block, release reference to return the block to the client. If
1196     // using an input format convertor, we also need to return the block to the convertor.
1197     LOG_ASSERT(!work->input.buffers.empty());
1198     work->input.buffers.front().reset();
1199     if (mInputFormatConverter) {
1200         c2_status_t status = mInputFormatConverter->returnBlock(index);
1201         if (status != C2_OK) {
1202             reportError(status);
1203             return;
1204         }
1205     }
1206 
1207     // Return all completed work items. The work item might have been waiting for it's input buffer
1208     // to be returned, in which case we can report it as completed now. As input buffers are not
1209     // necessarily returned in order we might be able to return multiple ready work items now.
1210     while (!mOutputWorkQueue.empty() && isWorkDone(*mOutputWorkQueue.front())) {
1211         reportWork(std::move(mOutputWorkQueue.front()));
1212         mOutputWorkQueue.pop_front();
1213     }
1214 
1215     // We might have been waiting for input buffers to be returned after draining finished.
1216     if (mEncoderState == EncoderState::DRAINING && mOutputWorkQueue.empty()) {
1217         ALOGV("Draining done");
1218         mEncoderState = EncoderState::WAITING_FOR_INPUT_BUFFERS;
1219     }
1220 
1221     // If we previously used up all input queue buffers we can start encoding again now.
1222     if ((mEncoderState == EncoderState::WAITING_FOR_INPUT_BUFFERS) && !mInputWorkQueue.empty()) {
1223         setEncoderState(EncoderState::ENCODING);
1224         mEncoderTaskRunner->PostTask(
1225                 FROM_HERE,
1226                 ::base::BindOnce(&V4L2EncodeComponent::scheduleNextEncodeTask, mWeakThis));
1227     }
1228 }
1229 
onOutputBufferDone(uint32_t payloadSize,bool keyFrame,int64_t timestamp,std::shared_ptr<C2LinearBlock> outputBlock)1230 void V4L2EncodeComponent::onOutputBufferDone(uint32_t payloadSize, bool keyFrame, int64_t timestamp,
1231                                              std::shared_ptr<C2LinearBlock> outputBlock) {
1232     ALOGV("%s(): output buffer done (timestamp: %" PRId64 ", size: %u, key frame: %d)", __func__,
1233           timestamp, payloadSize, keyFrame);
1234     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1235 
1236     if (mEncoderState == EncoderState::ERROR) {
1237         return;
1238     }
1239 
1240     C2ConstLinearBlock constBlock =
1241             outputBlock->share(outputBlock->offset(), payloadSize, C2Fence());
1242 
1243     // If no CSD (content-specific-data, e.g. SPS for H.264) has been submitted yet, we expect this
1244     // output block to contain CSD. We only submit the CSD once, even if it's attached to each key
1245     // frame.
1246     if (!mCSDSubmitted) {
1247         ALOGV("No CSD submitted yet, extracting CSD");
1248         std::unique_ptr<C2StreamInitDataInfo::output> csd;
1249         C2ReadView view = constBlock.map().get();
1250         extractCSDInfo(&csd, view.data(), view.capacity());
1251         if (!csd) {
1252             ALOGE("Failed to extract CSD");
1253             reportError(C2_CORRUPTED);
1254             return;
1255         }
1256 
1257         // Attach the CSD to the first item in our output work queue.
1258         LOG_ASSERT(!mOutputWorkQueue.empty());
1259         C2Work* work = mOutputWorkQueue.front().get();
1260         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
1261         mCSDSubmitted = true;
1262     }
1263 
1264     // Get the work item associated with the timestamp.
1265     C2Work* work = getWorkByTimestamp(timestamp);
1266     if (!work) {
1267         // It's possible we got an empty CSD request with timestamp 0, which we currently just
1268         // discard.
1269         // TODO(dstaessens): Investigate handling empty CSD requests.
1270         if (timestamp != 0) {
1271             reportError(C2_CORRUPTED);
1272         }
1273         return;
1274     }
1275 
1276     std::shared_ptr<C2Buffer> buffer = C2Buffer::CreateLinearBuffer(std::move(constBlock));
1277     if (keyFrame) {
1278         buffer->setInfo(
1279                 std::make_shared<C2StreamPictureTypeMaskInfo::output>(0u, C2Config::SYNC_FRAME));
1280     }
1281     work->worklets.front()->output.buffers.emplace_back(buffer);
1282 
1283     // We can report the work item as completed if its associated input buffer has also been
1284     // released. As output buffers are not necessarily returned in order we might be able to return
1285     // multiple ready work items now.
1286     while (!mOutputWorkQueue.empty() && isWorkDone(*mOutputWorkQueue.front())) {
1287         reportWork(std::move(mOutputWorkQueue.front()));
1288         mOutputWorkQueue.pop_front();
1289     }
1290 }
1291 
getWorkByIndex(uint64_t index)1292 C2Work* V4L2EncodeComponent::getWorkByIndex(uint64_t index) {
1293     ALOGV("%s(): getting work item (index: %" PRIu64 ")", __func__, index);
1294     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1295 
1296     auto it = std::find_if(mOutputWorkQueue.begin(), mOutputWorkQueue.end(),
1297                            [index](const std::unique_ptr<C2Work>& w) {
1298                                return w->input.ordinal.frameIndex.peeku() == index;
1299                            });
1300     if (it == mOutputWorkQueue.end()) {
1301         ALOGE("Failed to find work (index: %" PRIu64 ")", index);
1302         return nullptr;
1303     }
1304     return it->get();
1305 }
1306 
getWorkByTimestamp(int64_t timestamp)1307 C2Work* V4L2EncodeComponent::getWorkByTimestamp(int64_t timestamp) {
1308     ALOGV("%s(): getting work item (timestamp: %" PRId64 ")", __func__, timestamp);
1309     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1310     ALOG_ASSERT(timestamp >= 0);
1311 
1312     // Find the work with specified timestamp by looping over the output work queue. This should be
1313     // very fast as the output work queue will never be longer then a few items. Ignore empty work
1314     // items that are marked as EOS, as their timestamp might clash with other work items.
1315     auto it = std::find_if(mOutputWorkQueue.begin(), mOutputWorkQueue.end(),
1316                            [timestamp](const std::unique_ptr<C2Work>& w) {
1317                                return !(w->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
1318                                       w->input.ordinal.timestamp.peeku() ==
1319                                               static_cast<uint64_t>(timestamp);
1320                            });
1321     if (it == mOutputWorkQueue.end()) {
1322         ALOGE("Failed to find work (timestamp: %" PRIu64 ")", timestamp);
1323         return nullptr;
1324     }
1325     return it->get();
1326 }
1327 
isWorkDone(const C2Work & work) const1328 bool V4L2EncodeComponent::isWorkDone(const C2Work& work) const {
1329     ALOGV("%s()", __func__);
1330     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1331 
1332     if ((work.input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
1333         !(work.worklets.front()->output.flags & C2FrameData::FLAG_END_OF_STREAM)) {
1334         ALOGV("Work item %" PRIu64 " is marked as EOS but draining has not finished yet",
1335               work.input.ordinal.frameIndex.peeku());
1336         return false;
1337     }
1338 
1339     if (!work.input.buffers.empty() && work.input.buffers.front()) {
1340         ALOGV("Input buffer associated with work item %" PRIu64 " not returned yet",
1341               work.input.ordinal.frameIndex.peeku());
1342         return false;
1343     }
1344 
1345     // If the work item had an input buffer to be encoded, it should have an output buffer set.
1346     if (!work.input.buffers.empty() && work.worklets.front()->output.buffers.empty()) {
1347         ALOGV("Output buffer associated with work item %" PRIu64 " not returned yet",
1348               work.input.ordinal.frameIndex.peeku());
1349         return false;
1350     }
1351 
1352     return true;
1353 }
1354 
reportWork(std::unique_ptr<C2Work> work)1355 void V4L2EncodeComponent::reportWork(std::unique_ptr<C2Work> work) {
1356     ALOG_ASSERT(work);
1357     ALOGV("%s(): Reporting work item as finished (index: %llu, timestamp: %llu)", __func__,
1358           work->input.ordinal.frameIndex.peekull(), work->input.ordinal.timestamp.peekull());
1359     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1360 
1361     work->result = C2_OK;
1362     work->workletsProcessed = static_cast<uint32_t>(work->worklets.size());
1363 
1364     std::list<std::unique_ptr<C2Work>> finishedWorkList;
1365     finishedWorkList.emplace_back(std::move(work));
1366     mListener->onWorkDone_nb(shared_from_this(), std::move(finishedWorkList));
1367 }
1368 
startDevicePoll()1369 bool V4L2EncodeComponent::startDevicePoll() {
1370     ALOGV("%s()", __func__);
1371     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1372 
1373     if (!mDevice->StartPolling(
1374                 ::base::BindRepeating(&V4L2EncodeComponent::serviceDeviceTask, mWeakThis),
1375                 ::base::BindRepeating(&V4L2EncodeComponent::onPollError, mWeakThis))) {
1376         ALOGE("Device poll thread failed to start");
1377         reportError(C2_CORRUPTED);
1378         return false;
1379     }
1380 
1381     ALOGV("Device poll started");
1382     return true;
1383 }
1384 
stopDevicePoll()1385 bool V4L2EncodeComponent::stopDevicePoll() {
1386     ALOGV("%s()", __func__);
1387     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1388 
1389     if (!mDevice->StopPolling()) {
1390         ALOGE("Failed to stop polling on the device");
1391         reportError(C2_CORRUPTED);
1392         return false;
1393     }
1394 
1395     ALOGV("Device poll stopped");
1396     return true;
1397 }
1398 
// Error callback handed to the device poll thread (see startDevicePoll()); moves the component
// into the error state when polling fails.
void V4L2EncodeComponent::onPollError() {
    ALOGV("%s()", __func__);
    reportError(C2_CORRUPTED);
}
1403 
serviceDeviceTask(bool)1404 void V4L2EncodeComponent::serviceDeviceTask(bool /*event*/) {
1405     ALOGV("%s()", __func__);
1406     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1407     ALOG_ASSERT(mEncoderState != EncoderState::UNINITIALIZED);
1408 
1409     if (mEncoderState == EncoderState::ERROR) {
1410         return;
1411     }
1412 
1413     // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free list.
1414     while (mInputQueue->QueuedBuffersCount() > 0) {
1415         if (!dequeueInputBuffer()) break;
1416     }
1417 
1418     // Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the free list.
1419     while (mOutputQueue->QueuedBuffersCount() > 0) {
1420         if (!dequeueOutputBuffer()) break;
1421     }
1422 
1423     ALOGV("%s() - done", __func__);
1424 }
1425 
// Queue the specified video |frame| on the V4L2 input (VIDEO_OUTPUT) queue. |format| and |planes|
// describe the frame's pixel format and plane layout, |index| identifies the associated work item,
// and |timestamp| (in microseconds) is stamped on the V4L2 buffer so the matching encoded output
// buffer can be identified later. Returns false if no free V4L2 buffer was available.
bool V4L2EncodeComponent::enqueueInputBuffer(std::unique_ptr<InputFrame> frame,
                                             media::VideoPixelFormat format,
                                             const std::vector<VideoFramePlane>& planes,
                                             int64_t index, int64_t timestamp) {
    ALOGV("%s(): queuing input buffer (index: %" PRId64 ")", __func__, index);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mInputQueue->FreeBuffersCount() > 0);
    ALOG_ASSERT(mEncoderState == EncoderState::ENCODING);
    ALOG_ASSERT(mInputLayout->format() == format);
    ALOG_ASSERT(mInputLayout->planes().size() == planes.size());

    auto buffer = mInputQueue->GetFreeBuffer();
    if (!buffer) {
        ALOGE("Failed to get free buffer from device input queue");
        return false;
    }

    // Mark the buffer with the frame's timestamp so we can identify the associated output buffers.
    buffer->SetTimeStamp(
            {.tv_sec = static_cast<time_t>(timestamp / ::base::Time::kMicrosecondsPerSecond),
             .tv_usec = static_cast<time_t>(timestamp % ::base::Time::kMicrosecondsPerSecond)});
    size_t bufferId = buffer->BufferId();

    for (size_t i = 0; i < planes.size(); ++i) {
        // Single-buffer input format may have multiple color planes, so bytesUsed of the single
        // buffer should be sum of each color planes' size.
        size_t bytesUsed = 0;
        if (planes.size() == 1) {
            bytesUsed = media::VideoFrame::AllocationSize(format, mInputLayout->coded_size());
        } else {
            bytesUsed = ::base::checked_cast<size_t>(
                    media::VideoFrame::PlaneSize(format, i, mInputLayout->coded_size()).GetArea());
        }

        // TODO(crbug.com/901264): The way to pass an offset within a DMA-buf is not defined
        // in V4L2 specification, so we abuse data_offset for now. Fix it when we have the
        // right interface, including any necessary validation and potential alignment.
        buffer->SetPlaneDataOffset(i, planes[i].mOffset);
        bytesUsed += planes[i].mOffset;
        // Workaround: filling length should not be needed. This is a bug of videobuf2 library.
        buffer->SetPlaneSize(i, mInputLayout->planes()[i].size + planes[i].mOffset);
        buffer->SetPlaneBytesUsed(i, bytesUsed);
    }

    // Queue the frame's DMA-buf file descriptors on the device.
    std::move(*buffer).QueueDMABuf(frame->getFDs());

    ALOGV("Queued buffer in input queue (index: %" PRId64 ", timestamp: %" PRId64
          ", bufferId: %zu)",
          index, timestamp, bufferId);

    // Keep the frame alive (and remember its work index) until the buffer is dequeued again in
    // dequeueInputBuffer().
    mInputBuffersMap[bufferId] = {index, std::move(frame)};

    return true;
}
1480 
enqueueOutputBuffer()1481 bool V4L2EncodeComponent::enqueueOutputBuffer() {
1482     ALOGV("%s()", __func__);
1483     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1484     ALOG_ASSERT(mOutputQueue->FreeBuffersCount() > 0);
1485 
1486     auto buffer = mOutputQueue->GetFreeBuffer();
1487     if (!buffer) {
1488         ALOGE("Failed to get free buffer from device output queue");
1489         reportError(C2_CORRUPTED);
1490         return false;
1491     }
1492 
1493     std::shared_ptr<C2LinearBlock> outputBlock = fetchOutputBlock();
1494     if (!outputBlock) {
1495         ALOGE("Failed to fetch output block");
1496         reportError(C2_CORRUPTED);
1497         return false;
1498     }
1499 
1500     size_t bufferId = buffer->BufferId();
1501 
1502     std::vector<int> fds;
1503     fds.push_back(outputBlock->handle()->data[0]);
1504     if (!std::move(*buffer).QueueDMABuf(fds)) {
1505         ALOGE("Failed to queue output buffer using QueueDMABuf");
1506         reportError(C2_CORRUPTED);
1507         return false;
1508     }
1509 
1510     ALOG_ASSERT(!mOutputBuffersMap[bufferId]);
1511     mOutputBuffersMap[bufferId] = std::move(outputBlock);
1512     ALOGV("%s(): Queued buffer in output queue (bufferId: %zu)", __func__, bufferId);
1513     return true;
1514 }
1515 
dequeueInputBuffer()1516 bool V4L2EncodeComponent::dequeueInputBuffer() {
1517     ALOGV("%s()", __func__);
1518     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1519     ALOG_ASSERT(mEncoderState != EncoderState::UNINITIALIZED);
1520     ALOG_ASSERT(mInputQueue->QueuedBuffersCount() > 0);
1521 
1522     std::pair<bool, media::V4L2ReadableBufferRef> result = mInputQueue->DequeueBuffer();
1523     if (!result.first) {
1524         ALOGE("Failed to dequeue buffer from input queue");
1525         reportError(C2_CORRUPTED);
1526         return false;
1527     }
1528     if (!result.second) {
1529         // No more buffers ready to be dequeued in input queue.
1530         return false;
1531     }
1532 
1533     const media::V4L2ReadableBufferRef buffer = std::move(result.second);
1534     uint64_t index = mInputBuffersMap[buffer->BufferId()].first;
1535     int64_t timestamp = buffer->GetTimeStamp().tv_usec +
1536                         buffer->GetTimeStamp().tv_sec * ::base::Time::kMicrosecondsPerSecond;
1537     ALOGV("Dequeued buffer from input queue (index: %" PRId64 ", timestamp: %" PRId64
1538           ", bufferId: %zu)",
1539           index, timestamp, buffer->BufferId());
1540 
1541     mInputBuffersMap[buffer->BufferId()].second = nullptr;
1542     onInputBufferDone(index);
1543 
1544     return true;
1545 }
1546 
// Dequeue a single completed buffer from the V4L2 output (VIDEO_CAPTURE) queue, hand the encoded
// data to onOutputBufferDone() and queue a replacement output buffer. Also handles the end of a
// drain operation. Returns false if no buffer was ready or an error occurred.
bool V4L2EncodeComponent::dequeueOutputBuffer() {
    ALOGV("%s()", __func__);
    ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
    ALOG_ASSERT(mEncoderState != EncoderState::UNINITIALIZED);
    ALOG_ASSERT(mOutputQueue->QueuedBuffersCount() > 0);

    // DequeueBuffer() returns <success, buffer>; success with a null buffer means no completed
    // buffers are pending, which is not an error.
    std::pair<bool, media::V4L2ReadableBufferRef> result = mOutputQueue->DequeueBuffer();
    if (!result.first) {
        ALOGE("Failed to dequeue buffer from output queue");
        reportError(C2_CORRUPTED);
        return false;
    }
    if (!result.second) {
        // No more buffers ready to be dequeued in output queue.
        return false;
    }

    media::V4L2ReadableBufferRef buffer = std::move(result.second);
    // The encoded payload starts at the plane's data offset; only the bytes past it are bitstream.
    size_t encodedDataSize = buffer->GetPlaneBytesUsed(0) - buffer->GetPlaneDataOffset(0);
    // Reconstruct the microsecond timestamp that was stamped on the matching input buffer in
    // enqueueInputBuffer().
    ::base::TimeDelta timestamp = ::base::TimeDelta::FromMicroseconds(
            buffer->GetTimeStamp().tv_usec +
            buffer->GetTimeStamp().tv_sec * ::base::Time::kMicrosecondsPerSecond);

    ALOGV("Dequeued buffer from output queue (timestamp: %" PRId64
          ", bufferId: %zu, data size: %zu, EOS: %d)",
          timestamp.InMicroseconds(), buffer->BufferId(), encodedDataSize, buffer->IsLast());

    if (!mOutputBuffersMap[buffer->BufferId()]) {
        ALOGE("Failed to find output block associated with output buffer");
        reportError(C2_CORRUPTED);
        return false;
    }

    // Take the linear block out of the map; an empty buffer (e.g. one that only carries the LAST
    // flag after draining) is not reported downstream.
    std::shared_ptr<C2LinearBlock> block = std::move(mOutputBuffersMap[buffer->BufferId()]);
    if (encodedDataSize > 0) {
        onOutputBufferDone(encodedDataSize, buffer->IsKeyframe(), timestamp.InMicroseconds(),
                           std::move(block));
    }

    // If the buffer is marked as last and we were flushing the encoder, flushing is now done.
    if ((mEncoderState == EncoderState::DRAINING) && buffer->IsLast()) {
        onDrainDone(true);

        // Start the encoder again.
        struct v4l2_encoder_cmd cmd;
        memset(&cmd, 0, sizeof(v4l2_encoder_cmd));
        cmd.cmd = V4L2_ENC_CMD_START;
        if (mDevice->Ioctl(VIDIOC_ENCODER_CMD, &cmd) != 0) {
            ALOGE("Failed to restart encoder after flushing (V4L2_ENC_CMD_START)");
            reportError(C2_CORRUPTED);
            return false;
        }
    }

    // Queue a new output buffer to replace the one we dequeued.
    // Drop our reference to the dequeued buffer first, so it can be reused by the queue.
    buffer = nullptr;
    enqueueOutputBuffer();

    return true;
}
1607 
createInputBuffers()1608 bool V4L2EncodeComponent::createInputBuffers() {
1609     ALOGV("%s()", __func__);
1610     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1611     ALOG_ASSERT(!mInputQueue->IsStreaming());
1612     ALOG_ASSERT(mInputBuffersMap.empty());
1613 
1614     // No memory is allocated here, we just generate a list of buffers on the input queue, which
1615     // will hold memory handles to the real buffers.
1616     if (mInputQueue->AllocateBuffers(kInputBufferCount, V4L2_MEMORY_DMABUF) < kInputBufferCount) {
1617         ALOGE("Failed to create V4L2 input buffers.");
1618         return false;
1619     }
1620 
1621     mInputBuffersMap.resize(mInputQueue->AllocatedBuffersCount());
1622     return true;
1623 }
1624 
createOutputBuffers()1625 bool V4L2EncodeComponent::createOutputBuffers() {
1626     ALOGV("%s()", __func__);
1627     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1628     ALOG_ASSERT(!mOutputQueue->IsStreaming());
1629     ALOG_ASSERT(mOutputBuffersMap.empty());
1630 
1631     // Fetch the output block pool.
1632     C2BlockPool::local_id_t poolId = mInterface->getBlockPoolId();
1633     c2_status_t status = GetCodec2BlockPool(poolId, shared_from_this(), &mOutputBlockPool);
1634     if (status != C2_OK || !mOutputBlockPool) {
1635         ALOGE("Failed to get output block pool, error: %d", status);
1636         return false;
1637     }
1638 
1639     // No memory is allocated here, we just generate a list of buffers on the output queue, which
1640     // will hold memory handles to the real buffers.
1641     if (mOutputQueue->AllocateBuffers(kOutputBufferCount, V4L2_MEMORY_DMABUF) <
1642         kOutputBufferCount) {
1643         ALOGE("Failed to create V4L2 output buffers.");
1644         return false;
1645     }
1646 
1647     mOutputBuffersMap.resize(mOutputQueue->AllocatedBuffersCount());
1648     return true;
1649 }
1650 
destroyInputBuffers()1651 void V4L2EncodeComponent::destroyInputBuffers() {
1652     ALOGV("%s()", __func__);
1653     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1654     ALOG_ASSERT(!mInputQueue->IsStreaming());
1655 
1656     if (!mInputQueue || mInputQueue->AllocatedBuffersCount() == 0) return;
1657     mInputQueue->DeallocateBuffers();
1658     mInputBuffersMap.clear();
1659 }
1660 
destroyOutputBuffers()1661 void V4L2EncodeComponent::destroyOutputBuffers() {
1662     ALOGV("%s()", __func__);
1663     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1664     ALOG_ASSERT(!mOutputQueue->IsStreaming());
1665 
1666     if (!mOutputQueue || mOutputQueue->AllocatedBuffersCount() == 0) return;
1667     mOutputQueue->DeallocateBuffers();
1668     mOutputBuffersMap.clear();
1669     mOutputBlockPool.reset();
1670 }
1671 
reportError(c2_status_t error)1672 void V4L2EncodeComponent::reportError(c2_status_t error) {
1673     ALOGV("%s()", __func__);
1674     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1675 
1676     {
1677         std::lock_guard<std::mutex> lock(mComponentLock);
1678         setComponentState(ComponentState::ERROR);
1679     }
1680 
1681     // TODO(dstaessens): Report all pending work items as finished upon failure.
1682     if (mEncoderState != EncoderState::ERROR) {
1683         setEncoderState(EncoderState::ERROR);
1684         mListener->onError_nb(shared_from_this(), static_cast<uint32_t>(error));
1685     }
1686 }
1687 
setComponentState(ComponentState state)1688 void V4L2EncodeComponent::setComponentState(ComponentState state) {
1689     // Check whether the state change is valid.
1690     switch (state) {
1691     case ComponentState::UNLOADED:
1692         ALOG_ASSERT(mComponentState == ComponentState::LOADED);
1693         break;
1694     case ComponentState::LOADED:
1695         ALOG_ASSERT(mComponentState == ComponentState::UNLOADED ||
1696                     mComponentState == ComponentState::RUNNING ||
1697                     mComponentState == ComponentState::ERROR);
1698         break;
1699     case ComponentState::RUNNING:
1700         ALOG_ASSERT(mComponentState == ComponentState::LOADED);
1701         break;
1702     case ComponentState::ERROR:
1703         break;
1704     }
1705 
1706     ALOGV("Changed component state from %s to %s", componentStateToString(mComponentState),
1707           componentStateToString(state));
1708     mComponentState = state;
1709 }
1710 
setEncoderState(EncoderState state)1711 void V4L2EncodeComponent::setEncoderState(EncoderState state) {
1712     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1713 
1714     // Check whether the state change is valid.
1715     switch (state) {
1716     case EncoderState::UNINITIALIZED:
1717         // TODO(dstaessens): Check all valid state changes.
1718         break;
1719     case EncoderState::WAITING_FOR_INPUT:
1720         ALOG_ASSERT(mEncoderState == EncoderState::UNINITIALIZED ||
1721                     mEncoderState == EncoderState::ENCODING ||
1722                     mEncoderState == EncoderState::DRAINING);
1723         break;
1724     case EncoderState::WAITING_FOR_INPUT_BUFFERS:
1725         ALOG_ASSERT(mEncoderState == EncoderState::ENCODING);
1726         break;
1727     case EncoderState::ENCODING:
1728         ALOG_ASSERT(mEncoderState == EncoderState::WAITING_FOR_INPUT ||
1729                     mEncoderState == EncoderState::WAITING_FOR_INPUT_BUFFERS ||
1730                     mEncoderState == EncoderState::DRAINING);
1731         break;
1732     case EncoderState::DRAINING:
1733         ALOG_ASSERT(mEncoderState == EncoderState::ENCODING);
1734         break;
1735     case EncoderState::ERROR:
1736         break;
1737     }
1738 
1739     ALOGV("Changed encoder state from %s to %s", encoderStateToString(mEncoderState),
1740           encoderStateToString(state));
1741     mEncoderState = state;
1742 }
1743 
componentStateToString(V4L2EncodeComponent::ComponentState state)1744 const char* V4L2EncodeComponent::componentStateToString(V4L2EncodeComponent::ComponentState state) {
1745     switch (state) {
1746     case ComponentState::UNLOADED:
1747         return "UNLOADED";
1748     case ComponentState::LOADED:
1749         return "LOADED";
1750     case ComponentState::RUNNING:
1751         return "RUNNING";
1752     case ComponentState::ERROR:
1753         return "ERROR";
1754     }
1755 }
1756 
encoderStateToString(V4L2EncodeComponent::EncoderState state)1757 const char* V4L2EncodeComponent::encoderStateToString(V4L2EncodeComponent::EncoderState state) {
1758     switch (state) {
1759     case EncoderState::UNINITIALIZED:
1760         return "UNINITIALIZED";
1761     case EncoderState::WAITING_FOR_INPUT:
1762         return "WAITING_FOR_INPUT";
1763     case EncoderState::WAITING_FOR_INPUT_BUFFERS:
1764         return "WAITING_FOR_INPUT_BUFFERS";
1765     case EncoderState::ENCODING:
1766         return "ENCODING";
1767     case EncoderState::DRAINING:
1768         return "Draining";
1769     case EncoderState::ERROR:
1770         return "ERROR";
1771     }
1772 }
1773 
1774 }  // namespace android
1775