// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Note: ported from Chromium commit head: 91175b1
// Note: image processor is not ported.

#include "v4l2_video_decode_accelerator.h"

#include <dlfcn.h>
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <string.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "base/bind.h"
#include "base/command_line.h"
#include "base/memory/ptr_util.h"
#include "base/message_loop/message_loop.h"
#include "base/numerics/safe_conversions.h"
#include "base/posix/eintr_wrapper.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "h264_parser.h"
#include "rect.h"
#include "shared_memory_region.h"

#define DVLOGF(level) DVLOG(level) << __func__ << "(): "
#define VLOGF(level) VLOG(level) << __func__ << "(): "
#define VPLOGF(level) VPLOG(level) << __func__ << "(): "

#define NOTIFY_ERROR(x)                      \
  do {                                       \
    VLOGF(1) << "Setting error state:" << x; \
    SetErrorState(x);                        \
  } while (0)

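// Helper macros wrapping device_->Ioctl(). The IOCTL_OR_ERROR_RETURN*
// variants log the failure, move the decoder into the error state via
// NOTIFY_ERROR(PLATFORM_FAILURE) and return from the calling function;
// IOCTL_OR_LOG_ERROR only logs and continues.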
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
  do {                                                          \
    if (device_->Ioctl(type, arg) != 0) {                       \
      VPLOGF(1) << "ioctl() failed: " << type_str;              \
      NOTIFY_ERROR(PLATFORM_FAILURE);                           \
      return value;                                             \
    }                                                           \
  } while (0)

#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)

#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)

#define IOCTL_OR_LOG_ERROR(type, arg)           \
  do {                                          \
    if (device_->Ioctl(type, arg) != 0)         \
      VPLOGF(1) << "ioctl() failed: " << #type; \
  } while (0)

namespace media {

// static
const uint32_t V4L2VideoDecodeAccelerator::supported_input_fourccs_[] = {
    V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9,
};

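// A BitstreamBufferRef owns the mapped shared memory backing one bitstream
// buffer handed to us via Decode(). Its destructor posts
// NotifyEndOfBitstreamBuffer() back to the client (for real buffers, i.e.
// input_id >= 0), so simply destroying the ref returns the buffer.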
struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(
      base::WeakPtr<Client>& client,
      scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
      std::unique_ptr<SharedMemoryRegion> shm,
      int32_t input_id);
  ~BitstreamBufferRef();
  const base::WeakPtr<Client> client;
  const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
  const std::unique_ptr<SharedMemoryRegion> shm;
  size_t bytes_used;
  const int32_t input_id;
};

V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
    base::WeakPtr<Client>& client,
    scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
    std::unique_ptr<SharedMemoryRegion> shm,
    int32_t input_id)
    : client(client),
      client_task_runner(client_task_runner),
      shm(std::move(shm)),
      bytes_used(0),
      input_id(input_id) {}

V4L2VideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
  if (input_id >= 0) {
    client_task_runner->PostTask(
        FROM_HERE,
        base::Bind(&Client::NotifyEndOfBitstreamBuffer, client, input_id));
  }
}

V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
    : at_device(false), address(NULL), length(0), bytes_used(0), input_id(-1) {}

V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {}

V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
    : state(kFree),
      picture_id(-1),
      cleared(false) {}

V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}

V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(bool cleared,
                                                         const Picture& picture)
    : cleared(cleared), picture(picture) {}

V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}

V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
    const scoped_refptr<V4L2Device>& device)
    : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      decoder_thread_("V4L2DecoderThread"),
      decoder_state_(kUninitialized),
      output_mode_(Config::OutputMode::ALLOCATE),
      device_(device),
      decoder_delay_bitstream_buffer_id_(-1),
      decoder_current_input_buffer_(-1),
      decoder_decode_buffer_tasks_scheduled_(0),
      decoder_frames_at_client_(0),
      decoder_flushing_(false),
      decoder_cmd_supported_(false),
      flush_awaiting_last_output_buffer_(false),
      reset_pending_(false),
      decoder_partial_frame_pending_(false),
      input_streamon_(false),
      input_buffer_queued_count_(0),
      output_streamon_(false),
      output_buffer_queued_count_(0),
      output_dpb_size_(0),
      output_planes_count_(0),
      picture_clearing_count_(0),
      device_poll_thread_("V4L2DevicePollThread"),
      video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN),
      input_format_fourcc_(0),
      output_format_fourcc_(0),
      weak_this_factory_(this) {
  weak_this_ = weak_this_factory_.GetWeakPtr();
}

V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
  DCHECK(!decoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());
  DVLOGF(2);

  // These maps have members that should be manually destroyed, e.g. file
  // descriptors, mmap() segments, etc.
  DCHECK(input_buffer_map_.empty());
  DCHECK(output_buffer_map_.empty());
}

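// The public entry points below run on the child (GPU) thread and post their
// work to decoder_thread_. Decode() is the exception: it runs on
// decode_task_runner_, which is the child thread unless the client set up a
// separate decode thread via TryToSetupDecodeOnSeparateThread().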
bool V4L2VideoDecodeAccelerator::Initialize(const Config& config,
                                            Client* client) {
  VLOGF(2) << "profile: " << config.profile
           << ", output_mode=" << static_cast<int>(config.output_mode);
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(decoder_state_, kUninitialized);

  if (config.output_mode != Config::OutputMode::IMPORT) {
    NOTREACHED() << "Only IMPORT OutputModes are supported";
    return false;
  }

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();
  // If we haven't been set up to decode on separate thread via
  // TryToSetupDecodeOnSeparateThread(), use the main thread/client for
  // decode tasks.
  if (!decode_task_runner_) {
    decode_task_runner_ = child_task_runner_;
    DCHECK(!decode_client_);
    decode_client_ = client_;
  }

  video_profile_ = config.profile;

  input_format_fourcc_ =
      V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_);

  if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
    VLOGF(1) << "Failed to open device for profile: " << config.profile
             << " fourcc: " << std::hex << "0x" << input_format_fourcc_;
    return false;
  }

  // Capabilities check.
  struct v4l2_capability caps;
  const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    VLOGF(1) << "ioctl() failed: VIDIOC_QUERYCAP"
             << ", caps check failed: 0x" << std::hex << caps.capabilities;
    return false;
  }

  if (!SetupFormats())
    return false;

  if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
    decoder_h264_parser_.reset(new H264Parser());
  }

  if (!decoder_thread_.Start()) {
    VLOGF(1) << "decoder thread failed to start";
    return false;
  }

  decoder_state_ = kInitialized;
  output_mode_ = config.output_mode;

  // InitializeTask will NOTIFY_ERROR on failure.
  decoder_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::InitializeTask,
                            base::Unretained(this)));

  return true;
}

void V4L2VideoDecodeAccelerator::InitializeTask() {
  VLOGF(2);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_EQ(decoder_state_, kInitialized);

  // Subscribe to the resolution change event.
  struct v4l2_event_subscription sub;
  memset(&sub, 0, sizeof(sub));
  sub.type = V4L2_EVENT_SOURCE_CHANGE;
  IOCTL_OR_ERROR_RETURN(VIDIOC_SUBSCRIBE_EVENT, &sub);

  if (!CreateInputBuffers()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  decoder_cmd_supported_ = IsDecoderCmdSupported();

  if (!StartDevicePoll())
    return;
}

void V4L2VideoDecodeAccelerator::Decode(
    const BitstreamBuffer& bitstream_buffer) {
  DVLOGF(4) << "input_id=" << bitstream_buffer.id()
            << ", size=" << bitstream_buffer.size();
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  if (bitstream_buffer.id() < 0) {
    VLOGF(1) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id();
    if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle()))
      base::SharedMemory::CloseHandle(bitstream_buffer.handle());
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  // DecodeTask() will take care of running a DecodeBufferTask().
  decoder_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeTask,
                            base::Unretained(this), bitstream_buffer));
}

void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<PictureBuffer>& buffers) {
  VLOGF(2) << "buffer_count=" << buffers.size();
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  decoder_thread_.task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoDecodeAccelerator::AssignPictureBuffersTask,
                 base::Unretained(this), buffers));
}

void V4L2VideoDecodeAccelerator::AssignPictureBuffersTask(
    const std::vector<PictureBuffer>& buffers) {
  VLOGF(2);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_EQ(decoder_state_, kAwaitingPictureBuffers);

  uint32_t req_buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount;

  if (buffers.size() < req_buffer_count) {
    VLOGF(1) << "Failed to provide requested picture buffers. (Got "
             << buffers.size() << ", requested " << req_buffer_count << ")";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  // Allocate the output buffers.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = buffers.size();
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);

  if (reqbufs.count != buffers.size()) {
    VLOGF(1) << "Could not allocate enough output buffers";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  DCHECK(free_output_buffers_.empty());
  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(buffers.size());

  // Always use IMPORT output mode for Android solution.
  DCHECK_EQ(output_mode_, Config::OutputMode::IMPORT);

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    DCHECK_EQ(output_record.state, kFree);
    DCHECK_EQ(output_record.picture_id, -1);
    DCHECK_EQ(output_record.cleared, false);

    output_record.picture_id = buffers[i].id();

    // This will remain kAtClient until ImportBufferForPicture is called,
    // either by the client, or by ourselves, if we are allocating.
    output_record.state = kAtClient;

    DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
  }
}

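// In IMPORT mode the client allocates the picture buffers (as dmabufs) and
// hands their file descriptors to the decoder here; the corresponding output
// record becomes usable (kFree) once ImportBufferForPictureTask() has taken
// ownership of the fds.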
void V4L2VideoDecodeAccelerator::ImportBufferForPicture(
    int32_t picture_buffer_id,
    VideoPixelFormat pixel_format,
    const NativePixmapHandle& native_pixmap_handle) {
  DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (output_mode_ != Config::OutputMode::IMPORT) {
    VLOGF(1) << "Cannot import in non-import mode";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  if (pixel_format !=
      V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_)) {
    VLOGF(1) << "Unsupported import format: " << pixel_format;
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  std::vector<base::ScopedFD> dmabuf_fds;
  for (const auto& fd : native_pixmap_handle.fds) {
    DCHECK_NE(fd.fd, -1);
    dmabuf_fds.push_back(base::ScopedFD(fd.fd));
  }

  decoder_thread_.task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoDecodeAccelerator::ImportBufferForPictureTask,
                 base::Unretained(this), picture_buffer_id,
                 base::Passed(&dmabuf_fds)));
}

void V4L2VideoDecodeAccelerator::ImportBufferForPictureTask(
    int32_t picture_buffer_id,
    std::vector<base::ScopedFD> dmabuf_fds) {
  DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id
            << ", dmabuf_fds.size()=" << dmabuf_fds.size();
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());

  const auto iter =
      std::find_if(output_buffer_map_.begin(), output_buffer_map_.end(),
                   [picture_buffer_id](const OutputRecord& output_record) {
                     return output_record.picture_id == picture_buffer_id;
                   });
  if (iter == output_buffer_map_.end()) {
    // It's possible that we've already posted a DismissPictureBuffer for this
    // picture, but it has not yet executed when this ImportBufferForPicture
    // was posted to us by the client. In that case just ignore this (we've
    // already dismissed it and accounted for that).
    DVLOGF(3) << "got picture id=" << picture_buffer_id
              << " not in use (anymore?).";
    return;
  }

  if (iter->state != kAtClient) {
    VLOGF(1) << "Cannot import buffer not owned by client";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  size_t index = iter - output_buffer_map_.begin();
  DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
                       index),
            0);

  iter->state = kFree;

  DCHECK_EQ(output_planes_count_, dmabuf_fds.size());

  iter->processor_output_fds.swap(dmabuf_fds);
  free_output_buffers_.push_back(index);
  if (decoder_state_ != kChangingResolution) {
    Enqueue();
    ScheduleDecodeBufferTaskIfNeeded();
  }
}

void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) {
  DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
  // Must be run on child thread, as we'll insert a sync in the EGL context.
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  decoder_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
                            base::Unretained(this), picture_buffer_id));
}

void V4L2VideoDecodeAccelerator::Flush() {
  VLOGF(2);
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  decoder_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::FlushTask,
                            base::Unretained(this)));
}

void V4L2VideoDecodeAccelerator::Reset() {
  VLOGF(2);
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  decoder_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ResetTask,
                            base::Unretained(this)));
}

void V4L2VideoDecodeAccelerator::Destroy() {
  VLOGF(2);
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.reset();
  weak_this_factory_.InvalidateWeakPtrs();

  // If the decoder thread is running, destroy using posted task.
  if (decoder_thread_.IsRunning()) {
    decoder_thread_.task_runner()->PostTask(
        FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DestroyTask,
                              base::Unretained(this)));
    // DestroyTask() will cause the decoder_thread_ to flush all tasks.
    decoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  delete this;
  VLOGF(2) << "Destroyed.";
}

bool V4L2VideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
    const base::WeakPtr<Client>& decode_client,
    const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) {
  VLOGF(2);
  decode_client_ = decode_client;
  decode_task_runner_ = decode_task_runner;
  return true;
}

// static
VideoDecodeAccelerator::SupportedProfiles
V4L2VideoDecodeAccelerator::GetSupportedProfiles() {
  scoped_refptr<V4L2Device> device(new V4L2Device());
  if (!device)
    return SupportedProfiles();

  return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_),
                                            supported_input_fourccs_);
}

void V4L2VideoDecodeAccelerator::DecodeTask(
    const BitstreamBuffer& bitstream_buffer) {
  DVLOGF(4) << "input_id=" << bitstream_buffer.id();
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);

  std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
      decode_client_, decode_task_runner_,
      std::unique_ptr<SharedMemoryRegion>(
          new SharedMemoryRegion(bitstream_buffer, true)),
      bitstream_buffer.id()));

  // Skip empty buffer.
  if (bitstream_buffer.size() == 0)
    return;

  if (!bitstream_record->shm->Map()) {
    VLOGF(1) << "could not map bitstream_buffer";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return;
  }
  DVLOGF(4) << "mapped at=" << bitstream_record->shm->memory();

  if (decoder_state_ == kResetting || decoder_flushing_) {
    // In the case that we're resetting or flushing, we need to delay decoding
    // the BitstreamBuffers that come after the Reset() or Flush() call.  When
    // we're here, we know that this DecodeTask() was scheduled by a Decode()
    // call that came after (in the client thread) the Reset() or Flush() call;
    // thus set up the delay if necessary.
    if (decoder_delay_bitstream_buffer_id_ == -1)
      decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
  } else if (decoder_state_ == kError) {
    VLOGF(2) << "early out: kError state";
    return;
  }

  decoder_input_queue_.push(
      linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
  decoder_decode_buffer_tasks_scheduled_++;
  DecodeBufferTask();
}

void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
  DVLOGF(4);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);

  decoder_decode_buffer_tasks_scheduled_--;

  if (decoder_state_ != kInitialized && decoder_state_ != kDecoding) {
    DVLOGF(3) << "early out: state=" << decoder_state_;
    return;
  }

  if (decoder_current_bitstream_buffer_ == NULL) {
    if (decoder_input_queue_.empty()) {
      // We're waiting for a new buffer -- exit without scheduling a new task.
      return;
    }
    linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
    if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
      // We're asked to delay decoding on this and subsequent buffers.
      return;
    }

    // Setup to use the next buffer.
    decoder_current_bitstream_buffer_.reset(buffer_ref.release());
    decoder_input_queue_.pop();
    const auto& shm = decoder_current_bitstream_buffer_->shm;
    if (shm) {
      DVLOGF(4) << "reading input_id="
                << decoder_current_bitstream_buffer_->input_id
                << ", addr=" << shm->memory() << ", size=" << shm->size();
    } else {
      DCHECK_EQ(decoder_current_bitstream_buffer_->input_id, kFlushBufferId);
      DVLOGF(4) << "reading input_id=kFlushBufferId";
    }
  }
  bool schedule_task = false;
  size_t decoded_size = 0;
  const auto& shm = decoder_current_bitstream_buffer_->shm;
  if (!shm) {
    // This is a dummy buffer, queued to flush the pipe.  Flush.
    DCHECK_EQ(decoder_current_bitstream_buffer_->input_id, kFlushBufferId);
    // Enqueue a buffer guaranteed to be empty.  To do that, we flush the
    // current input, enqueue no data to the next frame, then flush that down.
    schedule_task = true;
    if (decoder_current_input_buffer_ != -1 &&
        input_buffer_map_[decoder_current_input_buffer_].input_id !=
            kFlushBufferId)
      schedule_task = FlushInputFrame();

    if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
      VLOGF(2) << "enqueued flush buffer";
      decoder_partial_frame_pending_ = false;
      schedule_task = true;
    } else {
      // If we failed to enqueue the empty buffer (due to pipeline
      // backpressure), don't advance the bitstream buffer queue, and don't
      // schedule the next task.  This bitstream buffer queue entry will get
      // reprocessed when the pipeline frees up.
      schedule_task = false;
    }
  } else if (shm->size() == 0) {
    // This is a buffer queued from the client that has zero size.  Skip.
    schedule_task = true;
  } else {
    // This is a buffer queued from the client, with actual contents.  Decode.
    const uint8_t* const data =
        reinterpret_cast<const uint8_t*>(shm->memory()) +
        decoder_current_bitstream_buffer_->bytes_used;
    const size_t data_size =
        shm->size() - decoder_current_bitstream_buffer_->bytes_used;
    if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
      NOTIFY_ERROR(UNREADABLE_INPUT);
      return;
    }
    // AdvanceFrameFragment should not return a size larger than the buffer
    // size, even on invalid data.
    CHECK_LE(decoded_size, data_size);

    switch (decoder_state_) {
      case kInitialized:
        schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
        break;
      case kDecoding:
        schedule_task = DecodeBufferContinue(data, decoded_size);
        break;
      default:
        NOTIFY_ERROR(ILLEGAL_STATE);
        return;
    }
  }
  if (decoder_state_ == kError) {
    // Failed during decode.
    return;
  }

  if (schedule_task) {
    decoder_current_bitstream_buffer_->bytes_used += decoded_size;
    if ((shm ? shm->size() : 0) ==
        decoder_current_bitstream_buffer_->bytes_used) {
      // Our current bitstream buffer is done; return it.
      int32_t input_id = decoder_current_bitstream_buffer_->input_id;
      DVLOGF(4) << "finished input_id=" << input_id;
      // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
      decoder_current_bitstream_buffer_.reset();
    }
    ScheduleDecodeBufferTaskIfNeeded();
  }
}

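// Determines how much of |data| belongs to the current frame. For H.264 this
// requires parsing NALUs up to the next frame boundary; for VP8/VP9 the whole
// buffer is treated as a single frame. |*endpos| receives the number of bytes
// to consume, and decoder_partial_frame_pending_ is set when the frame
// continues into the next bitstream buffer.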
bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(const uint8_t* data,
                                                      size_t size,
                                                      size_t* endpos) {
  if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
    // For H264, we need to feed HW one frame at a time.  This is going to take
    // some parsing of our input stream.
    decoder_h264_parser_->SetStream(data, size);
    H264NALU nalu;
    H264Parser::Result result;
    *endpos = 0;

    // Keep on peeking the next NALs while they don't indicate a frame
    // boundary.
    for (;;) {
      bool end_of_frame = false;
      result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
      if (result == H264Parser::kInvalidStream ||
          result == H264Parser::kUnsupportedStream)
        return false;
      if (result == H264Parser::kEOStream) {
        // We've reached the end of the buffer before finding a frame boundary.
        decoder_partial_frame_pending_ = true;
        *endpos = size;
        return true;
      }
      switch (nalu.nal_unit_type) {
        case H264NALU::kNonIDRSlice:
        case H264NALU::kIDRSlice:
          if (nalu.size < 1)
            return false;
          // For these two, if the "first_mb_in_slice" field is zero, start a
          // new frame and return.  This field is Exp-Golomb coded starting on
          // the eighth data bit of the NAL; a zero value is encoded with a
          // leading '1' bit in the byte, which we can detect as the byte being
          // (unsigned) greater than or equal to 0x80.
          if (nalu.data[1] >= 0x80) {
            end_of_frame = true;
            break;
          }
          break;
        case H264NALU::kSEIMessage:
        case H264NALU::kSPS:
        case H264NALU::kPPS:
        case H264NALU::kAUD:
        case H264NALU::kEOSeq:
        case H264NALU::kEOStream:
        case H264NALU::kReserved14:
        case H264NALU::kReserved15:
        case H264NALU::kReserved16:
        case H264NALU::kReserved17:
        case H264NALU::kReserved18:
          // These unconditionally signal a frame boundary.
          end_of_frame = true;
          break;
        default:
          // For all others, keep going.
          break;
      }
      if (end_of_frame) {
        if (!decoder_partial_frame_pending_ && *endpos == 0) {
          // The frame was previously restarted, and we haven't filled the
          // current frame with any contents yet.  Start the new frame here and
          // continue parsing NALs.
        } else {
          // The frame wasn't previously restarted and/or we have contents for
          // the current frame; signal the start of a new frame here: we don't
          // have a partial frame anymore.
          decoder_partial_frame_pending_ = false;
          return true;
        }
      }
      *endpos = (nalu.data + nalu.size) - data;
    }
    NOTREACHED();
    return false;
  } else {
    DCHECK_GE(video_profile_, VP8PROFILE_MIN);
    DCHECK_LE(video_profile_, VP9PROFILE_MAX);
    // For VP8/9, we can just dump the entire buffer.  No fragmentation needed,
    // and we never return a partial frame.
    *endpos = size;
    decoder_partial_frame_pending_ = false;
    return true;
  }
}

void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());

  // If we're behind on tasks, schedule another one.
  int buffers_to_decode = decoder_input_queue_.size();
  if (decoder_current_bitstream_buffer_ != NULL)
    buffers_to_decode++;
  if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
    decoder_decode_buffer_tasks_scheduled_++;
    decoder_thread_.task_runner()->PostTask(
        FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DecodeBufferTask,
                              base::Unretained(this)));
  }
}

bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(const void* data,
                                                     size_t size,
                                                     size_t* endpos) {
  DVLOGF(3) << "data=" << data << ", size=" << size;
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_EQ(decoder_state_, kInitialized);
  // Initial decode.  We haven't been able to get output stream format info
  // yet.  Get it, and start decoding.

  // Copy in and send to HW.
  if (!AppendToInputFrame(data, size))
    return false;

  // If we only have a partial frame, don't flush and process yet.
  if (decoder_partial_frame_pending_)
    return true;

  if (!FlushInputFrame())
    return false;

  // Recycle buffers.
  Dequeue();

  *endpos = size;

  // If the initial resolution change event has not arrived yet, the driver
  // probably needs more of the stream before it can determine the format.
  // Return true and schedule the next buffer without changing state to
  // kDecoding.  Even once the initial resolution change is done and the coded
  // size is known, we may still have to wait for AssignPictureBuffers() and
  // the output buffers to be allocated.
  if (coded_size_.IsEmpty() || output_buffer_map_.empty()) {
    // Need more stream to decode format, return true and schedule next buffer.
    return true;
  }

  decoder_state_ = kDecoding;
  ScheduleDecodeBufferTaskIfNeeded();
  return true;
}

bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(const void* data,
                                                      size_t size) {
  DVLOGF(4) << "data=" << data << ", size=" << size;
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_EQ(decoder_state_, kDecoding);

  // Both of these calls will set kError state if they fail.
  // Only flush the frame if it's complete.
  return (AppendToInputFrame(data, size) &&
          (decoder_partial_frame_pending_ || FlushInputFrame()));
}

bool V4L2VideoDecodeAccelerator::AppendToInputFrame(const void* data,
                                                    size_t size) {
  DVLOGF(4);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);
  DCHECK_NE(decoder_state_, kError);
  // This routine can handle data == NULL and size == 0, which occurs when
  // we queue an empty buffer for the purposes of flushing the pipe.

  // Flush if we're too big
  if (decoder_current_input_buffer_ != -1) {
    InputRecord& input_record =
        input_buffer_map_[decoder_current_input_buffer_];
    if (input_record.bytes_used + size > input_record.length) {
      if (!FlushInputFrame())
        return false;
      decoder_current_input_buffer_ = -1;
    }
  }

  // Try to get an available input buffer
  if (decoder_current_input_buffer_ == -1) {
    if (free_input_buffers_.empty()) {
      // See if we can get more free buffers from HW
      Dequeue();
      if (free_input_buffers_.empty()) {
        // Nope!
        DVLOGF(4) << "stalled for input buffers";
        return false;
      }
    }
    decoder_current_input_buffer_ = free_input_buffers_.back();
    free_input_buffers_.pop_back();
    InputRecord& input_record =
        input_buffer_map_[decoder_current_input_buffer_];
    DCHECK_EQ(input_record.bytes_used, 0);
    DCHECK_EQ(input_record.input_id, -1);
    DCHECK(decoder_current_bitstream_buffer_ != NULL);
    input_record.input_id = decoder_current_bitstream_buffer_->input_id;
  }

  DCHECK(data != NULL || size == 0);
  if (size == 0) {
    // If we asked for an empty buffer, return now.  We return only after
    // getting the next input buffer, since we might actually want an empty
    // input buffer for flushing purposes.
    return true;
  }

  // Copy in to the buffer.
  InputRecord& input_record = input_buffer_map_[decoder_current_input_buffer_];
  if (size > input_record.length - input_record.bytes_used) {
    VLOGF(1) << "over-size frame, erroring";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return false;
  }
  memcpy(reinterpret_cast<uint8_t*>(input_record.address) +
             input_record.bytes_used,
         data, size);
  input_record.bytes_used += size;

  return true;
}

bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
  DVLOGF(4);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);
  DCHECK_NE(decoder_state_, kError);

  if (decoder_current_input_buffer_ == -1)
    return true;

  InputRecord& input_record = input_buffer_map_[decoder_current_input_buffer_];
  DCHECK_NE(input_record.input_id, -1);
  DCHECK(input_record.input_id != kFlushBufferId ||
         input_record.bytes_used == 0);
  // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
  //   got from the client.  We can skip it if it is empty.
  // * if input_id < 0 (should be kFlushBufferId in this case), this input
  //   buffer was prompted by a flush buffer, and should be queued even when
  //   empty.
  if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
    input_record.input_id = -1;
    free_input_buffers_.push_back(decoder_current_input_buffer_);
    decoder_current_input_buffer_ = -1;
    return true;
  }

  // Queue it.
  input_ready_queue_.push(decoder_current_input_buffer_);
  decoder_current_input_buffer_ = -1;
  DVLOGF(4) << "submitting input_id=" << input_record.input_id;
  // Enqueue once since there's new available input for it.
  Enqueue();

  return (decoder_state_ != kError);
}

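// ServiceDeviceTask() is scheduled from DevicePollTask() whenever the device
// (or the poll interrupt fd) becomes ready. It dequeues completed buffers,
// enqueues new work, and re-arms the poll. |event_pending| indicates that a
// V4L2 event (e.g. a source change) may be waiting to be dequeued.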
void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
  DVLOGF(4);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);

  if (decoder_state_ == kResetting) {
    DVLOGF(3) << "early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOGF(3) << "early out: kError state";
    return;
  } else if (decoder_state_ == kChangingResolution) {
    DVLOGF(3) << "early out: kChangingResolution state";
    return;
  }

  bool resolution_change_pending = false;
  if (event_pending)
    resolution_change_pending = DequeueResolutionChangeEvent();

  if (!resolution_change_pending && coded_size_.IsEmpty()) {
    // Some platforms do not send an initial resolution change event.
    // To work around this, we need to keep checking if the initial resolution
    // is known already by explicitly querying the format after each decode,
    // regardless of whether we received an event.
    // This needs to be done on initial resolution change,
    // i.e. when coded_size_.IsEmpty().

    // Try GetFormatInfo to check if an initial resolution change can be done.
    struct v4l2_format format;
    Size visible_size;
    bool again;
    if (GetFormatInfo(&format, &visible_size, &again) && !again) {
      resolution_change_pending = true;
      DequeueResolutionChangeEvent();
    }
  }

  Dequeue();
  Enqueue();

  // Clear the interrupt fd.
  if (!device_->ClearDevicePollInterrupt()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  bool poll_device = false;
  // Add fd, if we should poll on it.
  // Can be polled as soon as either input or output buffers are queued.
  if (input_buffer_queued_count_ + output_buffer_queued_count_ > 0)
    poll_device = true;

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
  //   shut it down, in which case we're either in kResetting or kError states
  //   respectively, and we should have early-outed already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
                            base::Unretained(this), poll_device));

  DVLOGF(3) << "ServiceDeviceTask(): buffer counts: DEC["
            << decoder_input_queue_.size() << "->"
            << input_ready_queue_.size() << "] => DEVICE["
            << free_input_buffers_.size() << "+"
            << input_buffer_queued_count_ << "/"
            << input_buffer_map_.size() << "->"
            << free_output_buffers_.size() << "+"
            << output_buffer_queued_count_ << "/"
            << output_buffer_map_.size() << "] => CLIENT["
            << decoder_frames_at_client_ << "]";

  ScheduleDecodeBufferTaskIfNeeded();
  if (resolution_change_pending)
    StartResolutionChange();
}

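// Enqueue() pushes as many ready input buffers and free output buffers to the
// device as it can, turning on streaming for each queue the first time a
// buffer is queued on it.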
void V4L2VideoDecodeAccelerator::Enqueue() {
  DVLOGF(4);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);

  // Drain the pipe of completed decode buffers.
  const int old_inputs_queued = input_buffer_queued_count_;
  while (!input_ready_queue_.empty()) {
    const int buffer = input_ready_queue_.front();
    InputRecord& input_record = input_buffer_map_[buffer];
    if (input_record.input_id == kFlushBufferId && decoder_cmd_supported_) {
      // Send the flush command after all input buffers are dequeued. This makes
      // sure all previous resolution changes have been handled because the
      // driver must hold the input buffer that triggers resolution change. The
      // driver cannot decode data in it without new output buffers. If we send
      // the flush now and a queued input buffer triggers resolution change
      // later, the driver will send an output buffer that has
      // V4L2_BUF_FLAG_LAST. But some queued input buffers have not been decoded
      // yet. Also, V4L2VDA calls STREAMOFF and STREAMON after resolution
      // change. They implicitly send a V4L2_DEC_CMD_STOP and V4L2_DEC_CMD_START
      // to the decoder.
      if (input_buffer_queued_count_ == 0) {
        if (!SendDecoderCmdStop())
          return;
        input_ready_queue_.pop();
        free_input_buffers_.push_back(buffer);
        input_record.input_id = -1;
      } else {
        break;
      }
    } else if (!EnqueueInputRecord())
      return;
  }
  if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt()) {
      VPLOGF(1) << "SetDevicePollInterrupt failed";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      input_streamon_ = true;
    }
  }

  // Enqueue all the outputs we can.
  const int old_outputs_queued = output_buffer_queued_count_;
  while (!free_output_buffers_.empty()) {
    if (!EnqueueOutputRecord())
      return;
  }
  if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt()) {
      VPLOGF(1) << "SetDevicePollInterrupt(): failed";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      output_streamon_ = true;
    }
  }
}

bool V4L2VideoDecodeAccelerator::DequeueResolutionChangeEvent() {
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);
  DVLOGF(3);

  struct v4l2_event ev;
  memset(&ev, 0, sizeof(ev));

  while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
    if (ev.type == V4L2_EVENT_SOURCE_CHANGE) {
      if (ev.u.src_change.changes & V4L2_EVENT_SRC_CH_RESOLUTION) {
        VLOGF(2) << "got resolution change event.";
        return true;
      }
    } else {
      VLOGF(1) << "got an event (" << ev.type << ") we haven't subscribed to.";
    }
  }
  return false;
}

void V4L2VideoDecodeAccelerator::Dequeue() {
  DVLOGF(4);
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_NE(decoder_state_, kUninitialized);

  while (input_buffer_queued_count_ > 0) {
    if (!DequeueInputBuffer())
      break;
  }
  while (output_buffer_queued_count_ > 0) {
    if (!DequeueOutputBuffer())
      break;
  }
  NotifyFlushDoneIfNeeded();
}

bool V4L2VideoDecodeAccelerator::DequeueInputBuffer() {
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_GT(input_buffer_queued_count_, 0);
  DCHECK(input_streamon_);

  // Dequeue a completed input (VIDEO_OUTPUT) buffer, and recycle to the free
  // list.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[1];
  memset(&dqbuf, 0, sizeof(dqbuf));
  memset(planes, 0, sizeof(planes));
  dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  dqbuf.memory = V4L2_MEMORY_MMAP;
  dqbuf.m.planes = planes;
  dqbuf.length = 1;
  if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
    if (errno == EAGAIN) {
      // EAGAIN if we're just out of buffers to dequeue.
      return false;
    }
    VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  InputRecord& input_record = input_buffer_map_[dqbuf.index];
  DCHECK(input_record.at_device);
  free_input_buffers_.push_back(dqbuf.index);
  input_record.at_device = false;
  input_record.bytes_used = 0;
  input_record.input_id = -1;
  input_buffer_queued_count_--;

  return true;
}

bool V4L2VideoDecodeAccelerator::DequeueOutputBuffer() {
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK_GT(output_buffer_queued_count_, 0);
  DCHECK(output_streamon_);

  // Dequeue a completed output (VIDEO_CAPTURE) buffer, and queue to the
  // completed queue.
  struct v4l2_buffer dqbuf;
  std::unique_ptr<struct v4l2_plane[]> planes(
      new v4l2_plane[output_planes_count_]);
  memset(&dqbuf, 0, sizeof(dqbuf));
  memset(planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
  dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  dqbuf.memory = V4L2_MEMORY_MMAP;
  dqbuf.m.planes = planes.get();
  dqbuf.length = output_planes_count_;
  if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
    if (errno == EAGAIN) {
      // EAGAIN if we're just out of buffers to dequeue.
      return false;
    } else if (errno == EPIPE) {
      DVLOGF(3) << "Got EPIPE. Last output buffer was already dequeued.";
      return false;
    }
    VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  OutputRecord& output_record = output_buffer_map_[dqbuf.index];
  DCHECK_EQ(output_record.state, kAtDevice);
  DCHECK_NE(output_record.picture_id, -1);
  output_buffer_queued_count_--;
  if (dqbuf.m.planes[0].bytesused == 0) {
    // This is an empty output buffer returned as part of a flush.
    output_record.state = kFree;
    free_output_buffers_.push_back(dqbuf.index);
  } else {
    int32_t bitstream_buffer_id = dqbuf.timestamp.tv_sec;
    DCHECK_GE(bitstream_buffer_id, 0);
    DVLOGF(4) << "Dequeue output buffer: dqbuf index=" << dqbuf.index
              << " bitstream input_id=" << bitstream_buffer_id;
    output_record.state = kAtClient;
    decoder_frames_at_client_++;

    const Picture picture(output_record.picture_id, bitstream_buffer_id,
                          Rect(visible_size_), false);
    pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
    SendPictureReady();
    output_record.cleared = true;
  }
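  // A buffer flagged V4L2_BUF_FLAG_LAST is the final CAPTURE buffer produced
  // in response to the V4L2_DEC_CMD_STOP issued by SendDecoderCmdStop(). Once
  // it arrives, resume the decoder with V4L2_DEC_CMD_START so it will accept
  // new input after the flush completes.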
  if (dqbuf.flags & V4L2_BUF_FLAG_LAST) {
    DVLOGF(3) << "Got last output buffer. Waiting last buffer="
              << flush_awaiting_last_output_buffer_;
    if (flush_awaiting_last_output_buffer_) {
      flush_awaiting_last_output_buffer_ = false;
      struct v4l2_decoder_cmd cmd;
      memset(&cmd, 0, sizeof(cmd));
      cmd.cmd = V4L2_DEC_CMD_START;
      IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_DECODER_CMD, &cmd);
    }
  }
  return true;
}

bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
  DVLOGF(4);
  DCHECK(!input_ready_queue_.empty());

  // Enqueue an input (VIDEO_OUTPUT) buffer.
  const int buffer = input_ready_queue_.front();
  InputRecord& input_record = input_buffer_map_[buffer];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_plane;
  memset(&qbuf, 0, sizeof(qbuf));
  memset(&qbuf_plane, 0, sizeof(qbuf_plane));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
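  // The bitstream id rides through the device in the timestamp field;
  // DequeueOutputBuffer() reads it back from dqbuf.timestamp.tv_sec to match
  // each decoded picture with the input buffer it came from.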
  qbuf.timestamp.tv_sec = input_record.input_id;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = &qbuf_plane;
  qbuf.m.planes[0].bytesused = input_record.bytes_used;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  input_ready_queue_.pop();
  input_record.at_device = true;
  input_buffer_queued_count_++;
  DVLOGF(4) << "enqueued input_id=" << input_record.input_id
            << " size=" << input_record.bytes_used;
  return true;
}

bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
  DCHECK(!free_output_buffers_.empty());

  // Enqueue an output (VIDEO_CAPTURE) buffer.
  const int buffer = free_output_buffers_.front();
  DVLOGF(4) << "buffer " << buffer;
  OutputRecord& output_record = output_buffer_map_[buffer];
  DCHECK_EQ(output_record.state, kFree);
  DCHECK_NE(output_record.picture_id, -1);
  struct v4l2_buffer qbuf;
  std::unique_ptr<struct v4l2_plane[]> qbuf_planes(
      new v4l2_plane[output_planes_count_]);
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes.get(), 0,
         sizeof(struct v4l2_plane) * output_planes_count_);
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes.get();
  qbuf.length = output_planes_count_;
  DVLOGF(4) << "qbuf.index=" << qbuf.index;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  free_output_buffers_.pop_front();
  output_record.state = kAtDevice;
  output_buffer_queued_count_++;
  return true;
}

void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
    int32_t picture_buffer_id) {
  DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
  DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());

  // We run ReusePictureBufferTask even if we're in kResetting.
  if (decoder_state_ == kError) {
    DVLOGF(4) << "early out: kError state";
    return;
  }

  if (decoder_state_ == kChangingResolution) {
    DVLOGF(4) << "early out: kChangingResolution";
    return;
  }

  size_t index;
  for (index = 0; index < output_buffer_map_.size(); ++index)
    if (output_buffer_map_[index].picture_id == picture_buffer_id)
      break;

  if (index >= output_buffer_map_.size()) {
    // It's possible that we've already posted a DismissPictureBuffer for this
    // picture, but it has not yet executed when this ReusePictureBuffer was
    // posted to us by the client. In that case just ignore this (we've already
    // dismissed it and accounted for that) and let the sync object get
    // destroyed.
    DVLOGF(3) << "got picture id=" << picture_buffer_id
              << " not in use (anymore?).";
    return;
  }

  OutputRecord& output_record = output_buffer_map_[index];
  if (output_record.state != kAtClient) {
    VLOGF(1) << "picture_buffer_id not reusable";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  output_record.state = kFree;
  free_output_buffers_.push_back(index);
  decoder_frames_at_client_--;
  // We got a buffer back, so enqueue it back.
  Enqueue();
}

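// Flush is implemented by queueing a sentinel BitstreamBufferRef with
// kFlushBufferId. When DecodeBufferTask() reaches it, an empty input buffer
// (or V4L2_DEC_CMD_STOP, if supported) is pushed down the pipe, and
// NotifyFlushDoneIfNeeded() reports the flush to the client once everything
// has drained.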
FlushTask()1280 void V4L2VideoDecodeAccelerator::FlushTask() {
1281   VLOGF(2);
1282   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1283 
1284   if (decoder_state_ == kError) {
1285     VLOGF(2) << "early out: kError state";
1286     return;
1287   }
1288 
1289   // We don't support stacked flushing.
1290   DCHECK(!decoder_flushing_);
1291 
1292   // Queue up an empty buffer -- this triggers the flush.
1293   decoder_input_queue_.push(
1294       linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1295           decode_client_, decode_task_runner_, nullptr, kFlushBufferId)));
1296   decoder_flushing_ = true;
1297   SendPictureReady();  // Send all pending PictureReady.
1298 
1299   ScheduleDecodeBufferTaskIfNeeded();
1300 }
1301 
NotifyFlushDoneIfNeeded()1302 void V4L2VideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1303   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1304   if (!decoder_flushing_)
1305     return;
1306 
1307   // Pipeline is empty when:
1308   // * Decoder input queue is empty of non-delayed buffers.
1309   // * There is no currently filling input buffer.
1310   // * Input holding queue is empty.
1311   // * All input (VIDEO_OUTPUT) buffers are returned.
1312   // * All image processor buffers are returned.
1313   if (!decoder_input_queue_.empty()) {
1314     if (decoder_input_queue_.front()->input_id !=
1315         decoder_delay_bitstream_buffer_id_) {
1316       DVLOGF(3) << "Some input bitstream buffers are not queued.";
1317       return;
1318     }
1319   }
1320   if (decoder_current_input_buffer_ != -1) {
1321     DVLOGF(3) << "Current input buffer != -1";
1322     return;
1323   }
1324   if ((input_ready_queue_.size() + input_buffer_queued_count_) != 0) {
1325     DVLOGF(3) << "Some input buffers are not dequeued.";
1326     return;
1327   }
1328   if (flush_awaiting_last_output_buffer_) {
1329     DVLOGF(3) << "Waiting for last output buffer.";
1330     return;
1331   }
1332 
1333   // TODO(posciak): https://crbug.com/270039. Exynos requires a
1334   // streamoff-streamon sequence after flush to continue, even if we are not
1335   // resetting. This would make sense, because we don't really want to resume
1336   // from a non-resume point (e.g. not from an IDR) if we are flushed.
1337   // MSE player however triggers a Flush() on chunk end, but never Reset(). One
1338   // could argue either way, or even say that Flush() is not needed/harmful when
1339   // transitioning to next chunk.
1340   // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
1341   // when doing MSE. This should be harmless otherwise.
1342   if (!(StopDevicePoll() && StopOutputStream() && StopInputStream()))
1343     return;
1344 
1345   if (!StartDevicePoll())
1346     return;
1347 
1348   decoder_delay_bitstream_buffer_id_ = -1;
1349   decoder_flushing_ = false;
1350   VLOGF(2) << "returning flush";
1351   child_task_runner_->PostTask(FROM_HERE,
1352                                base::Bind(&Client::NotifyFlushDone, client_));
1353 
1354   // While we were flushing, we early-outed DecodeBufferTask()s.
1355   ScheduleDecodeBufferTaskIfNeeded();
1356 }
1357 
IsDecoderCmdSupported()1358 bool V4L2VideoDecodeAccelerator::IsDecoderCmdSupported() {
1359   // CMD_STOP should always succeed. If the decoder is started, the command can
1360   // flush it. If the decoder is stopped, the command does nothing. We use this
1361   // to know if a driver supports V4L2_DEC_CMD_STOP to flush.
1362   struct v4l2_decoder_cmd cmd;
1363   memset(&cmd, 0, sizeof(cmd));
1364   cmd.cmd = V4L2_DEC_CMD_STOP;
1365   if (device_->Ioctl(VIDIOC_TRY_DECODER_CMD, &cmd) != 0) {
1366     VLOGF(2) << "V4L2_DEC_CMD_STOP is not supported.";
1367     return false;
1368   }
1369 
1370   return true;
1371 }
1372 
SendDecoderCmdStop()1373 bool V4L2VideoDecodeAccelerator::SendDecoderCmdStop() {
1374   VLOGF(2);
1375   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1376   DCHECK(!flush_awaiting_last_output_buffer_);
1377 
1378   struct v4l2_decoder_cmd cmd;
1379   memset(&cmd, 0, sizeof(cmd));
1380   cmd.cmd = V4L2_DEC_CMD_STOP;
1381   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_DECODER_CMD, &cmd);
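       // On stateful V4L2 decoders, V4L2_DEC_CMD_STOP asks the driver to drain:
       // it keeps returning CAPTURE buffers for everything already queued and is
       // expected to mark the final (possibly empty) one with V4L2_BUF_FLAG_LAST.
       // Record that we are now waiting for that buffer.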
1382   flush_awaiting_last_output_buffer_ = true;
1383 
1384   return true;
1385 }
1386 
1387 void V4L2VideoDecodeAccelerator::ResetTask() {
1388   VLOGF(2);
1389   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1390 
1391   if (decoder_state_ == kError) {
1392     VLOGF(2) << "early out: kError state";
1393     return;
1394   }
1395   decoder_current_bitstream_buffer_.reset();
1396   while (!decoder_input_queue_.empty())
1397     decoder_input_queue_.pop();
1398 
1399   decoder_current_input_buffer_ = -1;
1400 
1401   // If we are in the middle of switching resolutions or awaiting picture
1402   // buffers, postpone reset until it's done. We don't have to worry about
1403   // timing of this with respect to decoding, because the output pipe is already
1404   // stopped if we are changing resolution. We will come back here after
1405   // we are done.
1406   DCHECK(!reset_pending_);
1407   if (decoder_state_ == kChangingResolution ||
1408       decoder_state_ == kAwaitingPictureBuffers) {
1409     reset_pending_ = true;
1410     return;
1411   }
1412   FinishReset();
1413 }
1414 
1415 void V4L2VideoDecodeAccelerator::FinishReset() {
1416   VLOGF(2);
1417   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1418 
1419   reset_pending_ = false;
1420   // After the output stream is stopped, the codec should not post any
1421   // resolution change events. So we dequeue the resolution change event
1422   // afterwards. The event could be posted before or while stopping the output
1423   // stream. The codec will expect buffers of the new size after the seek, so
1424   // we need to handle the resolution change event first.
1425   if (!(StopDevicePoll() && StopOutputStream()))
1426     return;
1427 
1428   if (DequeueResolutionChangeEvent()) {
1429     reset_pending_ = true;
1430     StartResolutionChange();
1431     return;
1432   }
1433 
1434   if (!StopInputStream())
1435     return;
1436 
1437   // If we were flushing, we'll never return any more BitstreamBuffers or
1438   // PictureBuffers; they have all been dropped and returned by now.
1439   NotifyFlushDoneIfNeeded();
1440 
1441   // Mark that we're resetting, then enqueue a ResetDoneTask().  All intervening
1442   // jobs will early-out in the kResetting state.
1443   decoder_state_ = kResetting;
1444   SendPictureReady();  // Send all pending PictureReady.
1445   decoder_thread_.task_runner()->PostTask(
1446       FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ResetDoneTask,
1447                             base::Unretained(this)));
1448 }
1449 
1450 void V4L2VideoDecodeAccelerator::ResetDoneTask() {
1451   VLOGF(2);
1452   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1453 
1454   if (decoder_state_ == kError) {
1455     VLOGF(2) << "early out: kError state";
1456     return;
1457   }
1458 
1459   // Start the poll thread if NotifyFlushDoneIfNeeded() has not already done so.
1460   if (!device_poll_thread_.IsRunning()) {
1461     if (!StartDevicePoll())
1462       return;
1463   }
1464 
1465   // Reset format-specific bits.
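       // The H.264 parser holds SPS/PPS and partial-NALU state from before the
       // reset, so recreate it and resynchronize from the next stream data.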
1466   if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
1467     decoder_h264_parser_.reset(new H264Parser());
1468   }
1469 
1470   // Jobs drained, we're finished resetting.
1471   DCHECK_EQ(decoder_state_, kResetting);
1472   decoder_state_ = kInitialized;
1473 
1474   decoder_partial_frame_pending_ = false;
1475   decoder_delay_bitstream_buffer_id_ = -1;
1476   child_task_runner_->PostTask(FROM_HERE,
1477                                base::Bind(&Client::NotifyResetDone, client_));
1478 
1479   // While we were resetting, we early-outed DecodeBufferTask()s.
1480   ScheduleDecodeBufferTaskIfNeeded();
1481 }
1482 
1483 void V4L2VideoDecodeAccelerator::DestroyTask() {
1484   VLOGF(2);
1485 
1486   // DestroyTask() should run regardless of decoder_state_.
1487 
1488   StopDevicePoll();
1489   StopOutputStream();
1490   StopInputStream();
1491 
1492   decoder_current_bitstream_buffer_.reset();
1493   decoder_current_input_buffer_ = -1;
1494   decoder_decode_buffer_tasks_scheduled_ = 0;
1495   decoder_frames_at_client_ = 0;
1496   while (!decoder_input_queue_.empty())
1497     decoder_input_queue_.pop();
1498   decoder_flushing_ = false;
1499 
1500   // Set our state to kError.  Just in case.
1501   decoder_state_ = kError;
1502 
1503   DestroyInputBuffers();
1504   DestroyOutputBuffers();
1505 }
1506 
1507 bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
1508   DVLOGF(3);
1509   DCHECK(!device_poll_thread_.IsRunning());
1510   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1511 
1512   // Start up the device poll thread and schedule its first DevicePollTask().
1513   if (!device_poll_thread_.Start()) {
1514     VLOGF(1) << "Device thread failed to start";
1515     NOTIFY_ERROR(PLATFORM_FAILURE);
1516     return false;
1517   }
1518   device_poll_thread_.task_runner()->PostTask(
1519       FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
1520                             base::Unretained(this), 0));
1521 
1522   return true;
1523 }
1524 
1525 bool V4L2VideoDecodeAccelerator::StopDevicePoll() {
1526   DVLOGF(3);
1527 
1528   if (!device_poll_thread_.IsRunning())
1529     return true;
1530 
1531   if (decoder_thread_.IsRunning())
1532     DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1533 
1534   // Signal the DevicePollTask() to stop, and stop the device poll thread.
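       // The poll interrupt is a separate fd that DevicePollTask() waits on
       // alongside the device fd, so signaling it wakes a blocked poll() at once.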
1535   if (!device_->SetDevicePollInterrupt()) {
1536     VPLOGF(1) << "SetDevicePollInterrupt(): failed";
1537     NOTIFY_ERROR(PLATFORM_FAILURE);
1538     return false;
1539   }
1540   device_poll_thread_.Stop();
1541   // Clear the interrupt now, to be sure.
1542   if (!device_->ClearDevicePollInterrupt()) {
1543     NOTIFY_ERROR(PLATFORM_FAILURE);
1544     return false;
1545   }
1546   DVLOGF(3) << "device poll stopped";
1547   return true;
1548 }
1549 
1550 bool V4L2VideoDecodeAccelerator::StopOutputStream() {
1551   VLOGF(2);
1552   if (!output_streamon_)
1553     return true;
1554 
1555   __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1556   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1557   output_streamon_ = false;
1558 
1559   // Output stream is stopped. No need to wait for the last output buffer anymore.
1560   flush_awaiting_last_output_buffer_ = false;
1561 
1562   for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1563     // After streamoff, the device drops ownership of all buffers, even if we
1564     // don't dequeue them explicitly. Some of them may still be owned by the
1565     // client however. Reuse only those that aren't.
1566     OutputRecord& output_record = output_buffer_map_[i];
1567     if (output_record.state == kAtDevice) {
1568       output_record.state = kFree;
1569       free_output_buffers_.push_back(i);
1570     }
1571   }
1572   output_buffer_queued_count_ = 0;
1573   return true;
1574 }
1575 
1576 bool V4L2VideoDecodeAccelerator::StopInputStream() {
1577   VLOGF(2);
1578   if (!input_streamon_)
1579     return true;
1580 
1581   __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1582   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
1583   input_streamon_ = false;
1584 
1585   // Reset accounting info for input.
1586   while (!input_ready_queue_.empty())
1587     input_ready_queue_.pop();
1588   free_input_buffers_.clear();
1589   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1590     free_input_buffers_.push_back(i);
1591     input_buffer_map_[i].at_device = false;
1592     input_buffer_map_[i].bytes_used = 0;
1593     input_buffer_map_[i].input_id = -1;
1594   }
1595   input_buffer_queued_count_ = 0;
1596 
1597   return true;
1598 }
1599 
1600 void V4L2VideoDecodeAccelerator::StartResolutionChange() {
1601   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1602   DCHECK_NE(decoder_state_, kUninitialized);
1603   DCHECK_NE(decoder_state_, kResetting);
1604 
1605   VLOGF(2) << "Initiate resolution change";
1606 
1607   if (!(StopDevicePoll() && StopOutputStream()))
1608     return;
1609 
1610   decoder_state_ = kChangingResolution;
1611   SendPictureReady();  // Send all pending PictureReady.
1612 
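       // CAPTURE buffers sized for the old resolution cannot be reused, and the
       // driver will generally not accept a new format while they remain
       // allocated, so free them before querying the new format.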
1613   if (!DestroyOutputBuffers()) {
1614     VLOGF(1) << "Failed destroying output buffers.";
1615     NOTIFY_ERROR(PLATFORM_FAILURE);
1616     return;
1617   }
1618 
1619   FinishResolutionChange();
1620 }
1621 
1622 void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
1623   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1624   DCHECK_EQ(decoder_state_, kChangingResolution);
1625   VLOGF(2);
1626 
1627   if (decoder_state_ == kError) {
1628     VLOGF(2) << "early out: kError state";
1629     return;
1630   }
1631 
1632   struct v4l2_format format;
1633   bool again;
1634   Size visible_size;
1635   bool ret = GetFormatInfo(&format, &visible_size, &again);
1636   if (!ret || again) {
1637     VLOGF(1) << "Couldn't get format information after resolution change";
1638     NOTIFY_ERROR(PLATFORM_FAILURE);
1639     return;
1640   }
1641 
1642   if (!CreateBuffersForFormat(format, visible_size)) {
1643     VLOGF(1) << "Couldn't reallocate buffers after resolution change";
1644     NOTIFY_ERROR(PLATFORM_FAILURE);
1645     return;
1646   }
1647 
1648   if (!StartDevicePoll())
1649     return;
1650 }
1651 
1652 void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
1653   DVLOGF(4);
1654   DCHECK(device_poll_thread_.task_runner()->BelongsToCurrentThread());
1655 
1656   bool event_pending = false;
1657 
1658   if (!device_->Poll(poll_device, &event_pending)) {
1659     NOTIFY_ERROR(PLATFORM_FAILURE);
1660     return;
1661   }
1662 
1663   // All processing should happen on ServiceDeviceTask(), since we shouldn't
1664   // touch decoder state from this thread.
1665   decoder_thread_.task_runner()->PostTask(
1666       FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::ServiceDeviceTask,
1667                             base::Unretained(this), event_pending));
1668 }
1669 
1670 void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
1671   VLOGF(1);
1672 
1673   if (!child_task_runner_->BelongsToCurrentThread()) {
1674     child_task_runner_->PostTask(
1675         FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::NotifyError,
1676                               weak_this_, error));
1677     return;
1678   }
1679 
1680   if (client_) {
1681     client_->NotifyError(error);
1682     client_ptr_factory_.reset();
1683   }
1684 }
1685 
1686 void V4L2VideoDecodeAccelerator::SetErrorState(Error error) {
1687   // We can touch decoder_state_ only if this is the decoder thread or the
1688   // decoder thread isn't running.
1689   if (decoder_thread_.task_runner() &&
1690       !decoder_thread_.task_runner()->BelongsToCurrentThread()) {
1691     decoder_thread_.task_runner()->PostTask(
1692         FROM_HERE, base::Bind(&V4L2VideoDecodeAccelerator::SetErrorState,
1693                               base::Unretained(this), error));
1694     return;
1695   }
1696 
1697   // Post NotifyError only if we are already initialized, as the API does
1698   // not allow doing so before that.
1699   if (decoder_state_ != kError && decoder_state_ != kUninitialized)
1700     NotifyError(error);
1701 
1702   decoder_state_ = kError;
1703 }
1704 
1705 bool V4L2VideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
1706                                                Size* visible_size,
1707                                                bool* again) {
1708   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1709 
1710   *again = false;
1711   memset(format, 0, sizeof(*format));
1712   format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
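       // G_FMT on the CAPTURE queue reports the coded resolution the driver has
       // parsed from the stream headers.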
1713   if (device_->Ioctl(VIDIOC_G_FMT, format) != 0) {
1714     if (errno == EINVAL) {
1715       // EINVAL means we haven't seen sufficient stream to decode the format.
1716       *again = true;
1717       return true;
1718     } else {
1719       VPLOGF(1) << "ioctl() failed: VIDIOC_G_FMT";
1720       NOTIFY_ERROR(PLATFORM_FAILURE);
1721       return false;
1722     }
1723   }
1724 
1725   // Make sure we are still getting the format we set on initialization.
1726   if (format->fmt.pix_mp.pixelformat != output_format_fourcc_) {
1727     VLOGF(1) << "Unexpected format from G_FMT on output";
1728     return false;
1729   }
1730 
1731   Size coded_size(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
1732   if (visible_size != nullptr)
1733     *visible_size = GetVisibleSize(coded_size);
1734 
1735   return true;
1736 }
1737 
1738 bool V4L2VideoDecodeAccelerator::CreateBuffersForFormat(
1739     const struct v4l2_format& format,
1740     const Size& visible_size) {
1741   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1742   output_planes_count_ = format.fmt.pix_mp.num_planes;
1743   coded_size_.SetSize(format.fmt.pix_mp.width, format.fmt.pix_mp.height);
1744   visible_size_ = visible_size;
1745 
1746   VLOGF(2) << "new resolution: " << coded_size_.ToString()
1747            << ", visible size: " << visible_size_.ToString()
1748            << ", decoder output planes count: " << output_planes_count_;
1749 
1750   return CreateOutputBuffers();
1751 }
1752 
1753 Size V4L2VideoDecodeAccelerator::GetVisibleSize(
1754     const Size& coded_size) {
1755   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1756 
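       // Prefer VIDIOC_G_SELECTION with the COMPOSE target to query the visible
       // rectangle within the coded frame; older drivers only implement
       // VIDIOC_G_CROP, so fall back to that below.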
1757   struct v4l2_rect* visible_rect;
1758   struct v4l2_selection selection_arg;
1759   memset(&selection_arg, 0, sizeof(selection_arg));
1760   selection_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1761   selection_arg.target = V4L2_SEL_TGT_COMPOSE;
1762 
1763   if (device_->Ioctl(VIDIOC_G_SELECTION, &selection_arg) == 0) {
1764     VLOGF(2) << "VIDIOC_G_SELECTION is supported";
1765     visible_rect = &selection_arg.r;
1766   } else {
1767     VLOGF(2) << "Fallback to VIDIOC_G_CROP";
1768     struct v4l2_crop crop_arg;
1769     memset(&crop_arg, 0, sizeof(crop_arg));
1770     crop_arg.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1771 
1772     if (device_->Ioctl(VIDIOC_G_CROP, &crop_arg) != 0) {
1773       VPLOGF(1) << "ioctl() VIDIOC_G_CROP failed";
1774       return coded_size;
1775     }
1776     visible_rect = &crop_arg.c;
1777   }
1778 
1779   Rect rect(visible_rect->left, visible_rect->top, visible_rect->width,
1780             visible_rect->height);
1781   VLOGF(2) << "visible rectangle is " << rect.ToString();
1782   if (!Rect(coded_size).Contains(rect)) {
1783     DVLOGF(3) << "visible rectangle " << rect.ToString()
1784               << " is not inside coded size " << coded_size.ToString();
1785     return coded_size;
1786   }
1787   if (rect.IsEmpty()) {
1788     VLOGF(1) << "visible size is empty";
1789     return coded_size;
1790   }
1791 
1792   // Chrome assumes the picture frame is coded at (0, 0).
1793   if (rect.x() != 0 || rect.y() != 0) {
1794     VLOGF(1) << "Unexpected visible rectangle " << rect.ToString()
1795              << ", top-left is not origin";
1796     return coded_size;
1797   }
1798 
1799   return rect.size();
1800 }
1801 
1802 bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
1803   VLOGF(2);
1804   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
1805   // We always run this as we prepare to initialize.
1806   DCHECK_EQ(decoder_state_, kInitialized);
1807   DCHECK(!input_streamon_);
1808   DCHECK(input_buffer_map_.empty());
1809 
1810   struct v4l2_requestbuffers reqbufs;
1811   memset(&reqbufs, 0, sizeof(reqbufs));
1812   reqbufs.count = kInputBufferCount;
1813   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1814   reqbufs.memory = V4L2_MEMORY_MMAP;
1815   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
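       // The driver may allocate a different number of buffers than requested;
       // reqbufs.count now holds how many it actually set up.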
1816   input_buffer_map_.resize(reqbufs.count);
1817   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1818     free_input_buffers_.push_back(i);
1819 
1820     // Query for the MEMORY_MMAP pointer.
1821     struct v4l2_plane planes[1];
1822     struct v4l2_buffer buffer;
1823     memset(&buffer, 0, sizeof(buffer));
1824     memset(planes, 0, sizeof(planes));
1825     buffer.index = i;
1826     buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1827     buffer.memory = V4L2_MEMORY_MMAP;
1828     buffer.m.planes = planes;
1829     buffer.length = 1;
1830     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
1831     void* address = device_->Mmap(NULL,
1832                                   buffer.m.planes[0].length,
1833                                   PROT_READ | PROT_WRITE,
1834                                   MAP_SHARED,
1835                                   buffer.m.planes[0].m.mem_offset);
1836     if (address == MAP_FAILED) {
1837       VPLOGF(1) << "mmap() failed";
1838       return false;
1839     }
1840     input_buffer_map_[i].address = address;
1841     input_buffer_map_[i].length = buffer.m.planes[0].length;
1842   }
1843 
1844   return true;
1845 }
1846 
1847 static bool IsSupportedOutputFormat(uint32_t v4l2_format) {
1848   // Only support V4L2_PIX_FMT_NV12 output format for now.
1849   // TODO(johnylin): add more supported formats if necessary.
1850   uint32_t kSupportedOutputFmtFourcc[] = { V4L2_PIX_FMT_NV12 };
1851   return std::find(
1852       kSupportedOutputFmtFourcc,
1853       kSupportedOutputFmtFourcc + arraysize(kSupportedOutputFmtFourcc),
1854       v4l2_format) !=
1855           kSupportedOutputFmtFourcc + arraysize(kSupportedOutputFmtFourcc);
1856 }
1857 
1858 bool V4L2VideoDecodeAccelerator::SetupFormats() {
1859   // We always run this as we prepare to initialize.
1860   DCHECK(child_task_runner_->BelongsToCurrentThread());
1861   DCHECK_EQ(decoder_state_, kUninitialized);
1862   DCHECK(!input_streamon_);
1863   DCHECK(!output_streamon_);
1864 
1865   size_t input_size;
1866   Size max_resolution, min_resolution;
1867   device_->GetSupportedResolution(input_format_fourcc_, &min_resolution,
1868                                   &max_resolution);
1869   if (max_resolution.width() > 1920 && max_resolution.height() > 1088)
1870     input_size = kInputBufferMaxSizeFor4k;
1871   else
1872     input_size = kInputBufferMaxSizeFor1080p;
1873 
1874   struct v4l2_fmtdesc fmtdesc;
1875   memset(&fmtdesc, 0, sizeof(fmtdesc));
1876   fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1877   bool is_format_supported = false;
1878   while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1879     if (fmtdesc.pixelformat == input_format_fourcc_) {
1880       is_format_supported = true;
1881       break;
1882     }
1883     ++fmtdesc.index;
1884   }
1885 
1886   if (!is_format_supported) {
1887     VLOGF(1) << "Input fourcc " << input_format_fourcc_
1888              << " not supported by device.";
1889     return false;
1890   }
1891 
1892   struct v4l2_format format;
1893   memset(&format, 0, sizeof(format));
1894   format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1895   format.fmt.pix_mp.pixelformat = input_format_fourcc_;
1896   format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
1897   format.fmt.pix_mp.num_planes = 1;
1898   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1899 
1900   // We have to set up the format for output, because the driver may not allow
1901   // changing it once we start streaming; whether it can support our chosen
1902   // output format or not may depend on the input format.
1903   memset(&fmtdesc, 0, sizeof(fmtdesc));
1904   fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1905   while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
1906     if (IsSupportedOutputFormat(fmtdesc.pixelformat)) {
1907       output_format_fourcc_ = fmtdesc.pixelformat;
1908       break;
1909     }
1910     ++fmtdesc.index;
1911   }
1912 
1913   if (output_format_fourcc_ == 0) {
1914     VLOGF(2) << "Image processor not available";
1915     return false;
1916   }
1917   VLOGF(2) << "Output format=" << output_format_fourcc_;
1918 
1919   // Just set the fourcc for output; resolution, etc., will come from the
1920   // driver once it extracts it from the stream.
1921   memset(&format, 0, sizeof(format));
1922   format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1923   format.fmt.pix_mp.pixelformat = output_format_fourcc_;
1924   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1925 
1926   return true;
1927 }
1928 
1929 bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
1930   VLOGF(2);
1931   DCHECK(decoder_state_ == kInitialized ||
1932          decoder_state_ == kChangingResolution);
1933   DCHECK(!output_streamon_);
1934   DCHECK(output_buffer_map_.empty());
1935   DCHECK_EQ(output_mode_, Config::OutputMode::IMPORT);
1936 
1937   // Number of output buffers we need.
1938   struct v4l2_control ctrl;
1939   memset(&ctrl, 0, sizeof(ctrl));
1940   ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
1941   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
1942   output_dpb_size_ = ctrl.value;
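       // V4L2_CID_MIN_BUFFERS_FOR_CAPTURE is the minimum number of CAPTURE
       // buffers the driver needs for its reference frames (the stream's DPB
       // size); kDpbOutputBufferExtraCount more are requested below so some
       // frames can be out at the client while decoding continues.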
1943 
1944   // Output format setup in Initialize().
1945 
1946   uint32_t buffer_count = output_dpb_size_ + kDpbOutputBufferExtraCount;
1947 
1948   VideoPixelFormat pixel_format =
1949       V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_);
1950 
1951   child_task_runner_->PostTask(
1952       FROM_HERE, base::Bind(&Client::ProvidePictureBuffers, client_,
1953                             buffer_count, pixel_format, coded_size_));
1954 
1956   // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding
1957   // or event handling while we are waiting for AssignPictureBuffers(). Not
1958   // having Pictures available would not have entirely prevented us from making
1959   // decoding progress, e.g. in the case of H.264 we could still decode non-slice
1960   // NALUs and could even get another resolution change before we were
1961   // done with this one. After we get the buffers, we'll go back into kIdle and
1962   // kick off further event processing, and eventually go back into kDecoding
1963   // once no more events are pending (if any).
1964   decoder_state_ = kAwaitingPictureBuffers;
1965 
1966   return true;
1967 }
1968 
1969 void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
1970   VLOGF(2);
1971   DCHECK(!decoder_thread_.IsRunning() ||
1972          decoder_thread_.task_runner()->BelongsToCurrentThread());
1973   DCHECK(!input_streamon_);
1974 
1975   if (input_buffer_map_.empty())
1976     return;
1977 
1978   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
1979     if (input_buffer_map_[i].address != NULL) {
1980       device_->Munmap(input_buffer_map_[i].address,
1981                       input_buffer_map_[i].length);
1982     }
1983   }
1984 
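       // REQBUFS with count = 0 releases all driver-side buffers on this queue.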
1985   struct v4l2_requestbuffers reqbufs;
1986   memset(&reqbufs, 0, sizeof(reqbufs));
1987   reqbufs.count = 0;
1988   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1989   reqbufs.memory = V4L2_MEMORY_MMAP;
1990   IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1991 
1992   input_buffer_map_.clear();
1993   free_input_buffers_.clear();
1994 }
1995 
1996 bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
1997   VLOGF(2);
1998   DCHECK(!decoder_thread_.IsRunning() ||
1999          decoder_thread_.task_runner()->BelongsToCurrentThread());
2000   DCHECK(!output_streamon_);
2001   bool success = true;
2002 
2003   if (output_buffer_map_.empty())
2004     return true;
2005 
2006   for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
2007     OutputRecord& output_record = output_buffer_map_[i];
2008 
2009     DVLOGF(3) << "dismissing PictureBuffer id=" << output_record.picture_id;
2010     child_task_runner_->PostTask(
2011         FROM_HERE, base::Bind(&Client::DismissPictureBuffer, client_,
2012                               output_record.picture_id));
2013   }
2014 
2015   struct v4l2_requestbuffers reqbufs;
2016   memset(&reqbufs, 0, sizeof(reqbufs));
2017   reqbufs.count = 0;
2018   reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2019   reqbufs.memory = V4L2_MEMORY_MMAP;
2020   if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
2021     VPLOGF(1) << "ioctl() failed: VIDIOC_REQBUFS";
2022     NOTIFY_ERROR(PLATFORM_FAILURE);
2023     success = false;
2024   }
2025 
2026   output_buffer_map_.clear();
2027   while (!free_output_buffers_.empty())
2028     free_output_buffers_.pop_front();
2029   output_buffer_queued_count_ = 0;
2030   // The client may still hold some buffers. The texture holds a reference to
2031   // the buffer. It is OK to free the buffer and destroy EGLImage here.
2032   decoder_frames_at_client_ = 0;
2033 
2034   return success;
2035 }
2036 
2037 void V4L2VideoDecodeAccelerator::SendPictureReady() {
2038   DVLOGF(4);
2039   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2040   bool send_now = (decoder_state_ == kChangingResolution ||
2041                    decoder_state_ == kResetting || decoder_flushing_);
2042   while (pending_picture_ready_.size() > 0) {
2043     bool cleared = pending_picture_ready_.front().cleared;
2044     const Picture& picture = pending_picture_ready_.front().picture;
2045     if (cleared && picture_clearing_count_ == 0) {
2046       // This picture is cleared. It can be posted to a thread different than
2047       // the main GPU thread to reduce latency. This should be the case after
2048       // all pictures are cleared at the beginning.
2049       decode_task_runner_->PostTask(
2050           FROM_HERE,
2051           base::Bind(&Client::PictureReady, decode_client_, picture));
2052       pending_picture_ready_.pop();
2053     } else if (!cleared || send_now) {
2054       DVLOGF(4) << "cleared=" << pending_picture_ready_.front().cleared
2055                 << ", decoder_state_=" << decoder_state_
2056                 << ", decoder_flushing_=" << decoder_flushing_
2057                 << ", picture_clearing_count_=" << picture_clearing_count_;
2058       // If the picture is not cleared, post it to the child thread because it
2059       // has to be cleared in the child thread. A picture only needs to be
2060       // cleared once. If the decoder is changing resolution, resetting or
2061       // flushing, send all pictures to ensure PictureReady arrive before
2062       // ProvidePictureBuffers, NotifyResetDone, or NotifyFlushDone.
2063       child_task_runner_->PostTaskAndReply(
2064           FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
2065           // Unretained is safe. If Client::PictureReady gets to run, |this| is
2066           // alive. Destroy() will wait for the decoder thread to finish.
2067           base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
2068                      base::Unretained(this)));
2069       picture_clearing_count_++;
2070       pending_picture_ready_.pop();
2071     } else {
2072       // This picture is cleared. But some pictures are about to be cleared on
2073       // the child thread. To preserve the order, do not send this until those
2074       // pictures are cleared.
2075       break;
2076     }
2077   }
2078 }
2079 
2080 void V4L2VideoDecodeAccelerator::PictureCleared() {
2081   DVLOGF(4) << "clearing count=" << picture_clearing_count_;
2082   DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
2083   DCHECK_GT(picture_clearing_count_, 0);
2084   picture_clearing_count_--;
2085   SendPictureReady();
2086 }
2087 
2088 }  // namespace media
2089