• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "content/common/gpu/media/android_video_decode_accelerator.h"
6 
7 #include "base/bind.h"
8 #include "base/logging.h"
9 #include "base/message_loop/message_loop.h"
10 #include "base/metrics/histogram.h"
11 #include "content/common/gpu/gpu_channel.h"
12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
13 #include "media/base/bitstream_buffer.h"
14 #include "media/base/limits.h"
15 #include "media/video/picture.h"
16 #include "ui/gl/android/scoped_java_surface.h"
17 #include "ui/gl/gl_bindings.h"
18 
19 namespace content {
20 
// Helper macro for dealing with failure.  If |result| evaluates false, emit
// |log| to ERROR, register |error| with the decoder via a posted
// NotifyError() (posted, not called directly, so the client is notified
// outside the current call stack), latch the ERROR state, and return from
// the enclosing function.  Only usable inside methods of
// AndroidVideoDecodeAccelerator that return void.
#define RETURN_ON_FAILURE(result, log, error)                       \
  do {                                                              \
    if (!(result)) {                                                \
      DLOG(ERROR) << log;                                           \
      base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \
          &AndroidVideoDecodeAccelerator::NotifyError,              \
          base::AsWeakPtr(this), error));                           \
      state_ = ERROR;                                               \
      return;                                                       \
    }                                                               \
  } while (0)
34 
35 // TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling
36 // phase, but 1 is added due to crbug.com/176036. This should be tuned when we
37 // have actual use case.
38 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };
39 
40 // Max number of bitstreams notified to the client with
41 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
42 enum { kMaxBitstreamsNotifiedInAdvance = 32 };
43 
44 // Because MediaCodec is thread-hostile (must be poked on a single thread) and
45 // has no callback mechanism (b/11990118), we must drive it by polling for
46 // complete frames (and available input buffers, when the codec is fully
47 // saturated).  This function defines the polling delay.  The value used is an
48 // arbitrary choice that trades off CPU utilization (spinning) against latency.
49 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay().
DecodePollDelay()50 static inline const base::TimeDelta DecodePollDelay() {
51   // An alternative to this polling scheme could be to dedicate a new thread
52   // (instead of using the ChildThread) to run the MediaCodec, and make that
53   // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
54   // believes the codec should complete "soon" (e.g. waiting for an input
55   // buffer, or waiting for a picture when it knows enough complete input
56   // pictures have been fed to saturate any internal buffering).  This is
57   // speculative and it's unclear that this would be a win (nor that there's a
58   // reasonably device-agnostic way to fill in the "believes" above).
59   return base::TimeDelta::FromMilliseconds(10);
60 }
61 
NoWaitTimeOut()62 static inline const base::TimeDelta NoWaitTimeOut() {
63   return base::TimeDelta::FromMicroseconds(0);
64 }
65 
AndroidVideoDecodeAccelerator(media::VideoDecodeAccelerator::Client * client,const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,const base::Callback<bool (void)> & make_context_current)66 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
67     media::VideoDecodeAccelerator::Client* client,
68     const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
69     const base::Callback<bool(void)>& make_context_current)
70     : client_(client),
71       make_context_current_(make_context_current),
72       codec_(media::kCodecH264),
73       state_(NO_ERROR),
74       surface_texture_id_(0),
75       picturebuffers_requested_(false),
76       gl_decoder_(decoder) {
77 }
78 
~AndroidVideoDecodeAccelerator()79 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
80   DCHECK(thread_checker_.CalledOnValidThread());
81 }
82 
Initialize(media::VideoCodecProfile profile)83 bool AndroidVideoDecodeAccelerator::Initialize(
84     media::VideoCodecProfile profile) {
85   DCHECK(!media_codec_);
86   DCHECK(thread_checker_.CalledOnValidThread());
87 
88   if (!media::MediaCodecBridge::IsAvailable())
89     return false;
90 
91   if (profile == media::VP8PROFILE_MAIN) {
92     codec_ = media::kCodecVP8;
93   } else {
94     // TODO(dwkang): enable H264 once b/8125974 is fixed.
95     LOG(ERROR) << "Unsupported profile: " << profile;
96     return false;
97   }
98 
99   // Only consider using MediaCodec if it's likely backed by hardware.
100   if (media::VideoCodecBridge::IsKnownUnaccelerated(
101           codec_, media::MEDIA_CODEC_DECODER)) {
102     return false;
103   }
104 
105   if (!make_context_current_.Run()) {
106     LOG(ERROR) << "Failed to make this decoder's GL context current.";
107     return false;
108   }
109 
110   if (!gl_decoder_) {
111     LOG(ERROR) << "Failed to get gles2 decoder instance.";
112     return false;
113   }
114   glGenTextures(1, &surface_texture_id_);
115   glActiveTexture(GL_TEXTURE0);
116   glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);
117 
118   glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
119   glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
120   glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
121                   GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
122   glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
123                   GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
124   gl_decoder_->RestoreTextureUnitBindings(0);
125   gl_decoder_->RestoreActiveTexture();
126 
127   surface_texture_ = new gfx::SurfaceTexture(surface_texture_id_);
128 
129   if (!ConfigureMediaCodec()) {
130     LOG(ERROR) << "Failed to create MediaCodec instance.";
131     return false;
132   }
133 
134   base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
135       &AndroidVideoDecodeAccelerator::NotifyInitializeDone,
136       base::AsWeakPtr(this)));
137   return true;
138 }
139 
DoIOTask()140 void AndroidVideoDecodeAccelerator::DoIOTask() {
141   if (state_ == ERROR) {
142     return;
143   }
144 
145   QueueInput();
146   DequeueOutput();
147 }
148 
QueueInput()149 void AndroidVideoDecodeAccelerator::QueueInput() {
150   if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
151     return;
152   if (pending_bitstream_buffers_.empty())
153     return;
154 
155   int input_buf_index = 0;
156   media::MediaCodecStatus status = media_codec_->DequeueInputBuffer(
157       NoWaitTimeOut(), &input_buf_index);
158   if (status != media::MEDIA_CODEC_OK) {
159     DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
160            status == media::MEDIA_CODEC_ERROR);
161     return;
162   }
163 
164   base::Time queued_time = pending_bitstream_buffers_.front().second;
165   UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
166                       base::Time::Now() - queued_time);
167   media::BitstreamBuffer bitstream_buffer =
168       pending_bitstream_buffers_.front().first;
169   pending_bitstream_buffers_.pop();
170 
171   if (bitstream_buffer.id() == -1) {
172     media_codec_->QueueEOS(input_buf_index);
173     return;
174   }
175 
176   // Abuse the presentation time argument to propagate the bitstream
177   // buffer ID to the output, so we can report it back to the client in
178   // PictureReady().
179   base::TimeDelta timestamp =
180       base::TimeDelta::FromMicroseconds(bitstream_buffer.id());
181 
182   scoped_ptr<base::SharedMemory> shm(
183       new base::SharedMemory(bitstream_buffer.handle(), true));
184 
185   RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
186                     "Failed to SharedMemory::Map()",
187                     UNREADABLE_INPUT);
188 
189   status =
190       media_codec_->QueueInputBuffer(input_buf_index,
191                                      static_cast<const uint8*>(shm->memory()),
192                                      bitstream_buffer.size(),
193                                      timestamp);
194   RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
195                     "Failed to QueueInputBuffer: " << status,
196                     PLATFORM_FAILURE);
197 
198   // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output
199   // will be returned from the bitstream buffer. However, MediaCodec API is
200   // not enough to guarantee it.
201   // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to
202   // keep getting more bitstreams from the client, and throttle them by using
203   // |bitstreams_notified_in_advance_|.
204   // TODO(dwkang): check if there is a way to remove this workaround.
205   base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
206       &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
207       base::AsWeakPtr(this), bitstream_buffer.id()));
208   bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
209 }
210 
DequeueOutput()211 void AndroidVideoDecodeAccelerator::DequeueOutput() {
212   if (picturebuffers_requested_ && output_picture_buffers_.empty())
213     return;
214 
215   if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
216     // Don't have any picture buffer to send. Need to wait more.
217     return;
218   }
219 
220   bool eos = false;
221   base::TimeDelta timestamp;
222   int32 buf_index = 0;
223   do {
224     size_t offset = 0;
225     size_t size = 0;
226 
227     media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
228         NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos, NULL);
229     switch (status) {
230       case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
231       case media::MEDIA_CODEC_ERROR:
232         return;
233 
234       case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: {
235         int32 width, height;
236         media_codec_->GetOutputFormat(&width, &height);
237 
238         if (!picturebuffers_requested_) {
239           picturebuffers_requested_ = true;
240           size_ = gfx::Size(width, height);
241           base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
242               &AndroidVideoDecodeAccelerator::RequestPictureBuffers,
243               base::AsWeakPtr(this)));
244         } else {
245           // Dynamic resolution change support is not specified by the Android
246           // platform at and before JB-MR1, so it's not possible to smoothly
247           // continue playback at this point.  Instead, error out immediately,
248           // expecting clients to Reset() as appropriate to avoid this.
249           // b/7093648
250           RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
251                             "Dynamic resolution change is not supported.",
252                             PLATFORM_FAILURE);
253         }
254         return;
255       }
256 
257       case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
258         RETURN_ON_FAILURE(media_codec_->GetOutputBuffers(),
259                           "Cannot get output buffer from MediaCodec.",
260                           PLATFORM_FAILURE);
261         break;
262 
263       case media::MEDIA_CODEC_OK:
264         DCHECK_GE(buf_index, 0);
265         break;
266 
267       default:
268         NOTREACHED();
269         break;
270     }
271   } while (buf_index < 0);
272 
273   // This ignores the emitted ByteBuffer and instead relies on rendering to the
274   // codec's SurfaceTexture and then copying from that texture to the client's
275   // PictureBuffer's texture.  This means that each picture's data is written
276   // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
277   // to the client's texture.  It would be nicer to either:
278   // 1) Render directly to the client's texture from MediaCodec (one write); or
279   // 2) Upload the ByteBuffer to the client's texture (two writes).
280   // Unfortunately neither is possible:
281   // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
282   //    written to can't change during the codec's lifetime.  b/11990461
283   // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
284   //    opaque/non-standard format.  It's not possible to negotiate the decoder
285   //    to emit a specific colorspace, even using HW CSC.  b/10706245
286   // So, we live with these two extra copies per picture :(
287   media_codec_->ReleaseOutputBuffer(buf_index, true);
288 
289   if (eos) {
290     base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
291         &AndroidVideoDecodeAccelerator::NotifyFlushDone,
292         base::AsWeakPtr(this)));
293   } else {
294     int64 bitstream_buffer_id = timestamp.InMicroseconds();
295     SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));
296 
297     // Removes ids former or equal than the id from decoder. Note that
298     // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder
299     // because of frame reordering issue. We just maintain this roughly and use
300     // for the throttling purpose.
301     std::list<int32>::iterator it;
302     for (it = bitstreams_notified_in_advance_.begin();
303         it != bitstreams_notified_in_advance_.end();
304         ++it) {
305       if (*it == bitstream_buffer_id) {
306         bitstreams_notified_in_advance_.erase(
307             bitstreams_notified_in_advance_.begin(), ++it);
308         break;
309       }
310     }
311   }
312 }
313 
SendCurrentSurfaceToClient(int32 bitstream_id)314 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
315     int32 bitstream_id) {
316   DCHECK(thread_checker_.CalledOnValidThread());
317   DCHECK_NE(bitstream_id, -1);
318   DCHECK(!free_picture_ids_.empty());
319 
320   RETURN_ON_FAILURE(make_context_current_.Run(),
321                     "Failed to make this decoder's GL context current.",
322                     PLATFORM_FAILURE);
323 
324   int32 picture_buffer_id = free_picture_ids_.front();
325   free_picture_ids_.pop();
326 
327   float transfrom_matrix[16];
328   surface_texture_->UpdateTexImage();
329   surface_texture_->GetTransformMatrix(transfrom_matrix);
330 
331   OutputBufferMap::const_iterator i =
332       output_picture_buffers_.find(picture_buffer_id);
333   RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
334                     "Can't find a PictureBuffer for " << picture_buffer_id,
335                     PLATFORM_FAILURE);
336   uint32 picture_buffer_texture_id = i->second.texture_id();
337 
338   RETURN_ON_FAILURE(gl_decoder_.get(),
339                     "Failed to get gles2 decoder instance.",
340                     ILLEGAL_STATE);
341   // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
342   // needed because it takes 10s of milliseconds to initialize.
343   if (!copier_) {
344     copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
345     copier_->Initialize(gl_decoder_.get());
346   }
347 
348   // Here, we copy |surface_texture_id_| to the picture buffer instead of
349   // setting new texture to |surface_texture_| by calling attachToGLContext()
350   // because:
351   // 1. Once we call detachFrameGLContext(), it deletes the texture previous
352   //    attached.
353   // 2. SurfaceTexture requires us to apply a transform matrix when we show
354   //    the texture.
355   copier_->DoCopyTexture(gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES,
356                          GL_TEXTURE_2D, surface_texture_id_,
357                          picture_buffer_texture_id, 0, size_.width(),
358                          size_.height(), false, false, false);
359 
360   base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
361       &AndroidVideoDecodeAccelerator::NotifyPictureReady,
362       base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id)));
363 }
364 
Decode(const media::BitstreamBuffer & bitstream_buffer)365 void AndroidVideoDecodeAccelerator::Decode(
366     const media::BitstreamBuffer& bitstream_buffer) {
367   DCHECK(thread_checker_.CalledOnValidThread());
368   if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
369     base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
370         &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
371         base::AsWeakPtr(this), bitstream_buffer.id()));
372     return;
373   }
374 
375   pending_bitstream_buffers_.push(
376       std::make_pair(bitstream_buffer, base::Time::Now()));
377 
378   DoIOTask();
379 }
380 
AssignPictureBuffers(const std::vector<media::PictureBuffer> & buffers)381 void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
382     const std::vector<media::PictureBuffer>& buffers) {
383   DCHECK(thread_checker_.CalledOnValidThread());
384   DCHECK(output_picture_buffers_.empty());
385   DCHECK(free_picture_ids_.empty());
386 
387   for (size_t i = 0; i < buffers.size(); ++i) {
388     RETURN_ON_FAILURE(buffers[i].size() == size_,
389                       "Invalid picture buffer size was passed.",
390                       INVALID_ARGUMENT);
391     int32 id = buffers[i].id();
392     output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
393     free_picture_ids_.push(id);
394     // Since the client might be re-using |picture_buffer_id| values, forget
395     // about previously-dismissed IDs now.  See ReusePictureBuffer() comment
396     // about "zombies" for why we maintain this set in the first place.
397     dismissed_picture_ids_.erase(id);
398   }
399 
400   RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers,
401                     "Invalid picture buffers were passed.",
402                     INVALID_ARGUMENT);
403 
404   DoIOTask();
405 }
406 
ReusePictureBuffer(int32 picture_buffer_id)407 void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
408     int32 picture_buffer_id) {
409   DCHECK(thread_checker_.CalledOnValidThread());
410 
411   // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
412   // IPC, or in a PostTask either at the sender or receiver) when we sent a
413   // DismissPictureBuffer() for this |picture_buffer_id|.  Account for such
414   // potential "zombie" IDs here.
415   if (dismissed_picture_ids_.erase(picture_buffer_id))
416     return;
417 
418   free_picture_ids_.push(picture_buffer_id);
419 
420   DoIOTask();
421 }
422 
Flush()423 void AndroidVideoDecodeAccelerator::Flush() {
424   DCHECK(thread_checker_.CalledOnValidThread());
425 
426   Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
427 }
428 
ConfigureMediaCodec()429 bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
430   DCHECK(surface_texture_.get());
431 
432   gfx::ScopedJavaSurface surface(surface_texture_.get());
433 
434   // Pass a dummy 320x240 canvas size and let the codec signal the real size
435   // when it's known from the bitstream.
436   media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
437       codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
438   if (!media_codec_)
439     return false;
440 
441   io_timer_.Start(FROM_HERE,
442                   DecodePollDelay(),
443                   this,
444                   &AndroidVideoDecodeAccelerator::DoIOTask);
445   return true;
446 }
447 
Reset()448 void AndroidVideoDecodeAccelerator::Reset() {
449   DCHECK(thread_checker_.CalledOnValidThread());
450 
451   while (!pending_bitstream_buffers_.empty()) {
452     int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
453     pending_bitstream_buffers_.pop();
454 
455     if (bitstream_buffer_id != -1) {
456       base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
457           &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
458           base::AsWeakPtr(this), bitstream_buffer_id));
459     }
460   }
461   bitstreams_notified_in_advance_.clear();
462 
463   for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
464        it != output_picture_buffers_.end();
465        ++it) {
466     client_->DismissPictureBuffer(it->first);
467     dismissed_picture_ids_.insert(it->first);
468   }
469   output_picture_buffers_.clear();
470   std::queue<int32> empty;
471   std::swap(free_picture_ids_, empty);
472   CHECK(free_picture_ids_.empty());
473   picturebuffers_requested_ = false;
474 
475   // On some devices, and up to at least JB-MR1,
476   // - flush() can fail after EOS (b/8125974); and
477   // - mid-stream resolution change is unsupported (b/7093648).
478   // To cope with these facts, we always stop & restart the codec on Reset().
479   io_timer_.Stop();
480   media_codec_->Stop();
481   ConfigureMediaCodec();
482   state_ = NO_ERROR;
483 
484   base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
485       &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this)));
486 }
487 
Destroy()488 void AndroidVideoDecodeAccelerator::Destroy() {
489   DCHECK(thread_checker_.CalledOnValidThread());
490 
491   if (media_codec_) {
492     io_timer_.Stop();
493     media_codec_->Stop();
494   }
495   if (surface_texture_id_)
496     glDeleteTextures(1, &surface_texture_id_);
497   if (copier_)
498     copier_->Destroy();
499   delete this;
500 }
501 
NotifyInitializeDone()502 void AndroidVideoDecodeAccelerator::NotifyInitializeDone() {
503   client_->NotifyInitializeDone();
504 }
505 
RequestPictureBuffers()506 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
507   client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
508 }
509 
NotifyPictureReady(const media::Picture & picture)510 void AndroidVideoDecodeAccelerator::NotifyPictureReady(
511     const media::Picture& picture) {
512   client_->PictureReady(picture);
513 }
514 
NotifyEndOfBitstreamBuffer(int input_buffer_id)515 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
516     int input_buffer_id) {
517   client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
518 }
519 
NotifyFlushDone()520 void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
521   client_->NotifyFlushDone();
522 }
523 
NotifyResetDone()524 void AndroidVideoDecodeAccelerator::NotifyResetDone() {
525   client_->NotifyResetDone();
526 }
527 
NotifyError(media::VideoDecodeAccelerator::Error error)528 void AndroidVideoDecodeAccelerator::NotifyError(
529     media::VideoDecodeAccelerator::Error error) {
530   client_->NotifyError(error);
531 }
532 
533 }  // namespace content
534