// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_decoder.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "base/synchronization/waitable_event.h"
#include "base/task_runner_util.h"
#include "content/child/child_thread.h"
#include "content/renderer/media/native_handle_impl.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/bind_to_current_loop.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/webrtc/common_video/interface/texture_video_frame.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"

namespace content {

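// Bitstream buffer IDs live in a 30-bit space (IDs are masked with ID_LAST
// when incremented). ID_HALF, half of that space, lets IsBufferAfterReset()
// order two IDs across the wraparound, and ID_INVALID means no Reset/Release
// has been seen yet.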
const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate.  Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for shared
// memory: enough for 10 seconds at 30 fps.
static const size_t kMaxNumOfPendingBuffers = 300;

// A shared memory segment and its allocated size. This class has the ownership
// of |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  SHMBuffer(base::SharedMemory* shm, size_t size);
  ~SHMBuffer();
  base::SharedMemory* const shm;
  const size_t size;
};

RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
    : shm(shm), size(size) {}

RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }

RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      size(size) {}

RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}

RTCVideoDecoder::RTCVideoDecoder(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
    : video_codec_type_(type),
      factories_(factories),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID),
      weak_factory_(this) {
  DCHECK(!factories_->GetTaskRunner()->BelongsToCurrentThread());
}

RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DestroyVDA();

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();

  // Delete WebRTC input buffers.
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end();
       ++it) {
    delete[] it->first._buffer;
  }
}

// static
scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_ANY;
      break;
    case webrtc::kVideoCodecH264:
      profile = media::H264PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported:" << type;
      return decoder.Pass();
  }

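  // CreateVDA() must run on the GPU factories' task runner, so post it there
  // and block on |waiter| until it has signaled; |vda_| is then known to be
  // either set or NULL before this function returns.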
  base::WaitableEvent waiter(true, false);
  decoder.reset(new RTCVideoDecoder(type, factories));
  decoder->factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::CreateVDA,
                 base::Unretained(decoder.get()),
                 profile,
                 &waiter));
  waiter.Wait();
  // vda can be NULL if the codec is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}

int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(video_codec_type_, codecSettings->codecType);
  if (codecSettings->codecType == webrtc::kVideoCodecVP8 &&
      codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
  }
  // Create some shared memory if the queue is empty.
  if (available_shm_segments_.size() == 0) {
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM,
                   weak_factory_.GetWeakPtr(),
                   kMaxInFlightDecodes,
                   kSharedMemorySegmentBytes));
  }
  return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
}

int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);

  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not been initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken
    // frames.  Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Most platforms' VDA implementations support mid-stream resolution change
  // internally.  Platforms whose VDAs fail to support mid-stream resolution
  // change gracefully need to have their clients cover for them, and we do
  // that here.
#ifdef ANDROID
  const bool kVDACanHandleMidstreamResize = false;
#else
  const bool kVDACanHandleMidstreamResize = true;
#endif

  bool need_to_reset_for_midstream_resize = false;
  if (inputImage._frameType == webrtc::kKeyFrame) {
    DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
             << inputImage._encodedHeight;
    gfx::Size prev_frame_size = frame_size_;
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
        prev_frame_size != frame_size_) {
      need_to_reset_for_midstream_resize = true;
    }
  } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                     reset_bitstream_buffer_id_)) {
    // TODO(wuchengli): VDA should handle it. Remove this when
    // http://crosbug.com/p/21913 is fixed.
    DVLOG(1) << "The first frame should be a key frame. Drop this.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         inputImage._length);
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;

  // If a shared memory segment is available, there are no pending buffers, and
  // this isn't a mid-stream resolution change, then send the buffer for decode
  // immediately. Otherwise, save the buffer in the queue for later decode.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (!need_to_reset_for_midstream_resize && pending_buffers_.size() == 0)
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    if (!SaveToPendingBuffers_Locked(inputImage, buffer_data))
      return WEBRTC_VIDEO_CODEC_ERROR;
    if (need_to_reset_for_midstream_resize) {
      base::AutoUnlock auto_unlock(lock_);
      Reset();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoDecoder::RequestBufferDecode,
                 weak_factory_.GetWeakPtr()));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::Release() {
  DVLOG(2) << "Release";
  // Do not destroy VDA because WebRTC can call InitDecode and start decoding
  // again.
  return Reset();
}

int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::ResetInternal,
                   weak_factory_.GetWeakPtr()));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}

void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  if (!picture_buffers_at_display_.count(id)) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
    return;
  }
  // The texture is still being displayed (it is in
  // |picture_buffers_at_display_|), so postpone deletion until it is returned
  // to us.
}

void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Validate picture rectangle from GPU.
  if (picture.visible_rect().IsEmpty() ||
      !gfx::Rect(pb.size()).Contains(picture.visible_rect())) {
    NOTREACHED() << "Invalid picture size from VDA: "
                 << picture.visible_rect().ToString() << " should fit in "
                 << pb.size().ToString();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }

  // Create a media::VideoFrame.
  uint32_t timestamp = 0;
  GetBufferData(picture.bitstream_buffer_id(), &timestamp);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp);
  bool inserted =
      picture_buffers_at_display_.insert(std::make_pair(
                                             picture.picture_buffer_id(),
                                             pb.texture_id())).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  webrtc::RefCountImpl<NativeHandleImpl>* handle =
      new webrtc::RefCountImpl<NativeHandleImpl>(frame);
  webrtc::TextureVideoFrame decoded_image(handle,
                                          picture.visible_rect().width(),
                                          picture.visible_rect().height(),
                                          timestamp,
                                          0);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}

static void ReadPixelsSyncInner(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    uint32 texture_id,
    const gfx::Rect& visible_rect,
    const SkBitmap& pixels,
    base::WaitableEvent* event) {
  factories->ReadPixels(texture_id, visible_rect, pixels);
  event->Signal();
}

static void ReadPixelsSync(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    uint32 texture_id,
    const gfx::Rect& visible_rect,
    const SkBitmap& pixels) {
  base::WaitableEvent event(true, false);
  if (!factories->GetTaskRunner()->PostTask(FROM_HERE,
                                            base::Bind(&ReadPixelsSyncInner,
                                                       factories,
                                                       texture_id,
                                                       visible_rect,
                                                       pixels,
                                                       &event)))
    return;
  event.Wait();
}

scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp) {
  gfx::Rect visible_rect(picture.visible_rect());
  DCHECK(decoder_texture_target_);
  // Convert timestamp from 90KHz to ms.
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_cast<uint64_t>(timestamp) * 1000 / 90);
  return media::VideoFrame::WrapNativeTexture(
      make_scoped_ptr(new gpu::MailboxHolder(
          pb.texture_mailbox(), decoder_texture_target_, 0)),
      media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReleaseMailbox,
                                          weak_factory_.GetWeakPtr(),
                                          factories_,
                                          picture.picture_buffer_id(),
                                          pb.texture_id())),
      pb.size(),
      visible_rect,
      visible_rect.size(),
      timestamp_ms,
      base::Bind(&ReadPixelsSync, factories_, pb.texture_id(), visible_rect));
}

void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyFlushDone() {
  DVLOG(3) << "NotifyFlushDone";
  NOTREACHED() << "Unexpected flush done notification.";
}

void RTCVideoDecoder::NotifyResetDone() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }
  // Send the pending buffers for decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error:" << error;
  UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                            error,
                            media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}

void RTCVideoDecoder::RequestBufferDecode() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.size() == 0 || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop buffers that were queued before Reset or Release was called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}

bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
  return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
}

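// Returns true if |id_buffer| was queued after the last Reset/Release, i.e. it
// comes strictly after |id_reset| in the circular 30-bit bitstream buffer ID
// space; if |id_reset| is ID_INVALID (no reset yet), every buffer qualifies.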
bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return true;
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return id_buffer == 0;
  return id_buffer == ((id_reset + 1) & ID_LAST);
}

void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    scoped_ptr<SHMBuffer> shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer.release(), buffer_data);

  // Store the buffer and the metadata to the queue.
  decode_buffers_.push_back(buffer_pair);
}

bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Too many buffers are queued; something has gone wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}

void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}

void RTCVideoDecoder::ResetInternal() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(2) << "ResetInternal";
  if (vda_)
    vda_->Reset();
}

// static
void RTCVideoDecoder::ReleaseMailbox(
    base::WeakPtr<RTCVideoDecoder> decoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories,
    int64 picture_buffer_id,
    uint32 texture_id,
    uint32 release_sync_point) {
  DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());
  factories->WaitSyncPoint(release_sync_point);

  if (decoder) {
    decoder->ReusePictureBuffer(picture_buffer_id);
    return;
  }
  // This is the last chance to delete the texture after display, because the
  // RTCVideoDecoder has already been destroyed.
  factories->DeleteTexture(texture_id);
}

void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  DCHECK(!picture_buffers_at_display_.empty());
  PictureBufferTextureMap::iterator display_iterator =
      picture_buffers_at_display_.find(picture_buffer_id);
  DCHECK(display_iterator != picture_buffers_at_display_.end());
  uint32 texture_id = display_iterator->second;
  picture_buffers_at_display_.erase(display_iterator);

  if (!assigned_picture_buffers_.count(picture_buffer_id)) {
    // This picture was dismissed while in display, so we postponed deletion.
    factories_->DeleteTexture(texture_id);
    return;
  }

  // DestroyVDA() might already have been called.
  if (vda_)
    vda_->ReusePictureBuffer(picture_buffer_id);
}

void RTCVideoDecoder::CreateVDA(media::VideoCodecProfile profile,
                                base::WaitableEvent* waiter) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  vda_ = factories_->CreateVideoDecodeAccelerator();
  if (vda_ && !vda_->Initialize(profile, this))
    vda_.release()->Destroy();
  waiter->Signal();
}

void RTCVideoDecoder::DestroyTextures() {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();

  // Not destroying PictureBuffers in |picture_buffers_at_display_| yet, since
  // their textures may still be in use by the user of this RTCVideoDecoder.
  for (PictureBufferTextureMap::iterator it =
           picture_buffers_at_display_.begin();
       it != picture_buffers_at_display_.end();
       ++it) {
    assigned_picture_buffers_.erase(it->first);
  }

  for (std::map<int32, media::PictureBuffer>::iterator it =
           assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();
}

void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}

scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse a SHM if possible.
  SHMBuffer* ret = NULL;
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post to vda thread to create shared memory if SHM cannot be reused or the
  // queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    factories_->GetTaskRunner()->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM,
                   weak_factory_.GetWeakPtr(),
                   1,
                   min_size));
  }
  return scoped_ptr<SHMBuffer>(ret);
}

void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
  available_shm_segments_.push_back(shm_buffer.release());
}

void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
  DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
  DVLOG(2) << "CreateSHM. size=" << min_size;
  int number_to_allocate;
  {
    base::AutoLock auto_lock(lock_);
    number_to_allocate =
        std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
  }
  size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
  for (int i = 0; i < number_to_allocate; i++) {
    base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
    if (shm != NULL) {
      base::AutoLock auto_lock(lock_);
      num_shm_buffers_++;
      PutSHM_Locked(
          scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
    }
  }
  // Kick off the decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value?  Because why not.  avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos.  The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that's the oldest entry and the
  // least likely to be useful in the future.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}

void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
                                    uint32_t* timestamp) {
  for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
       it != input_buffer_data_.end();
       ++it) {
    if (it->bitstream_buffer_id != bitstream_buffer_id)
      continue;
    *timestamp = it->timestamp;
    return;
  }
  NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
}

int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
  // Logging a boolean is enough to know whether HW decoding has been used.
  // Also, InitDecode rarely returns an error, so an enum is not used here.
  bool sample = (status == WEBRTC_VIDEO_CODEC_OK);
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
  return status;
}

void RTCVideoDecoder::DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent()
    const {
  DCHECK(factories_->GetTaskRunner()->BelongsToCurrentThread());
}

}  // namespace content