// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/dxva_video_decode_accelerator.h"

#if !defined(OS_WIN)
#error This file should only be built on Windows.
#endif  // !defined(OS_WIN)

#include <ks.h>
#include <codecapi.h>
#include <mfapi.h>
#include <mferror.h>
#include <wmcodecdsp.h>

#include "base/bind.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/debug/trace_event.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/shared_memory.h"
#include "base/message_loop/message_loop.h"
#include "base/win/windows_version.h"
#include "media/video/video_decode_accelerator.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_surface_egl.h"
#include "ui/gl/gl_switches.h"

namespace content {

// We only request 5 picture buffers from the client, which are used to hold
// the decoded samples. These buffers are then reused when the client tells us
// that it is done with a buffer.
static const int kNumPictureBuffers = 5;

#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

#define RETURN_ON_HR_FAILURE(result, log, ret)                    \
  RETURN_ON_FAILURE(SUCCEEDED(result),                            \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)  \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                      \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);

// Maximum number of iterations we allow before aborting the attempt to flush
// the batched queries to the driver; after that, torn/corrupt frames may be
// rendered.
enum { kMaxIterationsForD3DFlush = 10 };

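// Creates an empty Media Foundation sample with no buffers.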
static IMFSample* CreateEmptySample() {
  base::win::ScopedComPtr<IMFSample> sample;
  HRESULT hr = MFCreateSample(sample.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
  return sample.Detach();
}

// Creates a Media Foundation sample with one buffer of length |buffer_length|
// on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
  CHECK_GT(buffer_length, 0);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySample());

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = E_FAIL;
  if (align == 0) {
    // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer
    // with the align argument being 0.
    hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
  } else {
    hr = MFCreateAlignedMemoryBuffer(buffer_length,
                                     align - 1,
                                     buffer.Receive());
  }
  RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);

  hr = sample->AddBuffer(buffer);
  RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);

  return sample.Detach();
}

// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
// If duration and sample time are not known, provide 0.
// |min_size| specifies the minimum size of the buffer (might be required by
// the decoder for input). If no alignment is required, provide 0.
static IMFSample* CreateInputSample(const uint8* stream, int size,
                                    int min_size, int alignment) {
  CHECK(stream);
  CHECK_GT(size, 0);
  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
                                            alignment));
  RETURN_ON_FAILURE(sample, "Failed to create empty sample", NULL);

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);

  DWORD max_length = 0;
  DWORD current_length = 0;
  uint8* destination = NULL;
  hr = buffer->Lock(&destination, &max_length, &current_length);
  RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);

  CHECK_EQ(current_length, 0u);
  CHECK_GE(static_cast<int>(max_length), size);
  memcpy(destination, stream, size);

  hr = buffer->Unlock();
  RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);

  hr = buffer->SetCurrentLength(size);
  RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);

  return sample.Detach();
}

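// Maps the shared memory backing |bitstream_buffer| and copies its contents
// into a new Media Foundation input sample.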
static IMFSample* CreateSampleFromInputBuffer(
    const media::BitstreamBuffer& bitstream_buffer,
    DWORD stream_size,
    DWORD alignment) {
  base::SharedMemory shm(bitstream_buffer.handle(), true);
  RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
                    "Failed in base::SharedMemory::Map", NULL);

  return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
                           bitstream_buffer.size(),
                           stream_size,
                           alignment);
}

// Maintains information about a DXVA picture buffer, i.e. whether it is
// available for rendering, the texture information, etc.
struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
 public:
  static linked_ptr<DXVAPictureBuffer> Create(
      const DXVAVideoDecodeAccelerator& decoder,
      const media::PictureBuffer& buffer,
      EGLConfig egl_config);
  ~DXVAPictureBuffer();

  void ReusePictureBuffer();
  // Copies the output sample data to the picture buffer provided by the
  // client.
  // The dest_surface parameter contains the decoded bits.
  bool CopyOutputSampleDataToPictureBuffer(
      const DXVAVideoDecodeAccelerator& decoder,
      IDirect3DSurface9* dest_surface);

  bool available() const {
    return available_;
  }

  void set_available(bool available) {
    available_ = available;
  }

  int id() const {
    return picture_buffer_.id();
  }

  gfx::Size size() const {
    return picture_buffer_.size();
  }

 private:
  explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);

  bool available_;
  media::PictureBuffer picture_buffer_;
  EGLSurface decoding_surface_;
  base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;
  // Set to true if RGB is supported by the texture.
  // Defaults to true.
  bool use_rgb_;

  DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};

// static
linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
    const DXVAVideoDecodeAccelerator& decoder,
    const media::PictureBuffer& buffer,
    EGLConfig egl_config) {
  linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  EGLint use_rgb = 1;
  eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB,
                     &use_rgb);

  EGLint attrib_list[] = {
    EGL_WIDTH, buffer.size().width(),
    EGL_HEIGHT, buffer.size().height(),
    EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA,
    EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
    EGL_NONE
  };

  picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
      egl_display,
      egl_config,
      attrib_list);
  RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
                    "Failed to create surface",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HANDLE share_handle = NULL;
  EGLBoolean ret = eglQuerySurfacePointerANGLE(
      egl_display,
      picture_buffer->decoding_surface_,
      EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
      &share_handle);

  RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
                    "Failed to query ANGLE surface pointer",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HRESULT hr = decoder.device_->CreateTexture(
      buffer.size().width(),
      buffer.size().height(),
      1,
      D3DUSAGE_RENDERTARGET,
      use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8,
      D3DPOOL_DEFAULT,
      picture_buffer->decoding_texture_.Receive(),
      &share_handle);

  RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
                       linked_ptr<DXVAPictureBuffer>(NULL));
  picture_buffer->use_rgb_ = !!use_rgb;
  return picture_buffer;
}

DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available_(true),
      picture_buffer_(buffer),
      decoding_surface_(NULL),
      use_rgb_(true) {
}

DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
  if (decoding_surface_) {
    EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

    eglReleaseTexImage(
        egl_display,
        decoding_surface_,
        EGL_BACK_BUFFER);

    eglDestroySurface(
        egl_display,
        decoding_surface_);
    decoding_surface_ = NULL;
  }
}

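// Releases the EGL texture binding on the decoding surface and marks the
// buffer as available for reuse.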
void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
  DCHECK(decoding_surface_);
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglReleaseTexImage(
    egl_display,
    decoding_surface_,
    EGL_BACK_BUFFER);
  set_available(true);
}

bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
    CopyOutputSampleDataToPictureBuffer(
        const DXVAVideoDecodeAccelerator& decoder,
        IDirect3DSurface9* dest_surface) {
  DCHECK(dest_surface);

  D3DSURFACE_DESC surface_desc;
  HRESULT hr = dest_surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  D3DSURFACE_DESC texture_desc;
  decoding_texture_->GetLevelDesc(0, &texture_desc);

  if (texture_desc.Width != surface_desc.Width ||
      texture_desc.Height != surface_desc.Height) {
    NOTREACHED() << "Decode surface of different dimension than texture";
    return false;
  }

  hr = decoder.d3d9_->CheckDeviceFormatConversion(
      D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format,
      use_rgb_ ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8);
  RETURN_ON_HR_FAILURE(hr, "Device does not support format conversion", false);

  // This function currently executes in the context of IPC handlers in the
  // GPU process which ensures that there is always an OpenGL context.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

  base::win::ScopedComPtr<IDirect3DSurface9> d3d_surface;
  hr = decoding_texture_->GetSurfaceLevel(0, d3d_surface.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);

  hr = decoder.device_->StretchRect(
      dest_surface, NULL, d3d_surface, NULL, D3DTEXF_NONE);
  RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",
                       false);

  // Ideally, this should be done immediately before the draw call that uses
  // the texture. Flush it once here though.
  hr = decoder.query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END", false);

  // The DXVA decoder has its own device which it uses for decoding. ANGLE
  // has its own device which we don't have access to.
  // The above code attempts to copy the decoded picture into a surface
  // which is owned by ANGLE. As there are multiple devices involved in
  // this, the StretchRect call above is not synchronous.
  // We attempt to flush the batched operations to ensure that the picture is
  // copied to the surface owned by ANGLE.
  // We need to do this in a loop and call flush multiple times.
  // We have seen the GetData call for flushing the command buffer fail to
  // return success occasionally on multi-core machines, leading to an
  // infinite loop.
  // The workaround is to cap the number of iterations at
  // kMaxIterationsForD3DFlush while waiting for the flush to finish.
  int iterations = 0;
  while ((decoder.query_->GetData(NULL, 0, D3DGETDATA_FLUSH) == S_FALSE) &&
         ++iterations < kMaxIterationsForD3DFlush) {
    Sleep(1);  // Poor-man's Yield().
  }
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglBindTexImage(
      egl_display,
      decoding_surface_,
      EGL_BACK_BUFFER);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glBindTexture(GL_TEXTURE_2D, current_texture);
  return true;
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IMFSample* sample)
    : input_buffer_id(buffer_id) {
  output_sample.Attach(sample);
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}

// Creates the D3D9 device, the Direct3D device manager and the event query
// used to flush the driver command queue.
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");

  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = ::GetShellWindow();
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
                             D3DDEVTYPE_HAL,
                             ::GetShellWindow(),
                             D3DCREATE_FPU_PRESERVE |
                             D3DCREATE_SOFTWARE_VERTEXPROCESSING |
                             D3DCREATE_DISABLE_PSGP_THREADING |
                             D3DCREATE_MULTITHREADED,
                             &present_params,
                             NULL,
                             device_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  hr = device_manager_->ResetDevice(device_, dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
  // Ensure query_ API works (to avoid an infinite loop later in
  // CopyOutputSampleDataToPictureBuffer).
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
  return true;
}

DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    const base::Callback<bool(void)>& make_context_current)
    : client_(NULL),
      dev_manager_reset_token_(0),
      egl_config_(NULL),
      state_(kUninitialized),
      pictures_requested_(false),
      inputs_before_decode_(0),
      make_context_current_(make_context_current),
      weak_this_factory_(this) {
  memset(&input_stream_info_, 0, sizeof(input_stream_info_));
  memset(&output_stream_info_, 0, sizeof(output_stream_info_));
}

DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  client_ = NULL;
}

bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                            Client* client) {
  DCHECK(CalledOnValidThread());

  client_ = client;

  // Not all versions of Windows 7 and later include Media Foundation DLLs.
  // Instead of crashing while delay loading the DLL when calling MFStartup()
  // below, probe whether we can successfully load the DLL now.
  //
  // See http://crbug.com/339678 for details.
  HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll");
  RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", false);

  // TODO(ananta)
  // H264PROFILE_HIGH video decoding is janky at times. Needs more
  // investigation.
  if (profile != media::H264PROFILE_BASELINE &&
      profile != media::H264PROFILE_MAIN &&
      profile != media::H264PROFILE_HIGH) {
    RETURN_AND_NOTIFY_ON_FAILURE(false,
        "Unsupported h264 profile", PLATFORM_FAILURE, false);
  }

  RETURN_AND_NOTIFY_ON_FAILURE(
      gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
      "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
      PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kUninitialized),
      "Initialize: invalid state: " << state_, ILLEGAL_STATE, false);

  HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
                               "Failed to initialize D3D device and manager",
                               PLATFORM_FAILURE,
                               false);

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
      "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
      PLATFORM_FAILURE, false);

  state_ = kNormal;
  return true;
}

void DXVAVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped ||
                                state_ == kFlushing),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
                                            input_stream_info_.cbSize,
                                            input_stream_info_.cbAlignment));
  RETURN_AND_NOTIFY_ON_FAILURE(sample, "Failed to create input sample",
                               PLATFORM_FAILURE,);

  RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
      "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);

  DecodeInternal(sample);
}

void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);
  RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
      "Failed to provide requested picture buffers. (Got " << buffers.size() <<
      ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);

  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    linked_ptr<DXVAPictureBuffer> picture_buffer =
        DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
    RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
        "Failed to allocate picture buffer", PLATFORM_FAILURE,);

    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(), picture_buffer)).second;
    DCHECK(inserted);
  }
  ProcessPendingSamples();
  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}

void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  if (output_picture_buffers_.empty())
    return;

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  RETURN_AND_NOTIFY_ON_FAILURE(it != output_picture_buffers_.end(),
      "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);

  it->second->ReusePictureBuffer();
  ProcessPendingSamples();

  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}

void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Unexpected decoder state: " << state_, ILLEGAL_STATE,);

  state_ = kFlushing;

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
      "Failed to send drain message", PLATFORM_FAILURE,);

  if (!pending_output_samples_.empty())
    return;

  FlushInternal();
}

void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Reset: invalid state: " << state_, ILLEGAL_STATE,);

  state_ = kResetting;

  pending_output_samples_.clear();

  NotifyInputBuffersDropped();

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone,
                 weak_this_factory_.GetWeakPtr()));

  state_ = DXVAVideoDecodeAccelerator::kNormal;
}

void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(CalledOnValidThread());
  Invalidate();
  delete this;
}

bool DXVAVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}

bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) {
  if (profile < media::H264PROFILE_MIN || profile > media::H264PROFILE_MAX)
    return false;

  // We mimic the steps CoCreateInstance uses to instantiate the object. This
  // was previously done because it failed inside the sandbox, and now is done
  // as a more minimal approach to avoid other side-effects CCI might have (as
  // we are still in a reduced sandbox).
  HMODULE decoder_dll = ::LoadLibrary(L"msmpeg2vdec.dll");
  RETURN_ON_FAILURE(decoder_dll,
                    "msmpeg2vdec.dll required for decoding is not loaded",
                    false);

  typedef HRESULT(WINAPI* GetClassObject)(const CLSID& clsid,
                                          const IID& iid,
                                          void** object);

  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
      GetProcAddress(decoder_dll, "DllGetClassObject"));
  RETURN_ON_FAILURE(
      get_class_object, "Failed to get DllGetClassObject pointer", false);

  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(__uuidof(CMSH264DecoderMFT),
                                __uuidof(IClassFactory),
                                reinterpret_cast<void**>(factory.Receive()));
  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);

  hr = factory->CreateInstance(NULL,
                               __uuidof(IMFTransform),
                               reinterpret_cast<void**>(decoder_.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);

  RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
                    "Failed to check decoder DXVA support", false);

  hr = decoder_->ProcessMessage(
            MFT_MESSAGE_SET_D3D_MANAGER,
            reinterpret_cast<ULONG_PTR>(device_manager_.get()));
  RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  EGLint config_attribs[] = {
    EGL_BUFFER_SIZE, 32,
    EGL_RED_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_BLUE_SIZE, 8,
    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
    EGL_ALPHA_SIZE, 0,
    EGL_NONE
  };

  EGLint num_configs;

  if (!eglChooseConfig(
      egl_display,
      config_attribs,
      &egl_config_,
      1,
      &num_configs))
    return false;

  return SetDecoderMediaTypes();
}

bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
  base::win::ScopedComPtr<IMFAttributes> attributes;
  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);

  UINT32 dxva = 0;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);

  hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
  RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
  return true;
}

bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
                    "Failed to set decoder input media type", false);
  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
}

bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
  base::win::ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr = MFCreateMediaType(media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);

  // Not sure about this. MSDN recommends setting this value on the input
  // media type.
  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_MixedInterlaceOrProgressive);
  RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);

  hr = decoder_->SetInputType(0, media_type, 0);  // No flags
  RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
  return true;
}

bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output subtype", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type, 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    out_media_type.Release();
  }
  return false;
}

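// Sends |msg| to the MFT decoder and returns true if the decoder accepted it.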
bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
                                                int32 param) {
  HRESULT hr = decoder_->ProcessMessage(msg, param);
  return SUCCEEDED(hr);
}

// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffers for input or output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  // There should be three flags, one for requiring that a whole frame be in a
  // single sample, one for requiring that there be only one buffer in a single
  // sample, and one that specifies a fixed sample size (as in cbSize).
  CHECK_EQ(input_stream_info_.dwFlags, 0x7u);

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except that
  // when DXVA is enabled, there is an extra 0x100 flag meaning the decoder
  // will allocate its own samples.
  DVLOG(1) << "Flags: "
           << std::hex << std::showbase << output_stream_info_.dwFlags;
  CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}

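// Pulls one decoded sample out of the MFT decoder, handling output format
// changes and the need-more-input case, and hands it to ProcessOutputSample.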
void DXVAVideoDecodeAccelerator::DoDecode() {
  // This function is also called from FlushInternal in a loop which could
  // result in the state transitioning to kStopped due to no decoded output.
  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kFlushing ||
                                state_ == kStopped),
      "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    VLOG(1) << "Got events from ProcessOutput, but discarding";
    events->Release();
  }
  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        state_ = kStopped;
      } else {
        DVLOG(1) << "Received output format change from the decoder."
                    " Recursively invoking DoDecode";
        DoDecode();
      }
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      state_ = kStopped;
      return;
    } else {
      NOTREACHED() << "Unhandled error in DoDecode()";
      return;
    }
  }
  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);
}

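// Queues the decoded sample and either copies it into an available picture
// buffer or, on the first sample, asks the client for picture buffers.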
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  base::win::ScopedComPtr<IDirect3DSurface9> surface;
  hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                    IID_PPV_ARGS(surface.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                       false);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  pending_output_samples_.push_back(
      PendingSampleInfo(input_buffer_id, sample));

  // If we have available picture buffers to copy the output data then use the
  // first one and then flag it as not being available for use.
  if (output_picture_buffers_.size()) {
    ProcessPendingSamples();
    return true;
  }
  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    return true;
  }

  // We only read the surface description, which contains its width/height when
  // we need the picture buffers from the client. Once we have those, then they
  // are reused.
  D3DSURFACE_DESC surface_desc;
  hr = surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  // Go ahead and request picture buffers.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 surface_desc.Width,
                 surface_desc.Height));

  pictures_requested_ = true;
  return true;
}

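// Copies pending output samples into available picture buffers and notifies
// the client as each picture becomes ready.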
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       !pending_output_samples_.empty();
       ++index) {
    if (index->second->available()) {
      PendingSampleInfo sample_info = pending_output_samples_.front();

      base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
      HRESULT hr = sample_info.output_sample->GetBufferByIndex(
          0, output_buffer.Receive());
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,);

      base::win::ScopedComPtr<IDirect3DSurface9> surface;
      hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                        IID_PPV_ARGS(surface.Receive()));
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get D3D surface from output sample",
          PLATFORM_FAILURE,);

      D3DSURFACE_DESC surface_desc;
      hr = surface->GetDesc(&surface_desc);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get surface description", PLATFORM_FAILURE,);

      if (surface_desc.Width !=
              static_cast<uint32>(index->second->size().width()) ||
          surface_desc.Height !=
              static_cast<uint32>(index->second->size().height())) {
        HandleResolutionChanged(surface_desc.Width, surface_desc.Height);
        return;
      }

      RETURN_AND_NOTIFY_ON_FAILURE(
          index->second->CopyOutputSampleDataToPictureBuffer(*this, surface),
          "Failed to copy output sample",
          PLATFORM_FAILURE,);

      media::Picture output_picture(index->second->id(),
                                    sample_info.input_buffer_id);
      base::MessageLoop::current()->PostTask(
          FROM_HERE,
          base::Bind(&DXVAVideoDecodeAccelerator::NotifyPictureReady,
                     weak_this_factory_.GetWeakPtr(),
                     output_picture));

      index->second->set_available(false);
      pending_output_samples_.pop_front();
    }
  }

  if (!pending_input_buffers_.empty() && pending_output_samples_.empty()) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                   weak_this_factory_.GetWeakPtr()));
  }
}

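// Reports |error| to the client and tears down the decoder state.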
void DXVAVideoDecodeAccelerator::StopOnError(
    media::VideoDecodeAccelerator::Error error) {
  DCHECK(CalledOnValidThread());

  if (client_)
    client_->NotifyError(error);
  client_ = NULL;

  if (state_ != kUninitialized) {
    Invalidate();
  }
}

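// Releases all decoder resources and transitions back to the uninitialized
// state.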
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (state_ == kUninitialized)
    return;
  weak_this_factory_.InvalidateWeakPtrs();
  output_picture_buffers_.clear();
  pending_output_samples_.clear();
  pending_input_buffers_.clear();
  decoder_.Release();
  MFShutdown();
  state_ = kUninitialized;
}

void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
  if (client_)
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}

void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
  if (client_)
    client_->NotifyFlushDone();
}

void DXVAVideoDecodeAccelerator::NotifyResetDone() {
  if (client_)
    client_->NotifyResetDone();
}

void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
  // This task could execute after the decoder has been torn down.
  if (state_ != kUninitialized && client_) {
    client_->ProvidePictureBuffers(
        kNumPictureBuffers,
        gfx::Size(width, height),
        GL_TEXTURE_2D);
  }
}

void DXVAVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  // This task could execute after the decoder has been torn down.
  if (state_ != kUninitialized && client_)
    client_->PictureReady(picture);
}

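// Informs the client that the pending input buffers have been dropped, i.e.
// no output pictures will be produced for them, and clears the queue.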
void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
  if (!client_ || !pending_output_samples_.empty())
    return;

  for (PendingInputs::iterator it = pending_input_buffers_.begin();
       it != pending_input_buffers_.end(); ++it) {
    LONGLONG input_buffer_id = 0;
    RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
                         "Failed to get buffer id associated with sample",);
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
  }
  pending_input_buffers_.clear();
}

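// Re-submits input samples that were queued up while output samples were
// still pending.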
void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  if (pending_input_buffers_.empty() || !pending_output_samples_.empty())
    return;

  PendingInputs pending_input_buffers_copy;
  std::swap(pending_input_buffers_, pending_input_buffers_copy);

  for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
       it != pending_input_buffers_copy.end(); ++it) {
    DecodeInternal(*it);
  }
}

void DXVAVideoDecodeAccelerator::FlushInternal() {
  // The DoDecode function sets the state to kStopped when the decoder returns
  // MF_E_TRANSFORM_NEED_MORE_INPUT.
  // The MFT decoder can buffer up to 30 frames' worth of input before
  // returning an output frame. This loop attempts to retrieve as many output
  // frames as possible from the buffered set.
  while (state_ != kStopped) {
    DoDecode();
    if (!pending_output_samples_.empty())
      return;
  }

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
                 weak_this_factory_.GetWeakPtr()));

  state_ = kNormal;
}

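// Feeds |sample| to the MFT decoder, queueing it if output is still pending,
// and drains available output when the decoder reports MF_E_NOTACCEPTING.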
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(CalledOnValidThread());

  if (state_ == kUninitialized)
    return;

  if (!pending_output_samples_.empty() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  if (!inputs_before_decode_) {
    TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample, 0);
  // As per MSDN, if the decoder returns MF_E_NOTACCEPTING it means that it
  // has enough data to produce one or more output samples. In this case the
  // recommended options are to
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data.
  // We implement the first option, i.e. retrieve the output sample and then
  // process the input again. Failure in either of these steps is treated as a
  // decoder failure.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
        "Failed to process output. Unexpected decoder state: " << state_,
        PLATFORM_FAILURE,);
    hr = decoder_->ProcessInput(0, sample, 0);
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support one pending output sample at any given
    // time, due to a limitation of the Microsoft Media Foundation decoder,
    // which recycles its output surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      if (pending_output_samples_.empty()) {
        base::MessageLoop::current()->PostTask(
            FROM_HERE,
            base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                       weak_this_factory_.GetWeakPtr()));
      }
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
      PLATFORM_FAILURE,);

  DoDecode();

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
      "Failed to process output. Unexpected decoder state: " << state_,
      ILLEGAL_STATE,);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",);
  // The Microsoft Media Foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // TODO(ananta)
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
                 weak_this_factory_.GetWeakPtr(),
                 input_buffer_id));
}

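// Dismisses the existing picture buffers and requests new ones matching the
// new stream dimensions.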
void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
                                                         int height) {
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 output_picture_buffers_));

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 width,
                 height));

  output_picture_buffers_.clear();
}

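// Informs the client that the given picture buffers are stale and can be
// dismissed.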
void DXVAVideoDecodeAccelerator::DismissStaleBuffers(
    const OutputBuffers& picture_buffers) {
  OutputBuffers::const_iterator index;

  for (index = picture_buffers.begin();
       index != picture_buffers.end();
       ++index) {
    DVLOG(1) << "Dismissing picture id: " << index->second->id();
    client_->DismissPictureBuffer(index->second->id());
  }
}

}  // namespace content