1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h"
6
7 #if !defined(OS_WIN)
8 #error This file should only be built on Windows.
9 #endif // !defined(OS_WIN)
10
11 #include <ks.h>
12 #include <codecapi.h>
13 #include <mfapi.h>
14 #include <mferror.h>
15 #include <wmcodecdsp.h>
16
17 #include "base/bind.h"
18 #include "base/callback.h"
19 #include "base/command_line.h"
20 #include "base/debug/trace_event.h"
21 #include "base/logging.h"
22 #include "base/memory/scoped_handle.h"
23 #include "base/memory/scoped_ptr.h"
24 #include "base/memory/shared_memory.h"
25 #include "base/message_loop/message_loop.h"
26 #include "base/win/windows_version.h"
27 #include "media/video/video_decode_accelerator.h"
28 #include "ui/gl/gl_bindings.h"
29 #include "ui/gl/gl_surface_egl.h"
30 #include "ui/gl/gl_switches.h"
31
32 namespace content {
33
// We only request 5 picture buffers from the client which are used to hold the
// decoded samples. These buffers are then reused when the client tells us that
// it is done with the buffer.
static const int kNumPictureBuffers = 5;

// Logs |log| via DLOG(ERROR) and returns |ret| from the enclosing function
// when |result| evaluates to false. Used for failures that do not need to be
// reported to the client.
#define RETURN_ON_FAILURE(result, log, ret) \
  do { \
    if (!(result)) { \
      DLOG(ERROR) << log; \
      return ret; \
    } \
  } while (0)

// As RETURN_ON_FAILURE, but takes an HRESULT |result| and appends its hex
// value to the log message.
// NOTE(review): the trailing semicolon makes call sites expand to an extra
// empty statement; harmless in current (braced) usage, but would break inside
// an unbraced if/else — consider dropping it.
#define RETURN_ON_HR_FAILURE(result, log, ret) \
  RETURN_ON_FAILURE(SUCCEEDED(result), \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

// As RETURN_ON_FAILURE, but additionally reports |error_code| to the client
// via StopOnError() before returning. Only usable from member functions that
// have StopOnError in scope.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret) \
  do { \
    if (!(result)) { \
      DVLOG(1) << log; \
      StopOnError(error_code); \
      return ret; \
    } \
  } while (0)

// HRESULT flavor of RETURN_AND_NOTIFY_ON_FAILURE; appends the hex HRESULT to
// the log message. (Same trailing-semicolon caveat as above.)
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret) \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result), \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);

// Maximum number of iterations we allow before aborting the attempt to flush
// the batched queries to the driver and allow torn/corrupt frames to be
// rendered.
enum { kMaxIterationsForD3DFlush = 10 };
70
CreateEmptySample()71 static IMFSample* CreateEmptySample() {
72 base::win::ScopedComPtr<IMFSample> sample;
73 HRESULT hr = MFCreateSample(sample.Receive());
74 RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
75 return sample.Detach();
76 }
77
78 // Creates a Media Foundation sample with one buffer of length |buffer_length|
79 // on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
CreateEmptySampleWithBuffer(int buffer_length,int align)80 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
81 CHECK_GT(buffer_length, 0);
82
83 base::win::ScopedComPtr<IMFSample> sample;
84 sample.Attach(CreateEmptySample());
85
86 base::win::ScopedComPtr<IMFMediaBuffer> buffer;
87 HRESULT hr = E_FAIL;
88 if (align == 0) {
89 // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer
90 // with the align argument being 0.
91 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
92 } else {
93 hr = MFCreateAlignedMemoryBuffer(buffer_length,
94 align - 1,
95 buffer.Receive());
96 }
97 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);
98
99 hr = sample->AddBuffer(buffer);
100 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);
101
102 return sample.Detach();
103 }
104
105 // Creates a Media Foundation sample with one buffer containing a copy of the
106 // given Annex B stream data.
107 // If duration and sample time are not known, provide 0.
108 // |min_size| specifies the minimum size of the buffer (might be required by
109 // the decoder for input). If no alignment is required, provide 0.
CreateInputSample(const uint8 * stream,int size,int min_size,int alignment)110 static IMFSample* CreateInputSample(const uint8* stream, int size,
111 int min_size, int alignment) {
112 CHECK(stream);
113 CHECK_GT(size, 0);
114 base::win::ScopedComPtr<IMFSample> sample;
115 sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
116 alignment));
117 RETURN_ON_FAILURE(sample, "Failed to create empty sample", NULL);
118
119 base::win::ScopedComPtr<IMFMediaBuffer> buffer;
120 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
121 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);
122
123 DWORD max_length = 0;
124 DWORD current_length = 0;
125 uint8* destination = NULL;
126 hr = buffer->Lock(&destination, &max_length, ¤t_length);
127 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);
128
129 CHECK_EQ(current_length, 0u);
130 CHECK_GE(static_cast<int>(max_length), size);
131 memcpy(destination, stream, size);
132
133 hr = buffer->Unlock();
134 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);
135
136 hr = buffer->SetCurrentLength(size);
137 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);
138
139 return sample.Detach();
140 }
141
CreateSampleFromInputBuffer(const media::BitstreamBuffer & bitstream_buffer,DWORD stream_size,DWORD alignment)142 static IMFSample* CreateSampleFromInputBuffer(
143 const media::BitstreamBuffer& bitstream_buffer,
144 DWORD stream_size,
145 DWORD alignment) {
146 base::SharedMemory shm(bitstream_buffer.handle(), true);
147 RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
148 "Failed in base::SharedMemory::Map", NULL);
149
150 return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
151 bitstream_buffer.size(),
152 stream_size,
153 alignment);
154 }
155
// Maintains information about a DXVA picture buffer, i.e. whether it is
// available for rendering, the texture information, etc.
struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
 public:
  // Creates a picture buffer backed by an ANGLE pbuffer surface plus a D3D9
  // texture sharing the same memory. Returns a NULL linked_ptr on failure.
  static linked_ptr<DXVAPictureBuffer> Create(
      const DXVAVideoDecodeAccelerator& decoder,
      const media::PictureBuffer& buffer,
      EGLConfig egl_config);
  ~DXVAPictureBuffer();

  // Releases the EGL texture binding and marks the buffer available again.
  void ReusePictureBuffer();
  // Copies the output sample data to the picture buffer provided by the
  // client.
  // The dest_surface parameter contains the decoded bits.
  bool CopyOutputSampleDataToPictureBuffer(
      const DXVAVideoDecodeAccelerator& decoder,
      IDirect3DSurface9* dest_surface);

  // True when the buffer is free to receive a new decoded frame.
  bool available() const {
    return available_;
  }

  void set_available(bool available) {
    available_ = available;
  }

  // Id of the client-provided media::PictureBuffer this wraps.
  int id() const {
    return picture_buffer_.id();
  }

  gfx::Size size() const {
    return picture_buffer_.size();
  }

 private:
  explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);

  bool available_;
  media::PictureBuffer picture_buffer_;
  // ANGLE pbuffer surface bound to the client's GL texture via
  // eglBindTexImage.
  EGLSurface decoding_surface_;
  // D3D9 texture aliasing |decoding_surface_| through a share handle;
  // decoded frames are StretchRect'ed into it.
  base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;

  DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};
200
// static
linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
    const DXVAVideoDecodeAccelerator& decoder,
    const media::PictureBuffer& buffer,
    EGLConfig egl_config) {
  linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  // Pbuffer matching the client picture buffer dimensions. The texture
  // format/target attributes allow the pbuffer to later be bound to a GL
  // texture with eglBindTexImage.
  EGLint attrib_list[] = {
    EGL_WIDTH, buffer.size().width(),
    EGL_HEIGHT, buffer.size().height(),
    EGL_TEXTURE_FORMAT, EGL_TEXTURE_RGB,
    EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
    EGL_NONE
  };

  picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
      egl_display,
      egl_config,
      attrib_list);
  RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
                    "Failed to create surface",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  // Fetch the D3D share handle backing the ANGLE pbuffer so a texture on the
  // decoder's own device can alias the same memory.
  HANDLE share_handle = NULL;
  EGLBoolean ret = eglQuerySurfacePointerANGLE(
      egl_display,
      picture_buffer->decoding_surface_,
      EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
      &share_handle);

  RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
                    "Failed to query ANGLE surface pointer",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  // Render-target texture on the decoder's device, opened from
  // |share_handle|; StretchRect targets its level-0 surface.
  HRESULT hr = decoder.device_->CreateTexture(
      buffer.size().width(),
      buffer.size().height(),
      1,
      D3DUSAGE_RENDERTARGET,
      D3DFMT_X8R8G8B8,
      D3DPOOL_DEFAULT,
      picture_buffer->decoding_texture_.Receive(),
      &share_handle);

  RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
                       linked_ptr<DXVAPictureBuffer>(NULL));
  // On any failure above, ~DXVAPictureBuffer cleans up the surface.
  return picture_buffer;
}
252
// Buffers start out available; the EGL surface and shared texture are filled
// in afterwards by Create().
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available_(true),
      picture_buffer_(buffer),
      decoding_surface_(NULL) {
}
259
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
  if (decoding_surface_) {
    EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

    // Release any outstanding texture binding before destroying the pbuffer.
    eglReleaseTexImage(
        egl_display,
        decoding_surface_,
        EGL_BACK_BUFFER);

    eglDestroySurface(
        egl_display,
        decoding_surface_);
    decoding_surface_ = NULL;
  }
  // decoding_texture_ releases its COM reference automatically.
}
275
ReusePictureBuffer()276 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
277 DCHECK(decoding_surface_);
278 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
279 eglReleaseTexImage(
280 egl_display,
281 decoding_surface_,
282 EGL_BACK_BUFFER);
283 set_available(true);
284 }
285
286 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
CopyOutputSampleDataToPictureBuffer(const DXVAVideoDecodeAccelerator & decoder,IDirect3DSurface9 * dest_surface)287 CopyOutputSampleDataToPictureBuffer(
288 const DXVAVideoDecodeAccelerator& decoder,
289 IDirect3DSurface9* dest_surface) {
290 DCHECK(dest_surface);
291
292 D3DSURFACE_DESC surface_desc;
293 HRESULT hr = dest_surface->GetDesc(&surface_desc);
294 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
295
296 D3DSURFACE_DESC texture_desc;
297 decoding_texture_->GetLevelDesc(0, &texture_desc);
298
299 if (texture_desc.Width != surface_desc.Width ||
300 texture_desc.Height != surface_desc.Height) {
301 NOTREACHED() << "Decode surface of different dimension than texture";
302 return false;
303 }
304
305 hr = decoder.d3d9_->CheckDeviceFormatConversion(
306 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format, D3DFMT_X8R8G8B8);
307 RETURN_ON_HR_FAILURE(hr, "Device does not support format converision", false);
308
309 // This function currently executes in the context of IPC handlers in the
310 // GPU process which ensures that there is always an OpenGL context.
311 GLint current_texture = 0;
312 glGetIntegerv(GL_TEXTURE_BINDING_2D, ¤t_texture);
313
314 glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());
315
316 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
317
318 base::win::ScopedComPtr<IDirect3DSurface9> d3d_surface;
319 hr = decoding_texture_->GetSurfaceLevel(0, d3d_surface.Receive());
320 RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);
321
322 hr = decoder.device_->StretchRect(
323 dest_surface, NULL, d3d_surface, NULL, D3DTEXF_NONE);
324 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",
325 false);
326
327 // Ideally, this should be done immediately before the draw call that uses
328 // the texture. Flush it once here though.
329 hr = decoder.query_->Issue(D3DISSUE_END);
330 RETURN_ON_HR_FAILURE(hr, "Failed to issue END", false);
331
332 // The DXVA decoder has its own device which it uses for decoding. ANGLE
333 // has its own device which we don't have access to.
334 // The above code attempts to copy the decoded picture into a surface
335 // which is owned by ANGLE. As there are multiple devices involved in
336 // this, the StretchRect call above is not synchronous.
337 // We attempt to flush the batched operations to ensure that the picture is
338 // copied to the surface owned by ANGLE.
339 // We need to do this in a loop and call flush multiple times.
340 // We have seen the GetData call for flushing the command buffer fail to
341 // return success occassionally on multi core machines, leading to an
342 // infinite loop.
343 // Workaround is to have an upper limit of 10 on the number of iterations to
344 // wait for the Flush to finish.
345 int iterations = 0;
346 while ((decoder.query_->GetData(NULL, 0, D3DGETDATA_FLUSH) == S_FALSE) &&
347 ++iterations < kMaxIterationsForD3DFlush) {
348 Sleep(1); // Poor-man's Yield().
349 }
350 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
351 eglBindTexImage(
352 egl_display,
353 decoding_surface_,
354 EGL_BACK_BUFFER);
355 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
356 glBindTexture(GL_TEXTURE_2D, current_texture);
357 return true;
358 }
359
// Associates one decoded output |sample| with the id of the bitstream buffer
// it was decoded from. Takes over |sample|'s COM reference.
DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IMFSample* sample)
    : input_buffer_id(buffer_id) {
  output_sample.Attach(sample);
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
367
// static
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");

  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  // Minimal 1x1 windowed swap chain: this device is used only for decoding
  // and StretchRect copies, never for presenting to a window.
  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = ::GetShellWindow();
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
                             D3DDEVTYPE_HAL,
                             ::GetShellWindow(),
                             D3DCREATE_FPU_PRESERVE |
                             D3DCREATE_SOFTWARE_VERTEXPROCESSING |
                             D3DCREATE_DISABLE_PSGP_THREADING |
                             D3DCREATE_MULTITHREADED,
                             &present_params,
                             NULL,
                             device_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  // The device manager is what gets handed to the decoder MFT later via
  // MFT_MESSAGE_SET_D3D_MANAGER (see InitDecoder).
  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  hr = device_manager_->ResetDevice(device_, dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
  // Ensure query_ API works (to avoid an infinite loop later in
  // CopyOutputSampleDataToPictureBuffer).
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
  return true;
}
414
// |client| receives decode/picture notifications. |make_context_current| is
// run to make the GL context current before GL calls are issued (see
// ProcessPendingSamples).
DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    media::VideoDecodeAccelerator::Client* client,
    const base::Callback<bool(void)>& make_context_current)
    : client_(client),
      dev_manager_reset_token_(0),
      egl_config_(NULL),
      state_(kUninitialized),
      pictures_requested_(false),
      inputs_before_decode_(0),
      make_context_current_(make_context_current) {
  // Stream info structs are populated later by GetStreamsInfoAndBufferReqs.
  memset(&input_stream_info_, 0, sizeof(input_stream_info_));
  memset(&output_stream_info_, 0, sizeof(output_stream_info_));
}
428
// Destruction normally happens via Destroy(), which tears down decoder state
// through Invalidate() first.
DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  client_ = NULL;
}
432
bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile) {
  DCHECK(CalledOnValidThread());

  // Not all versions of Windows 7 and later include Media Foundation DLLs.
  // Instead of crashing while delay loading the DLL when calling MFStartup()
  // below, probe whether we can successfully load the DLL now.
  //
  // See http://crbug.com/339678 for details.
  // NOTE: the module handle is not freed, so MFPlat.dll stays loaded for the
  // MFStartup() call below.
  HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll");
  RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", false);

  // TODO(ananta)
  // H264PROFILE_HIGH video decoding is janky at times. Needs more
  // investigation.
  if (profile != media::H264PROFILE_BASELINE &&
      profile != media::H264PROFILE_MAIN &&
      profile != media::H264PROFILE_HIGH) {
    RETURN_AND_NOTIFY_ON_FAILURE(false,
        "Unsupported h264 profile", PLATFORM_FAILURE, false);
  }

  // Decoded frames are handed to the client through D3D share handles, so
  // the ANGLE share-handle extension is mandatory.
  RETURN_AND_NOTIFY_ON_FAILURE(
      gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
      "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
      PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kUninitialized),
      "Initialize: invalid state: " << state_, ILLEGAL_STATE, false);

  HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
                               "Failed to initialize D3D device and manager",
                               PLATFORM_FAILURE,
                               false);

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  // Move the MFT into the streaming state before any input is submitted.
  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
      "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
      PLATFORM_FAILURE, false);

  state_ = kNormal;
  // Notify asynchronously so the client observes completion outside this
  // call stack.
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyInitializeDone,
      base::AsWeakPtr(this)));
  return true;
}
494
void DXVAVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped ||
                                state_ == kFlushing),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  // Copy the bitstream into an IMFSample sized and aligned per the decoder's
  // input stream requirements.
  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
                                            input_stream_info_.cbSize,
                                            input_stream_info_.cbAlignment));
  RETURN_AND_NOTIFY_ON_FAILURE(sample, "Failed to create input sample",
                               PLATFORM_FAILURE,);

  // The sample timestamp is (ab)used to carry the bitstream buffer id, so
  // the matching output sample can be correlated back to this input (see
  // ProcessOutputSample).
  RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
      "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);

  DecodeInternal(sample);
}
515
void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);
  // The client must supply exactly the number of buffers we requested.
  RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
      "Failed to provide requested picture buffers. (Got " << buffers.size() <<
      ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);

  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    linked_ptr<DXVAPictureBuffer> picture_buffer =
        DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
    RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
        "Failed to allocate picture buffer", PLATFORM_FAILURE,);

    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(), picture_buffer)).second;
    DCHECK(inserted);
  }
  // Deliver any decoded samples that were waiting for buffers; a pending
  // flush can now complete if nothing remains queued.
  ProcessPendingSamples();
  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}
543
void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  // Nothing to reuse if the buffers have been dismissed/cleared.
  if (output_picture_buffers_.empty())
    return;

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  RETURN_AND_NOTIFY_ON_FAILURE(it != output_picture_buffers_.end(),
      "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);

  // Mark the buffer available again, then drain any decoded samples that
  // were waiting for a free buffer.
  it->second->ReusePictureBuffer();
  ProcessPendingSamples();

  // A flush in progress can only complete once every pending output has
  // been delivered.
  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}
564
void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Unexpected decoder state: " << state_, ILLEGAL_STATE,);

  state_ = kFlushing;

  // Ask the MFT to drain: produce output for all input already queued.
  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
      "Failed to send drain message", PLATFORM_FAILURE,);

  // If outputs are still pending delivery to the client, the flush completes
  // later (see AssignPictureBuffers / ReusePictureBuffer).
  if (!pending_output_samples_.empty())
    return;

  FlushInternal();
}
583
void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Reset: invalid state: " << state_, ILLEGAL_STATE,);

  state_ = kResetting;

  // Drop undelivered outputs and tell the client its in-flight inputs are
  // gone.
  pending_output_samples_.clear();

  NotifyInputBuffersDropped();

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  // Completion is signaled asynchronously, but decoding may resume
  // immediately, hence the synchronous transition back to kNormal.
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this)));

  state_ = DXVAVideoDecodeAccelerator::kNormal;
}
606
// Tears down decoder state and self-deletes; clients call this instead of
// deleting the accelerator directly.
void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(CalledOnValidThread());
  Invalidate();
  delete this;
}
612
// Loads the Microsoft H.264 decoder MFT, enables DXVA on it, hands it our
// D3D device manager and negotiates media types. Returns false on failure.
bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) {
  if (profile < media::H264PROFILE_MIN || profile > media::H264PROFILE_MAX)
    return false;

  // We mimic the steps CoCreateInstance uses to instantiate the object. This
  // was previously done because it failed inside the sandbox, and now is done
  // as a more minimal approach to avoid other side-effects CCI might have (as
  // we are still in a reduced sandbox).
  HMODULE decoder_dll = ::LoadLibrary(L"msmpeg2vdec.dll");
  RETURN_ON_FAILURE(decoder_dll,
                    "msmpeg2vdec.dll required for decoding is not loaded",
                    false);

  typedef HRESULT(WINAPI * GetClassObject)(
      const CLSID & clsid, const IID & iid, void * *object);

  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
      GetProcAddress(decoder_dll, "DllGetClassObject"));
  RETURN_ON_FAILURE(
      get_class_object, "Failed to get DllGetClassObject pointer", false);

  // Manual factory -> instance walk, as CoCreateInstance would do.
  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(__uuidof(CMSH264DecoderMFT),
                                __uuidof(IClassFactory),
                                reinterpret_cast<void**>(factory.Receive()));
  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);

  hr = factory->CreateInstance(NULL,
                               __uuidof(IMFTransform),
                               reinterpret_cast<void**>(decoder_.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);

  RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
                    "Failed to check decoder DXVA support", false);

  // Hand the shared D3D device manager to the MFT so it decodes into D3D
  // surfaces rather than system memory.
  hr = decoder_->ProcessMessage(
      MFT_MESSAGE_SET_D3D_MANAGER,
      reinterpret_cast<ULONG_PTR>(device_manager_.get()));
  RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  // 32-bit RGB pbuffer config used for the per-picture decoding surfaces
  // created in DXVAPictureBuffer::Create.
  EGLint config_attribs[] = {
    EGL_BUFFER_SIZE, 32,
    EGL_RED_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_BLUE_SIZE, 8,
    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
    EGL_ALPHA_SIZE, 0,
    EGL_NONE
  };

  EGLint num_configs;

  if (!eglChooseConfig(
      egl_display,
      config_attribs,
      &egl_config_,
      1,
      &num_configs))
    return false;

  return SetDecoderMediaTypes();
}
677
// Verifies the decoder MFT advertises D3D awareness and enables H.264
// hardware acceleration on it.
bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
  base::win::ScopedComPtr<IMFAttributes> attributes;
  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);

  UINT32 dxva = 0;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);
  // NOTE(review): only the GetUINT32 HRESULT is checked here; the retrieved
  // |dxva| value itself is never examined — confirm this is intended.

  hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
  RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
  return true;
}
691
SetDecoderMediaTypes()692 bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
693 RETURN_ON_FAILURE(SetDecoderInputMediaType(),
694 "Failed to set decoder input media type", false);
695 return SetDecoderOutputMediaType(MFVideoFormat_NV12);
696 }
697
// Declares to the MFT that it will receive H.264 video input.
bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
  base::win::ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr = MFCreateMediaType(media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);

  // Not sure about this. msdn recommends setting this value on the input
  // media type.
  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_MixedInterlaceOrProgressive);
  RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);

  hr = decoder_->SetInputType(0, media_type, 0);  // No flags
  RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
  return true;
}
719
// Enumerates the MFT's advertised output types and selects the first whose
// subtype equals |subtype|. Returns false if none matches.
bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type, 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    // Release before the next GetOutputAvailableType call refills it.
    out_media_type.Release();
  }
  return false;
}
741
SendMFTMessage(MFT_MESSAGE_TYPE msg,int32 param)742 bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
743 int32 param) {
744 HRESULT hr = decoder_->ProcessMessage(msg, param);
745 return SUCCEEDED(hr);
746 }
747
// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffer for input nor output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  // There should be three flags, one for requiring a whole frame be in a
  // single sample, one for requiring there be one buffer only in a single
  // sample, and one that specifies a fixed sample size. (as in cbSize)
  CHECK_EQ(input_stream_info_.dwFlags, 0x7u);

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
          << std::hex << std::showbase << output_stream_info_.dwFlags;
  CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
780
// Pulls one decoded sample from the MFT, handling format-change and
// need-more-input conditions by adjusting |state_| accordingly.
void DXVAVideoDecodeAccelerator::DoDecode() {
  // This function is also called from FlushInternal in a loop which could
  // result in the state transitioning to kStopped due to no decoded output.
  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kFlushing ||
                                state_ == kStopped),
      "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  // ProcessOutput may hand back an event collection; release it unused.
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    VLOG(1) << "Got events from ProcessOuput, but discarding";
    events->Release();
  }
  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        state_ = kStopped;
      } else {
        DVLOG(1) << "Received output format change from the decoder."
                    " Recursively invoking DoDecode";
        DoDecode();
      }
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      state_ = kStopped;
      return;
    } else {
      NOTREACHED() << "Unhandled error in DoDecode()";
      return;
    }
  }
  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);
}
834
// Queues a decoded |sample| for delivery to the client, requesting picture
// buffers from the client on the first output if none exist yet.
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  // The decoded frame lives in a D3D surface behind the media buffer.
  base::win::ScopedComPtr<IDirect3DSurface9> surface;
  hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                    IID_PPV_ARGS(surface.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                       false);

  // The sample time was set to the bitstream buffer id in Decode().
  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  pending_output_samples_.push_back(
      PendingSampleInfo(input_buffer_id, sample));

  // If we have available picture buffers to copy the output data then use the
  // first one and then flag it as not being available for use.
  if (output_picture_buffers_.size()) {
    ProcessPendingSamples();
    return true;
  }
  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    return true;
  }

  // We only read the surface description, which contains its width/height when
  // we need the picture buffers from the client. Once we have those, then they
  // are reused.
  D3DSURFACE_DESC surface_desc;
  hr = surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  // Go ahead and request picture buffers.
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::RequestPictureBuffers,
      base::AsWeakPtr(this), surface_desc.Width, surface_desc.Height));

  pictures_requested_ = true;
  return true;
}
882
// Copies queued decoded samples into available picture buffers and notifies
// the client that pictures are ready. Bails out early on a resolution change
// so the stale picture buffers can be replaced.
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  // Copying sample data into the picture buffers requires our GL context.
  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  OutputBuffers::iterator index;

  // Pair each available picture buffer with the oldest pending output sample
  // until one of the two runs out.
  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       !pending_output_samples_.empty();
       ++index) {
    if (index->second->available()) {
      PendingSampleInfo sample_info = pending_output_samples_.front();

      // The decoded data is in the sample's first media buffer, which is
      // backed by a Direct3D9 surface.
      base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
      HRESULT hr = sample_info.output_sample->GetBufferByIndex(
          0, output_buffer.Receive());
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,);

      base::win::ScopedComPtr<IDirect3DSurface9> surface;
      hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                        IID_PPV_ARGS(surface.Receive()));
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get D3D surface from output sample",
          PLATFORM_FAILURE,);

      D3DSURFACE_DESC surface_desc;
      hr = surface->GetDesc(&surface_desc);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get surface description", PLATFORM_FAILURE,);

      // A size mismatch means the stream resolution changed: dismiss the
      // current picture buffers and request new ones. The remaining samples
      // stay queued for the replacement buffers.
      if (surface_desc.Width !=
          static_cast<uint32>(index->second->size().width()) ||
          surface_desc.Height !=
          static_cast<uint32>(index->second->size().height())) {
        HandleResolutionChanged(surface_desc.Width, surface_desc.Height);
        return;
      }

      RETURN_AND_NOTIFY_ON_FAILURE(
          index->second->CopyOutputSampleDataToPictureBuffer(*this, surface),
          "Failed to copy output sample",
          PLATFORM_FAILURE, );

      // Asynchronously tell the client which picture buffer now holds the
      // output for which input bitstream buffer.
      media::Picture output_picture(index->second->id(),
                                    sample_info.input_buffer_id);
      base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
          &DXVAVideoDecodeAccelerator::NotifyPictureReady,
          base::AsWeakPtr(this), output_picture));

      // The picture buffer stays unavailable until the client returns it.
      index->second->set_available(false);
      pending_output_samples_.pop_front();
    }
  }

  // With the output queue drained, resume decoding any input samples that
  // were queued while output was pending.
  if (!pending_input_buffers_.empty() && pending_output_samples_.empty()) {
    base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
        base::AsWeakPtr(this)));
  }
}
944
StopOnError(media::VideoDecodeAccelerator::Error error)945 void DXVAVideoDecodeAccelerator::StopOnError(
946 media::VideoDecodeAccelerator::Error error) {
947 DCHECK(CalledOnValidThread());
948
949 if (client_)
950 client_->NotifyError(error);
951 client_ = NULL;
952
953 if (state_ != kUninitialized) {
954 Invalidate();
955 }
956 }
957
Invalidate()958 void DXVAVideoDecodeAccelerator::Invalidate() {
959 if (state_ == kUninitialized)
960 return;
961 output_picture_buffers_.clear();
962 pending_output_samples_.clear();
963 pending_input_buffers_.clear();
964 decoder_.Release();
965 MFShutdown();
966 state_ = kUninitialized;
967 }
968
// Forwards the initialize-done notification to the client. |client_| may
// already have been reset to NULL by StopOnError(), hence the check.
void DXVAVideoDecodeAccelerator::NotifyInitializeDone() {
  if (client_)
    client_->NotifyInitializeDone();
}
973
// Tells the client that the bitstream buffer |input_buffer_id| has been
// consumed. |client_| may already be NULL after StopOnError().
void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
  if (client_)
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}
978
// Forwards the flush-done notification to the client, if still attached.
void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
  if (client_)
    client_->NotifyFlushDone();
}
983
// Forwards the reset-done notification to the client, if still attached.
void DXVAVideoDecodeAccelerator::NotifyResetDone() {
  if (client_)
    client_->NotifyResetDone();
}
988
RequestPictureBuffers(int width,int height)989 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
990 // This task could execute after the decoder has been torn down.
991 if (state_ != kUninitialized && client_) {
992 client_->ProvidePictureBuffers(
993 kNumPictureBuffers,
994 gfx::Size(width, height),
995 GL_TEXTURE_2D);
996 }
997 }
998
NotifyPictureReady(const media::Picture & picture)999 void DXVAVideoDecodeAccelerator::NotifyPictureReady(
1000 const media::Picture& picture) {
1001 // This task could execute after the decoder has been torn down.
1002 if (state_ != kUninitialized && client_)
1003 client_->PictureReady(picture);
1004 }
1005
// Notifies the client that every queued input buffer has been consumed
// without producing output, then drops the queue. Does nothing while a
// decoded output sample is still pending or if the client is gone.
void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
  if (!client_ || !pending_output_samples_.empty())
    return;

  for (PendingInputs::iterator it = pending_input_buffers_.begin();
       it != pending_input_buffers_.end(); ++it) {
    LONGLONG input_buffer_id = 0;
    // The originating bitstream buffer id is carried in the sample timestamp.
    // NOTE(review): a failure here returns mid-loop, leaving later buffers
    // unreported and the queue uncleared — presumably acceptable since the
    // macro treats it as a hard error; confirm.
    RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
                         "Failed to get buffer id associated with sample",);
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
  }
  pending_input_buffers_.clear();
}
1019
DecodePendingInputBuffers()1020 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
1021 RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
1022 "Invalid state: " << state_, ILLEGAL_STATE,);
1023
1024 if (pending_input_buffers_.empty() || !pending_output_samples_.empty())
1025 return;
1026
1027 PendingInputs pending_input_buffers_copy;
1028 std::swap(pending_input_buffers_, pending_input_buffers_copy);
1029
1030 for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
1031 it != pending_input_buffers_copy.end(); ++it) {
1032 DecodeInternal(*it);
1033 }
1034 }
1035
FlushInternal()1036 void DXVAVideoDecodeAccelerator::FlushInternal() {
1037 // The DoDecode function sets the state to kStopped when the decoder returns
1038 // MF_E_TRANSFORM_NEED_MORE_INPUT.
1039 // The MFT decoder can buffer upto 30 frames worth of input before returning
1040 // an output frame. This loop here attempts to retrieve as many output frames
1041 // as possible from the buffered set.
1042 while (state_ != kStopped) {
1043 DoDecode();
1044 if (!pending_output_samples_.empty())
1045 return;
1046 }
1047
1048 base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
1049 &DXVAVideoDecodeAccelerator::NotifyFlushDone, base::AsWeakPtr(this)));
1050
1051 state_ = kNormal;
1052 }
1053
// Feeds |sample| to the MFT decoder and attempts to retrieve decoded output.
// If the decoder cannot accept input right now, the sample is queued on
// |pending_input_buffers_| for later resubmission via
// DecodePendingInputBuffers().
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(CalledOnValidThread());

  if (state_ == kUninitialized)
    return;

  // Preserve submission order: if output is pending or inputs are already
  // queued, this sample must wait its turn.
  if (!pending_output_samples_.empty() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  if (!inputs_before_decode_) {
    TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample, 0);
  // As per MSDN, if the decoder returns MF_E_NOTACCEPTING then it means that
  // it has enough data to produce one or more output samples. In this case
  // the recommended options are to
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data
  // We implement the first option, i.e. to retrieve the output sample and
  // then process the input again. Failure in either of these steps is treated
  // as a decoder failure.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
        "Failed to process output. Unexpected decoder state: " << state_,
        PLATFORM_FAILURE,);
    hr = decoder_->ProcessInput(0, sample, 0);
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:-
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support 1 pending output sample at any
    // given time due to the limitation with the Microsoft media foundation
    // decoder where it recycles the output Decoder surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      if (pending_output_samples_.empty()) {
        base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
            &DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
            base::AsWeakPtr(this)));
      }
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
      PLATFORM_FAILURE,);

  DoDecode();

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
      "Failed to process output. Unexpected decoder state: " << state_,
      ILLEGAL_STATE,);

  // The input buffer id travels in the sample timestamp (set on the decode
  // path); recover it to acknowledge the buffer to the client.
  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
      "Failed to get input buffer id associated with sample",);
  // The Microsoft Media foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // TODO(ananta)
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
      base::AsWeakPtr(this), input_buffer_id));
}
1132
// Handles a mid-stream resolution change: dismisses the existing picture
// buffers and requests new ones sized |width| x |height| from the client.
void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
                                                         int height) {
  // base::Bind stores its arguments by value, so the posted task carries its
  // own copy of |output_picture_buffers_| — clearing the member below does
  // not affect the buffers that DismissStaleBuffers will report.
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::DismissStaleBuffers,
      base::AsWeakPtr(this), output_picture_buffers_));

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::RequestPictureBuffers,
      base::AsWeakPtr(this), width, height));

  output_picture_buffers_.clear();
}
1145
DismissStaleBuffers(const OutputBuffers & picture_buffers)1146 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(
1147 const OutputBuffers& picture_buffers) {
1148 OutputBuffers::const_iterator index;
1149
1150 for (index = picture_buffers.begin();
1151 index != picture_buffers.end();
1152 ++index) {
1153 DVLOG(1) << "Dismissing picture id: " << index->second->id();
1154 client_->DismissPictureBuffer(index->second->id());
1155 }
1156 }
1157
1158 } // namespace content
1159