/* GStreamer
 * Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-d3d11mpeg2dec
 * @title: d3d11mpeg2dec
 *
 * A Direct3D11/DXVA based MPEG-2 video decoder
 *
 * ## Example launch line
 * ```
 * gst-launch-1.0 filesrc location=/path/to/mpeg2/file ! parsebin ! d3d11mpeg2dec ! d3d11videosink
 * ```
 *
 * Since: 1.20
 *
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include "gstd3d11mpeg2dec.h"

#include <gst/codecs/gstmpeg2decoder.h>
#include <string.h>
#include <vector>

/* HACK: to expose dxva data structure on UWP */
#ifdef WINAPI_PARTITION_DESKTOP
#undef WINAPI_PARTITION_DESKTOP
#endif
#define WINAPI_PARTITION_DESKTOP 1
#include <d3d9.h>
#include <dxva.h>

GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_mpeg2_dec_debug);
#define GST_CAT_DEFAULT gst_d3d11_mpeg2_dec_debug

/* reference list 2 + 4 margin */
#define NUM_OUTPUT_VIEW 6

/* *INDENT-OFF* */
typedef struct _GstD3D11Mpeg2DecInner
{
  GstD3D11Device *device = nullptr;
  GstD3D11Decoder *d3d11_decoder = nullptr;

  DXVA_PictureParameters pic_params;
  DXVA_QmatrixData iq_matrix;

  std::vector<DXVA_SliceInfo> slice_list;
  std::vector<guint8> bitstream_buffer;

  gboolean submit_iq_data;

  gint width = 0;
  gint height = 0;
  guint width_in_mb = 0;
  guint height_in_mb = 0;
  GstVideoFormat out_format = GST_VIDEO_FORMAT_UNKNOWN;
  GstMpegVideoSequenceHdr seq;
  GstMpegVideoProfile profile = GST_MPEG_VIDEO_PROFILE_MAIN;
  gboolean interlaced = FALSE;
} GstD3D11Mpeg2DecInner;
/* *INDENT-ON* */

typedef struct _GstD3D11Mpeg2Dec
{
  GstMpeg2Decoder parent;
  GstD3D11Mpeg2DecInner *inner;
} GstD3D11Mpeg2Dec;

typedef struct _GstD3D11Mpeg2DecClass
{
  GstMpeg2DecoderClass parent_class;
  GstD3D11DecoderSubClassData class_data;
} GstD3D11Mpeg2DecClass;

static GstElementClass *parent_class = NULL;

#define GST_D3D11_MPEG2_DEC(object) ((GstD3D11Mpeg2Dec *) (object))
#define GST_D3D11_MPEG2_DEC_GET_CLASS(object) \
    (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstD3D11Mpeg2DecClass))

static void gst_d3d11_mpeg2_dec_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_d3d11_mpeg2_dec_finalize (GObject * object);
static void gst_d3d11_mpeg2_dec_set_context (GstElement * element,
    GstContext * context);

static gboolean gst_d3d11_mpeg2_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_mpeg2_dec_close (GstVideoDecoder * decoder);
static gboolean gst_d3d11_mpeg2_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_mpeg2_dec_decide_allocation (GstVideoDecoder *
    decoder, GstQuery * query);
static gboolean gst_d3d11_mpeg2_dec_src_query (GstVideoDecoder * decoder,
    GstQuery * query);
static gboolean gst_d3d11_mpeg2_dec_sink_event (GstVideoDecoder * decoder,
    GstEvent * event);

/* GstMpeg2Decoder */
static GstFlowReturn gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder *
    decoder, const GstMpegVideoSequenceHdr * seq,
    const GstMpegVideoSequenceExt * seq_ext,
    const GstMpegVideoSequenceDisplayExt * seq_display_ext,
    const GstMpegVideoSequenceScalableExt * seq_scalable_ext);
static GstFlowReturn gst_d3d11_mpeg2_dec_new_picture (GstMpeg2Decoder * decoder,
    GstVideoCodecFrame * frame, GstMpeg2Picture * picture);
static GstFlowReturn gst_d3d11_mpeg2_dec_new_field_picture (GstMpeg2Decoder *
    decoder, const GstMpeg2Picture * first_field,
    GstMpeg2Picture * second_field);
static GstFlowReturn gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder *
    decoder, GstMpeg2Picture * picture, GstMpeg2Slice * slice,
    GstMpeg2Picture * prev_picture, GstMpeg2Picture * next_picture);
static GstFlowReturn gst_d3d11_mpeg2_dec_decode_slice (GstMpeg2Decoder *
    decoder, GstMpeg2Picture * picture, GstMpeg2Slice * slice);
static GstFlowReturn gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
    GstMpeg2Picture * picture);
static GstFlowReturn gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder *
    decoder, GstVideoCodecFrame * frame, GstMpeg2Picture * picture);
static void
gst_d3d11_mpeg2_dec_class_init (GstD3D11Mpeg2DecClass * klass, gpointer data)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
  GstMpeg2DecoderClass *mpeg2decoder_class = GST_MPEG2_DECODER_CLASS (klass);
  GstD3D11DecoderClassData *cdata = (GstD3D11DecoderClassData *) data;

  gobject_class->get_property = gst_d3d11_mpeg2_dec_get_property;
  gobject_class->finalize = gst_d3d11_mpeg2_dec_finalize;

  element_class->set_context =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_set_context);

  parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
  gst_d3d11_decoder_class_data_fill_subclass_data (cdata, &klass->class_data);

  /**
   * GstD3D11Mpeg2Dec:adapter-luid:
   *
   * DXGI Adapter LUID for this element
   *
   * Since: 1.20
   */
  gst_d3d11_decoder_proxy_class_init (element_class, cdata,
      "Seungha Yang <seungha@centricular.com>");

  decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_open);
  decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_close);
  decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_negotiate);
  decoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_decide_allocation);
  decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_src_query);
  decoder_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_sink_event);

  mpeg2decoder_class->new_sequence =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_new_sequence);
  mpeg2decoder_class->new_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_new_picture);
  mpeg2decoder_class->new_field_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_new_field_picture);
  mpeg2decoder_class->start_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_start_picture);
  mpeg2decoder_class->decode_slice =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_decode_slice);
  mpeg2decoder_class->end_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_end_picture);
  mpeg2decoder_class->output_picture =
      GST_DEBUG_FUNCPTR (gst_d3d11_mpeg2_dec_output_picture);
}

static void
gst_d3d11_mpeg2_dec_init (GstD3D11Mpeg2Dec * self)
{
  self->inner = new GstD3D11Mpeg2DecInner ();
}

static void
gst_d3d11_mpeg2_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstD3D11Mpeg2DecClass *klass = GST_D3D11_MPEG2_DEC_GET_CLASS (object);
  GstD3D11DecoderSubClassData *cdata = &klass->class_data;

  gst_d3d11_decoder_proxy_get_property (object, prop_id, value, pspec, cdata);
}

static void
gst_d3d11_mpeg2_dec_finalize (GObject * object)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (object);

  delete self->inner;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_d3d11_mpeg2_dec_set_context (GstElement * element, GstContext * context)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (element);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstD3D11Mpeg2DecClass *klass = GST_D3D11_MPEG2_DEC_GET_CLASS (self);
  GstD3D11DecoderSubClassData *cdata = &klass->class_data;

  gst_d3d11_handle_set_context_for_adapter_luid (element,
      context, cdata->adapter_luid, &inner->device);

  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}

static gboolean
gst_d3d11_mpeg2_dec_open (GstVideoDecoder * decoder)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstD3D11Mpeg2DecClass *klass = GST_D3D11_MPEG2_DEC_GET_CLASS (self);
  GstD3D11DecoderSubClassData *cdata = &klass->class_data;

  if (!gst_d3d11_decoder_proxy_open (decoder,
          cdata, &inner->device, &inner->d3d11_decoder)) {
    GST_ERROR_OBJECT (self, "Failed to open decoder");
    return FALSE;
  }

  return TRUE;
}

static gboolean
gst_d3d11_mpeg2_dec_close (GstVideoDecoder * decoder)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  gst_clear_object (&inner->d3d11_decoder);
  gst_clear_object (&inner->device);

  return TRUE;
}

static gboolean
gst_d3d11_mpeg2_dec_negotiate (GstVideoDecoder * decoder)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  if (!gst_d3d11_decoder_negotiate (inner->d3d11_decoder, decoder))
    return FALSE;

  return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}

static gboolean
gst_d3d11_mpeg2_dec_decide_allocation (GstVideoDecoder * decoder,
    GstQuery * query)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  if (!gst_d3d11_decoder_decide_allocation (inner->d3d11_decoder,
          decoder, query)) {
    return FALSE;
  }

  return GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation
      (decoder, query);
}

static gboolean
gst_d3d11_mpeg2_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONTEXT:
      if (gst_d3d11_handle_context_query (GST_ELEMENT (decoder),
              query, inner->device)) {
        return TRUE;
      }
      break;
    default:
      break;
  }

  return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}

static gboolean
gst_d3d11_mpeg2_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      if (inner->d3d11_decoder)
        gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, TRUE);
      break;
    case GST_EVENT_FLUSH_STOP:
      if (inner->d3d11_decoder)
        gst_d3d11_decoder_set_flushing (inner->d3d11_decoder, decoder, FALSE);
      break;
    default:
      break;
  }

  return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_new_sequence (GstMpeg2Decoder * decoder,
    const GstMpegVideoSequenceHdr * seq,
    const GstMpegVideoSequenceExt * seq_ext,
    const GstMpegVideoSequenceDisplayExt * seq_display_ext,
    const GstMpegVideoSequenceScalableExt * seq_scalable_ext)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  gboolean interlaced;
  gboolean modified = FALSE;
  gint width, height;
  GstMpegVideoProfile mpeg_profile;

  GST_LOG_OBJECT (self, "new sequence");

  interlaced = seq_ext ? !seq_ext->progressive : FALSE;
  if (inner->interlaced != interlaced) {
    GST_INFO_OBJECT (self, "interlaced sequence change");
    inner->interlaced = interlaced;
    modified = TRUE;
  }

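  /* The sequence header carries only the lower 12 bits of the coded size;
   * when a sequence extension is present, horiz_size_ext/vert_size_ext
   * provide the two most significant bits (ISO/IEC 13818-2) */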
  width = seq->width;
  height = seq->height;
  if (seq_ext) {
    width = (width & 0x0fff) | ((guint32) seq_ext->horiz_size_ext << 12);
    height = (height & 0x0fff) | ((guint32) seq_ext->vert_size_ext << 12);
  }

  if (inner->width != width || inner->height != height) {
    GST_INFO_OBJECT (self, "resolution change %dx%d -> %dx%d",
        inner->width, inner->height, width, height);
    inner->width = width;
    inner->height = height;
    inner->width_in_mb = GST_ROUND_UP_16 (width) >> 4;
    inner->height_in_mb = GST_ROUND_UP_16 (height) >> 4;
    modified = TRUE;
  }

  mpeg_profile = GST_MPEG_VIDEO_PROFILE_MAIN;
  if (seq_ext)
    mpeg_profile = (GstMpegVideoProfile) seq_ext->profile;

  if (mpeg_profile != GST_MPEG_VIDEO_PROFILE_MAIN &&
      mpeg_profile != GST_MPEG_VIDEO_PROFILE_SIMPLE) {
    GST_ERROR_OBJECT (self, "Cannot support profile %d", mpeg_profile);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (inner->profile != mpeg_profile) {
    GST_INFO_OBJECT (self, "Profile change %d -> %d",
        inner->profile, mpeg_profile);
    inner->profile = mpeg_profile;
    modified = TRUE;
  }

  if (modified || !gst_d3d11_decoder_is_configured (inner->d3d11_decoder)) {
    GstVideoInfo info;

    /* FIXME: support I420 */
    inner->out_format = GST_VIDEO_FORMAT_NV12;

    gst_video_info_set_format (&info,
        inner->out_format, inner->width, inner->height);
    if (inner->interlaced)
      GST_VIDEO_INFO_INTERLACE_MODE (&info) = GST_VIDEO_INTERLACE_MODE_MIXED;

    if (!gst_d3d11_decoder_configure (inner->d3d11_decoder,
            decoder->input_state, &info,
            inner->width, inner->height, NUM_OUTPUT_VIEW)) {
      GST_ERROR_OBJECT (self, "Failed to create decoder");
      return GST_FLOW_NOT_NEGOTIATED;
    }

    if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  return GST_FLOW_OK;
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_new_picture (GstMpeg2Decoder * decoder,
    GstVideoCodecFrame * frame, GstMpeg2Picture * picture)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstBuffer *view_buffer;

  view_buffer = gst_d3d11_decoder_get_output_view_buffer (inner->d3d11_decoder,
      GST_VIDEO_DECODER (decoder));
  if (!view_buffer) {
    GST_DEBUG_OBJECT (self, "No available output view buffer");
    return GST_FLOW_ERROR;
  }

  GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);

  gst_mpeg2_picture_set_user_data (picture,
      view_buffer, (GDestroyNotify) gst_buffer_unref);

  GST_LOG_OBJECT (self, "New MPEG2 picture %p", picture);

  return GST_FLOW_OK;
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_new_field_picture (GstMpeg2Decoder * decoder,
    const GstMpeg2Picture * first_field, GstMpeg2Picture * second_field)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstBuffer *view_buffer;

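  /* Both fields of an interlaced frame are decoded into the same output
   * view, so the second field reuses the first field's view buffer */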
  view_buffer = (GstBuffer *)
      gst_mpeg2_picture_get_user_data ((GstMpeg2Picture *) first_field);

  if (!view_buffer) {
    GST_WARNING_OBJECT (self, "First picture does not have output view buffer");
    return GST_FLOW_OK;
  }

  GST_LOG_OBJECT (self, "New field picture with buffer %" GST_PTR_FORMAT,
      view_buffer);

  gst_mpeg2_picture_set_user_data (second_field,
      gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref);

  return GST_FLOW_OK;
}

static ID3D11VideoDecoderOutputView *
gst_d3d11_mpeg2_dec_get_output_view_from_picture (GstD3D11Mpeg2Dec * self,
    GstMpeg2Picture * picture, guint8 * view_id)
{
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstBuffer *view_buffer;
  ID3D11VideoDecoderOutputView *view;

  if (!picture)
    return NULL;

  view_buffer = (GstBuffer *) gst_mpeg2_picture_get_user_data (picture);
  if (!view_buffer) {
    GST_DEBUG_OBJECT (self, "current picture does not have output view buffer");
    return NULL;
  }

  view =
      gst_d3d11_decoder_get_output_view_from_buffer (inner->d3d11_decoder,
      view_buffer, view_id);
  if (!view) {
    GST_DEBUG_OBJECT (self, "current picture does not have output view handle");
    return NULL;
  }

  return view;
}

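/* Pack the four MPEG-2 f_code values (forward/backward x horizontal/vertical)
 * into the nibble layout of DXVA_PictureParameters::wBitstreamFcodes */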
static inline WORD
_pack_f_codes (guint8 f_code[2][2])
{
  return (((WORD) f_code[0][0] << 12)
      | ((WORD) f_code[0][1] << 8)
      | ((WORD) f_code[1][0] << 4)
      | (f_code[1][1]));
}

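/* Pack the picture coding extension fields into the bit positions of
 * DXVA_PictureParameters::wBitstreamPCEelements */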
static inline WORD
_pack_pce_elements (GstMpeg2Slice * slice)
{
  return (((WORD) slice->pic_ext->intra_dc_precision << 14)
      | ((WORD) slice->pic_ext->picture_structure << 12)
      | ((WORD) slice->pic_ext->top_field_first << 11)
      | ((WORD) slice->pic_ext->frame_pred_frame_dct << 10)
      | ((WORD) slice->pic_ext->concealment_motion_vectors << 9)
      | ((WORD) slice->pic_ext->q_scale_type << 8)
      | ((WORD) slice->pic_ext->intra_vlc_format << 7)
      | ((WORD) slice->pic_ext->alternate_scan << 6)
      | ((WORD) slice->pic_ext->repeat_first_field << 5)
      | ((WORD) slice->pic_ext->chroma_420_type << 4)
      | ((WORD) slice->pic_ext->progressive_frame << 3));
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_start_picture (GstMpeg2Decoder * decoder,
    GstMpeg2Picture * picture, GstMpeg2Slice * slice,
    GstMpeg2Picture * prev_picture, GstMpeg2Picture * next_picture)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  DXVA_PictureParameters *pic_params = &inner->pic_params;
  DXVA_QmatrixData *iq_matrix = &inner->iq_matrix;
  ID3D11VideoDecoderOutputView *view;
  ID3D11VideoDecoderOutputView *other_view;
  guint8 view_id = 0xff;
  guint8 other_view_id = 0xff;
  gboolean is_field =
      picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;

  view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture,
      &view_id);
  if (!view) {
    GST_ERROR_OBJECT (self, "current picture does not have output view handle");
    return GST_FLOW_ERROR;
  }

  memset (pic_params, 0, sizeof (DXVA_PictureParameters));
  memset (iq_matrix, 0, sizeof (DXVA_QmatrixData));

  /* Fill DXVA_PictureParameters */
  pic_params->wDecodedPictureIndex = view_id;
  pic_params->wForwardRefPictureIndex = 0xffff;
  pic_params->wBackwardRefPictureIndex = 0xffff;

  switch (picture->type) {
    case GST_MPEG_VIDEO_PICTURE_TYPE_B:{
      if (next_picture) {
        other_view =
            gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
            next_picture, &other_view_id);
        if (other_view)
          pic_params->wBackwardRefPictureIndex = other_view_id;
      }
    }
      /* fall-through */
    case GST_MPEG_VIDEO_PICTURE_TYPE_P:{
      if (prev_picture) {
        other_view =
            gst_d3d11_mpeg2_dec_get_output_view_from_picture (self,
            prev_picture, &other_view_id);
        if (other_view)
          pic_params->wForwardRefPictureIndex = other_view_id;
      }
    }
    default:
      break;
  }

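  /* A field picture covers only half of the frame height in macroblocks,
   * hence the height-in-MB shift by is_field */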
  pic_params->wPicWidthInMBminus1 = inner->width_in_mb - 1;
  pic_params->wPicHeightInMBminus1 = (inner->height_in_mb >> is_field) - 1;
  pic_params->bMacroblockWidthMinus1 = 15;
  pic_params->bMacroblockHeightMinus1 = 15;
  pic_params->bBlockWidthMinus1 = 7;
  pic_params->bBlockHeightMinus1 = 7;
  pic_params->bBPPminus1 = 7;
  pic_params->bPicStructure = (BYTE) picture->structure;
  if (picture->first_field && is_field) {
    pic_params->bSecondField = TRUE;
  }
  pic_params->bPicIntra = picture->type == GST_MPEG_VIDEO_PICTURE_TYPE_I;
  pic_params->bPicBackwardPrediction =
      picture->type == GST_MPEG_VIDEO_PICTURE_TYPE_B;
  /* FIXME: 1 -> 4:2:0, 2 -> 4:2:2, 3 -> 4:4:4 */
  pic_params->bChromaFormat = 1;
  pic_params->bPicScanFixed = 1;
  pic_params->bPicScanMethod = slice->pic_ext->alternate_scan;
  pic_params->wBitstreamFcodes = _pack_f_codes (slice->pic_ext->f_code);
  pic_params->wBitstreamPCEelements = _pack_pce_elements (slice);

  /* Fill DXVA_QmatrixData */
  if (slice->quant_matrix &&
      /* The value in bNewQmatrix[0] and bNewQmatrix[1] must not both be zero.
       * https://docs.microsoft.com/en-us/windows-hardware/drivers/ddi/dxva/ns-dxva-_dxva_qmatrixdata
       */
      (slice->quant_matrix->load_intra_quantiser_matrix ||
          slice->quant_matrix->load_non_intra_quantiser_matrix)) {
    GstMpegVideoQuantMatrixExt *quant_matrix = slice->quant_matrix;

    if (quant_matrix->load_intra_quantiser_matrix) {
      iq_matrix->bNewQmatrix[0] = 1;
      for (guint i = 0; i < 64; i++) {
        iq_matrix->Qmatrix[0][i] = quant_matrix->intra_quantiser_matrix[i];
      }
    }

    if (quant_matrix->load_non_intra_quantiser_matrix) {
      iq_matrix->bNewQmatrix[1] = 1;
      for (guint i = 0; i < 64; i++) {
        iq_matrix->Qmatrix[1][i] = quant_matrix->non_intra_quantiser_matrix[i];
      }
    }

    if (quant_matrix->load_chroma_intra_quantiser_matrix) {
      iq_matrix->bNewQmatrix[2] = 1;
      for (guint i = 0; i < 64; i++) {
        iq_matrix->Qmatrix[2][i] =
            quant_matrix->chroma_intra_quantiser_matrix[i];
      }
    }

    if (quant_matrix->load_chroma_non_intra_quantiser_matrix) {
      iq_matrix->bNewQmatrix[3] = 1;
      for (guint i = 0; i < 64; i++) {
        iq_matrix->Qmatrix[3][i] =
            quant_matrix->chroma_non_intra_quantiser_matrix[i];
      }
    }

    inner->submit_iq_data = TRUE;
  } else {
    inner->submit_iq_data = FALSE;
  }

  inner->slice_list.resize (0);
  inner->bitstream_buffer.resize (0);

  return GST_FLOW_OK;
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_decode_slice (GstMpeg2Decoder * decoder,
    GstMpeg2Picture * picture, GstMpeg2Slice * slice)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstMpegVideoSliceHdr *header = &slice->header;
  GstMpegVideoPacket *packet = &slice->packet;
  DXVA_SliceInfo slice_info = { 0, };

  g_assert (packet->offset >= 4);

  slice_info.wHorizontalPosition = header->mb_column;
  slice_info.wVerticalPosition = header->mb_row;
  /* including start code 4 bytes */
  slice_info.dwSliceBitsInBuffer = 8 * (packet->size + 4);
  slice_info.dwSliceDataLocation = inner->bitstream_buffer.size ();
  /* XXX: We don't have information about the number of MBs in this slice.
   * Just store offset here, and actual number will be calculated later */
  slice_info.wNumberMBsInSlice =
      (header->mb_row * inner->width_in_mb) + header->mb_column;
  slice_info.wQuantizerScaleCode = header->quantiser_scale_code;
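  /* header_size is the slice header size in bits; add 32 bits to account for
   * the 4-byte start code that is copied into the bitstream buffer as well */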
  slice_info.wMBbitOffset = header->header_size + 32;

  inner->slice_list.push_back (slice_info);

  size_t pos = inner->bitstream_buffer.size ();
  inner->bitstream_buffer.resize (pos + packet->size + 4);
  memcpy (&inner->bitstream_buffer[0] + pos, packet->data + packet->offset - 4,
      packet->size + 4);

  return GST_FLOW_OK;
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_end_picture (GstMpeg2Decoder * decoder,
    GstMpeg2Picture * picture)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  ID3D11VideoDecoderOutputView *view;
  guint8 view_id = 0xff;
  GstD3D11DecodeInputStreamArgs input_args;
  gboolean is_field =
      picture->structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
  guint mb_count = inner->width_in_mb * (inner->height_in_mb >> is_field);

  if (inner->bitstream_buffer.empty ()) {
    GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
    return GST_FLOW_ERROR;
  }

  view = gst_d3d11_mpeg2_dec_get_output_view_from_picture (self, picture,
      &view_id);
  if (!view) {
    GST_ERROR_OBJECT (self, "current picture does not have output view handle");
    return GST_FLOW_ERROR;
  }

  memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));

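  /* Each wNumberMBsInSlice currently holds the slice's starting MB index
   * (stored in decode_slice); convert it to an actual MB count by taking the
   * difference with the next slice's start, or with the picture total for
   * the last slice */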
  DXVA_SliceInfo *first = &inner->slice_list[0];
  for (size_t i = 0; i < inner->slice_list.size (); i++) {
    DXVA_SliceInfo *slice = first + i;

    /* Update the number of MBs per slice */
    if (i == inner->slice_list.size () - 1) {
      slice->wNumberMBsInSlice = mb_count - slice->wNumberMBsInSlice;
    } else {
      DXVA_SliceInfo *next = first + i + 1;
      slice->wNumberMBsInSlice =
          next->wNumberMBsInSlice - slice->wNumberMBsInSlice;
    }
  }

  input_args.picture_params = &inner->pic_params;
  input_args.picture_params_size = sizeof (DXVA_PictureParameters);
  input_args.slice_control = &inner->slice_list[0];
  input_args.slice_control_size =
      sizeof (DXVA_SliceInfo) * inner->slice_list.size ();
  input_args.bitstream = &inner->bitstream_buffer[0];
  input_args.bitstream_size = inner->bitstream_buffer.size ();
  if (inner->submit_iq_data) {
    input_args.inverse_quantization_matrix = &inner->iq_matrix;
    input_args.inverse_quantization_matrix_size = sizeof (DXVA_QmatrixData);
  }

  if (!gst_d3d11_decoder_decode_frame (inner->d3d11_decoder, view, &input_args))
    return GST_FLOW_ERROR;

  return GST_FLOW_OK;
}

static GstFlowReturn
gst_d3d11_mpeg2_dec_output_picture (GstMpeg2Decoder * decoder,
    GstVideoCodecFrame * frame, GstMpeg2Picture * picture)
{
  GstD3D11Mpeg2Dec *self = GST_D3D11_MPEG2_DEC (decoder);
  GstD3D11Mpeg2DecInner *inner = self->inner;
  GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
  GstBuffer *view_buffer;

  GST_LOG_OBJECT (self, "Outputting picture %p", picture);

  view_buffer = (GstBuffer *) gst_mpeg2_picture_get_user_data (picture);

  if (!view_buffer) {
    GST_ERROR_OBJECT (self, "Could not get output view");
    goto error;
  }

  if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
          inner->width, inner->height, view_buffer, &frame->output_buffer)) {
    GST_ERROR_OBJECT (self, "Failed to copy buffer");
    goto error;
  }

  if (picture->buffer_flags != 0) {
    gboolean interlaced =
        (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_INTERLACED) != 0;
    gboolean tff = (picture->buffer_flags & GST_VIDEO_BUFFER_FLAG_TFF) != 0;

    GST_TRACE_OBJECT (self,
        "apply buffer flags 0x%x (interlaced %d, top-field-first %d)",
        picture->buffer_flags, interlaced, tff);
    GST_BUFFER_FLAG_SET (frame->output_buffer, picture->buffer_flags);
  }

  gst_mpeg2_picture_unref (picture);

  return gst_video_decoder_finish_frame (vdec, frame);

error:
  gst_mpeg2_picture_unref (picture);
  gst_video_decoder_release_frame (vdec, frame);

  return GST_FLOW_ERROR;
}

void
gst_d3d11_mpeg2_dec_register (GstPlugin * plugin, GstD3D11Device * device,
    guint rank)
{
  GType type;
  gchar *type_name;
  gchar *feature_name;
  guint index = 0;
  GTypeInfo type_info = {
    sizeof (GstD3D11Mpeg2DecClass),
    NULL,
    NULL,
    (GClassInitFunc) gst_d3d11_mpeg2_dec_class_init,
    NULL,
    NULL,
    sizeof (GstD3D11Mpeg2Dec),
    0,
    (GInstanceInitFunc) gst_d3d11_mpeg2_dec_init,
  };
  const GUID *supported_profile = NULL;
  GstCaps *sink_caps = NULL;
  GstCaps *src_caps = NULL;

  if (!gst_d3d11_decoder_get_supported_decoder_profile (device,
          GST_DXVA_CODEC_MPEG2, GST_VIDEO_FORMAT_NV12, &supported_profile)) {
    GST_INFO_OBJECT (device, "device does not support MPEG-2 video decoding");
    return;
  }

  sink_caps = gst_caps_from_string ("video/mpeg, "
      "mpegversion = (int)2, systemstream = (boolean) false, "
      "profile = (string) { main, simple }");
  src_caps = gst_caps_from_string ("video/x-raw("
      GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY "); video/x-raw");

  /* NOTE: We are supporting only 4:2:0, main or simple profiles */
  gst_caps_set_simple (src_caps, "format", G_TYPE_STRING, "NV12", NULL);

  gst_caps_set_simple (sink_caps,
      "width", GST_TYPE_INT_RANGE, 1, 1920,
      "height", GST_TYPE_INT_RANGE, 1, 1920, NULL);
  gst_caps_set_simple (src_caps,
      "width", GST_TYPE_INT_RANGE, 1, 1920,
      "height", GST_TYPE_INT_RANGE, 1, 1920, NULL);

  type_info.class_data =
      gst_d3d11_decoder_class_data_new (device, GST_DXVA_CODEC_MPEG2,
      sink_caps, src_caps);

  type_name = g_strdup ("GstD3D11Mpeg2Dec");
  feature_name = g_strdup ("d3d11mpeg2dec");

  while (g_type_from_name (type_name)) {
    index++;
    g_free (type_name);
    g_free (feature_name);
    type_name = g_strdup_printf ("GstD3D11Mpeg2Device%dDec", index);
    feature_name = g_strdup_printf ("d3d11mpeg2device%ddec", index);
  }

  type = g_type_register_static (GST_TYPE_MPEG2_DECODER,
      type_name, &type_info, (GTypeFlags) 0);

  /* make lower rank than default device */
  if (rank > 0 && index != 0)
    rank--;

  if (index != 0)
    gst_element_type_set_skip_documentation (type);

  if (!gst_element_register (plugin, feature_name, rank, type))
    GST_WARNING ("Failed to register plugin '%s'", type_name);

  g_free (type_name);
  g_free (feature_name);
}