1 /* GStreamer
2 * Copyright (C) 2021 Seungha Yang <seungha@centricular.com>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
18 */
19
20 /**
21 * SECTION:element-d3d11deinterlaceelement
22 * @title: d3d11deinterlaceelement
23 *
24 * Deinterlacing interlaced video frames to progressive video frames by using
25 * ID3D11VideoProcessor API. Depending on the hardware it runs on,
26 * this element will only support a very limited set of video formats.
27 * Use #d3d11deinterlace instead, which will take care of conversion.
28 *
29 * Since: 1.20
30 *
31 */
32
33 #ifdef HAVE_CONFIG_H
34 #include <config.h>
35 #endif
36
37 #include <gst/video/video.h>
38 #include <gst/base/gstbasetransform.h>
39
40 #include "gstd3d11deinterlace.h"
41 #include "gstd3d11pluginutils.h"
42 #include <wrl.h>
43 #include <string.h>
44
45 GST_DEBUG_CATEGORY_EXTERN (gst_d3d11_deinterlace_debug);
46 #define GST_CAT_DEFAULT gst_d3d11_deinterlace_debug
47
48 /* *INDENT-OFF* */
49 using namespace Microsoft::WRL;
50 /* *INDENT-ON* */
51
52 /* Deinterlacing Methods:
53 * Direct3D11 provides Blend, Bob, Adaptive, Motion Compensation, and
54 * Inverse Telecine methods. But depending on video processor device,
55 * some of method might not be supported.
56 * - Blend: the two fields of a interlaced frame are blended into a single
57 * progressive frame. Output rate will be half of input (e.g., 60i -> 30p)
58 * but due to the way of framerate signalling of GStreamer, that is, it uses
59 * frame rate, not field rate for interlaced stream, in/output framerate
60 * of caps will be identical.
61 * - Bob: missing field lines are interpolated from the lines above and below.
62 * Output rate will be the same as that of input (e.g., 60i -> 60p).
63 * In order words, video processor will generate two frames from two field
64 * of a intelaced frame.
65 * - Adaptive, Motion Compensation: future and past frames are used for
66 * reference frame for deinterlacing process. User should provide sufficent
67 * number of reference frames, otherwise processor device will fallback to
68 * Bob method.
69 *
70 * Direct3D11 doesn't provide a method for explicit deinterlacing method
71 * selection. Instead, it could be done indirectly.
72 * - Blend: sets output rate as half via VideoProcessorSetStreamOutputRate().
73 * - Bob: sets output rate as normal. And performs VideoProcessorBlt() twice per
74 * a interlaced frame. D3D11_VIDEO_PROCESSOR_STREAM::OutputIndex needs to be
75 * incremented per field (e.g., OutputIndex = 0 for the first field,
76 * and 1 for the second field).
77 * - Adaptive, Motion Compensation: in addition to the requirement of Bob,
78 * user should provide reference frames via
79 * D3D11_VIDEO_PROCESSOR_STREAM::ppPastSurfaces and
80 * D3D11_VIDEO_PROCESSOR_STREAM::ppFutureSurfaces
81 */
82
83 /* g_queue_clear_full is available since 2.60 */
84 #if !GLIB_CHECK_VERSION(2,60,0)
85 #define g_queue_clear_full gst_d3d11_deinterlace_g_queue_clear_full
86 static void
gst_d3d11_deinterlace_g_queue_clear_full(GQueue * queue,GDestroyNotify free_func)87 gst_d3d11_deinterlace_g_queue_clear_full (GQueue * queue,
88 GDestroyNotify free_func)
89 {
90 g_return_if_fail (queue != NULL);
91
92 if (free_func != NULL)
93 g_queue_foreach (queue, (GFunc) free_func, NULL);
94
95 g_queue_clear (queue);
96 }
97 #endif
98
/* Deinterlacing methods, mapped 1:1 onto the corresponding
 * D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_* capability bits so the
 * values can be masked directly against the device caps */
typedef enum
{
  GST_D3D11_DEINTERLACE_METHOD_BLEND =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BLEND,
  GST_D3D11_DEINTERLACE_METHOD_BOB =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_BOB,
  /* NOTE: "ADAPTVIE" is a typo of "ADAPTIVE"; the identifier is kept as-is
   * because it is referenced throughout this file */
  GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_ADAPTIVE,
  GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION =
      D3D11_VIDEO_PROCESSOR_PROCESSOR_CAPS_DEINTERLACE_MOTION_COMPENSATION,

  /* TODO: INVERSE_TELECINE */
} GstD3D11DeinterlaceMethod;
112
113 /**
114 * GstD3D11DeinterlaceMethod:
115 *
116 * Deinterlacing method
117 *
118 * Since: 1.20
119 */
120 #define GST_TYPE_D3D11_DEINTERLACE_METHOD (gst_d3d11_deinterlace_method_type())
121
/* Lazily registers the GstD3D11DeinterlaceMethod flags type; thread-safe
 * one-time initialization via g_once_init_enter()/g_once_init_leave().
 * Returns: the registered GType */
static GType
gst_d3d11_deinterlace_method_type (void)
{
  static gsize method_type = 0;

  if (g_once_init_enter (&method_type)) {
    static const GFlagsValue method_types[] = {
      {GST_D3D11_DEINTERLACE_METHOD_BLEND,
          "Blend: Blending top/bottom field pictures into one frame. "
          "Framerate will be preserved (e.g., 60i -> 30p)", "blend"},
      {GST_D3D11_DEINTERLACE_METHOD_BOB,
          "Bob: Interpolating missing lines by using the adjacent lines. "
          "Framerate will be doubled (e,g, 60i -> 60p)", "bob"},
      {GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE,
          "Adaptive: Interpolating missing lines by using spatial/temporal references. "
          "Framerate will be doubled (e,g, 60i -> 60p)",
          "adaptive"},
      {GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION,
          "Motion Compensation: Recreating missing lines by using motion vector. "
          "Framerate will be doubled (e,g, 60i -> 60p)", "mocomp"},
      {0, NULL, NULL},
    };
    GType tmp = g_flags_register_static ("GstD3D11DeinterlaceMethod",
        method_types);
    g_once_init_leave (&method_type, tmp);
  }

  return (GType) method_type;
}
151
/* Per-device deinterlacing capabilities */
typedef struct
{
  /* Bitwise-OR of the methods the video processor supports */
  GstD3D11DeinterlaceMethod supported_methods;
  /* Method used when the requested one is unsupported */
  GstD3D11DeinterlaceMethod default_method;

  /* Reference frame counts usable by the processor
   * (see D3D11_VIDEO_PROCESSOR_STREAM past/future surfaces) */
  guint max_past_frames;
  guint max_future_frames;
} GstD3D11DeinterlaceDeviceCaps;
160
/* Per-GPU data handed to class_init of the registered subclass; refcounted
 * (see gst_d3d11_deinterlace_class_data_ref()/unref()) because it can
 * outlive its creator */
typedef struct
{
  GType deinterlace_type;

  /* Pad template caps for the subclass */
  GstCaps *sink_caps;
  GstCaps *src_caps;
  /* DXGI identification of the GPU */
  guint adapter;
  guint device_id;
  guint vendor_id;
  gchar *description;

  GstD3D11DeinterlaceDeviceCaps device_caps;

  guint ref_count;
} GstD3D11DeinterlaceClassData;
176
177 static GstD3D11DeinterlaceClassData *
gst_d3d11_deinterlace_class_data_new(void)178 gst_d3d11_deinterlace_class_data_new (void)
179 {
180 GstD3D11DeinterlaceClassData *self = g_new0 (GstD3D11DeinterlaceClassData, 1);
181
182 self->ref_count = 1;
183
184 return self;
185 }
186
187 static GstD3D11DeinterlaceClassData *
gst_d3d11_deinterlace_class_data_ref(GstD3D11DeinterlaceClassData * data)188 gst_d3d11_deinterlace_class_data_ref (GstD3D11DeinterlaceClassData * data)
189 {
190 g_assert (data != NULL);
191
192 g_atomic_int_add (&data->ref_count, 1);
193
194 return data;
195 }
196
197 static void
gst_d3d11_deinterlace_class_data_unref(GstD3D11DeinterlaceClassData * data)198 gst_d3d11_deinterlace_class_data_unref (GstD3D11DeinterlaceClassData * data)
199 {
200 g_assert (data != NULL);
201
202 if (g_atomic_int_dec_and_test (&data->ref_count)) {
203 gst_clear_caps (&data->sink_caps);
204 gst_clear_caps (&data->src_caps);
205 g_free (data->description);
206 g_free (data);
207 }
208 }
209
/* Property IDs; all properties except PROP_METHOD are read-only
 * (see gst_d3d11_deinterlace_class_init()) */
enum
{
  PROP_0,
  PROP_ADAPTER,
  PROP_DEVICE_ID,
  PROP_VENDOR_ID,
  PROP_METHOD,
  PROP_SUPPORTED_METHODS,
};
219
220 /* hardcoded maximum queue size for each past/future frame queue */
221 #define MAX_NUM_REFERENCES 2
222
typedef struct _GstD3D11Deinterlace
{
  GstBaseTransform parent;

  /* Negotiated video info for the sink (in) and src (out) pads */
  GstVideoInfo in_info;
  GstVideoInfo out_info;
  /* Calculated buffer duration by using upstream framerate */
  GstClockTime default_buffer_duration;

  GstD3D11Device *device;

  /* COM interfaces used for the deinterlacing blit; released in close() */
  ID3D11VideoDevice *video_device;
  ID3D11VideoContext *video_context;
  ID3D11VideoProcessorEnumerator *video_enum;
  ID3D11VideoProcessor *video_proc;

  /* Selected method; collapsed to a single flag by update_method() */
  GstD3D11DeinterlaceMethod method;

  /* Protects the frame queues and processor state (recursive) */
  GRecMutex lock;
  /* Reference frames held for adaptive/motion-compensation methods */
  GQueue past_frame_queue;
  GQueue future_frame_queue;
  /* Frame currently being deinterlaced */
  GstBuffer *to_process;

  /* Queue size limits derived from the selected method
   * (see gst_d3d11_deinterlace_update_method()) */
  guint max_past_frames;
  guint max_future_frames;

  /* D3D11_VIDEO_PROCESSOR_STREAM::InputFrameOrField */
  guint input_index;

  /* Clear/Update per submit_input_buffer() */
  guint num_output_per_input;
  guint num_transformed;
  gboolean first_output;

  /* Fallback buffer pools for input/output; deactivated in reset() */
  GstBufferPool *fallback_in_pool;
  GstBufferPool *fallback_out_pool;
} GstD3D11Deinterlace;
260
typedef struct _GstD3D11DeinterlaceClass
{
  GstBaseTransformClass parent_class;

  /* DXGI identification of the GPU this subclass was registered for,
   * copied from the class data in class_init */
  guint adapter;
  guint device_id;
  guint vendor_id;

  /* Deinterlacing capabilities of that GPU's video processor */
  GstD3D11DeinterlaceDeviceCaps device_caps;
} GstD3D11DeinterlaceClass;
271
272 static GstElementClass *parent_class = NULL;
273
274 #define GST_D3D11_DEINTERLACE(object) ((GstD3D11Deinterlace *) (object))
275 #define GST_D3D11_DEINTERLACE_GET_CLASS(object) \
276 (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object), \
277 GstD3D11DeinterlaceClass))
278 #define GST_D3D11_DEINTERLACE_LOCK(self) \
279 g_rec_mutex_lock (&GST_D3D11_DEINTERLACE (self)->lock);
280 #define GST_D3D11_DEINTERLACE_UNLOCK(self) \
281 g_rec_mutex_unlock (&GST_D3D11_DEINTERLACE (self)->lock);
282
283 static gboolean
284 gst_d3d11_deinterlace_update_method (GstD3D11Deinterlace * self);
285 static void gst_d3d11_deinterlace_reset (GstD3D11Deinterlace * self);
286 static GstFlowReturn gst_d3d11_deinterlace_drain (GstD3D11Deinterlace * self);
287
288 /* GObjectClass vfunc */
289 static void gst_d3d11_deinterlace_get_property (GObject * object,
290 guint prop_id, GValue * value, GParamSpec * pspec);
291 static void gst_d3d11_deinterlace_set_property (GObject * object, guint prop_id,
292 const GValue * value, GParamSpec * pspec);
293 static void gst_d3d11_deinterlace_finalize (GObject * object);
294
295 /* GstElementClass vfunc */
296 static void gst_d3d11_deinterlace_set_context (GstElement * element,
297 GstContext * context);
298
299 /* GstBaseTransformClass vfunc */
300 static gboolean gst_d3d11_deinterlace_start (GstBaseTransform * trans);
301 static gboolean gst_d3d11_deinterlace_stop (GstBaseTransform * trans);
302 static gboolean gst_d3d11_deinterlace_query (GstBaseTransform * trans,
303 GstPadDirection direction, GstQuery * query);
304 static GstCaps *gst_d3d11_deinterlace_transform_caps (GstBaseTransform * trans,
305 GstPadDirection direction, GstCaps * caps, GstCaps * filter);
306 static GstCaps *gst_d3d11_deinterlace_fixate_caps (GstBaseTransform * trans,
307 GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
308 static gboolean
309 gst_d3d11_deinterlace_propose_allocation (GstBaseTransform * trans,
310 GstQuery * decide_query, GstQuery * query);
311 static gboolean
312 gst_d3d11_deinterlace_decide_allocation (GstBaseTransform * trans,
313 GstQuery * query);
314 static gboolean gst_d3d11_deinterlace_set_caps (GstBaseTransform * trans,
315 GstCaps * incaps, GstCaps * outcaps);
316 static GstFlowReturn
317 gst_d3d11_deinterlace_submit_input_buffer (GstBaseTransform * trans,
318 gboolean is_discont, GstBuffer * input);
319 static GstFlowReturn
320 gst_d3d11_deinterlace_generate_output (GstBaseTransform * trans,
321 GstBuffer ** outbuf);
322 static GstFlowReturn
323 gst_d3d11_deinterlace_transform (GstBaseTransform * trans, GstBuffer * inbuf,
324 GstBuffer * outbuf);
325 static gboolean gst_d3d11_deinterlace_sink_event (GstBaseTransform * trans,
326 GstEvent * event);
327 static void gst_d3d11_deinterlace_before_transform (GstBaseTransform * trans,
328 GstBuffer * buffer);
329
/* Class initializer for the dynamically registered per-GPU subclass.
 * @data carries the GstD3D11DeinterlaceClassData for the GPU; one reference
 * is consumed at the end of this function. */
static void
gst_d3d11_deinterlace_class_init (GstD3D11DeinterlaceClass * klass,
    gpointer data)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
  GstD3D11DeinterlaceClassData *cdata = (GstD3D11DeinterlaceClassData *) data;
  gchar *long_name;

  parent_class = (GstElementClass *) g_type_class_peek_parent (klass);

  gobject_class->get_property = gst_d3d11_deinterlace_get_property;
  gobject_class->set_property = gst_d3d11_deinterlace_set_property;
  gobject_class->finalize = gst_d3d11_deinterlace_finalize;

  /* Device identification properties are read-only; only "method" is
   * writable, and only while the element is in READY or below */
  g_object_class_install_property (gobject_class, PROP_ADAPTER,
      g_param_spec_uint ("adapter", "Adapter",
          "DXGI Adapter index for creating device",
          0, G_MAXUINT32, cdata->adapter,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_DEVICE_ID,
      g_param_spec_uint ("device-id", "Device Id",
          "DXGI Device ID", 0, G_MAXUINT32, 0,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_VENDOR_ID,
      g_param_spec_uint ("vendor-id", "Vendor Id",
          "DXGI Vendor ID", 0, G_MAXUINT32, 0,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_METHOD,
      g_param_spec_flags ("method", "Method",
          "Deinterlace Method. Use can set multiple methods as a flagset "
          "and element will select one of method automatically. "
          "If deinterlacing device failed to deinterlace with given mode, "
          "fallback might happen by the device",
          GST_TYPE_D3D11_DEINTERLACE_METHOD, cdata->device_caps.default_method,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              GST_PARAM_MUTABLE_READY)));
  g_object_class_install_property (gobject_class, PROP_SUPPORTED_METHODS,
      g_param_spec_flags ("supported-methods", "Supported Methods",
          "Set of supported deinterlace methods by device",
          GST_TYPE_D3D11_DEINTERLACE_METHOD,
          cdata->device_caps.supported_methods,
          (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));

  element_class->set_context =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_set_context);

  /* Element metadata includes the GPU description for disambiguation */
  long_name = g_strdup_printf ("Direct3D11 %s Deinterlacer",
      cdata->description);
  gst_element_class_set_metadata (element_class, long_name,
      "Filter/Effect/Video/Deinterlace/Hardware",
      "A Direct3D11 based deinterlacer",
      "Seungha Yang <seungha@centricular.com>");
  g_free (long_name);

  /* Pad templates come from the caps probed for this GPU */
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
          cdata->sink_caps));
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
          cdata->src_caps));

  /* Progressive input negotiates identical caps and passes through */
  trans_class->passthrough_on_same_caps = TRUE;

  trans_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_start);
  trans_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_stop);
  trans_class->query = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_query);
  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_transform_caps);
  trans_class->fixate_caps =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_fixate_caps);
  trans_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_propose_allocation);
  trans_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_decide_allocation);
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_set_caps);
  trans_class->submit_input_buffer =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_submit_input_buffer);
  trans_class->generate_output =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_generate_output);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_transform);
  trans_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_sink_event);
  trans_class->before_transform =
      GST_DEBUG_FUNCPTR (gst_d3d11_deinterlace_before_transform);

  /* Copy per-GPU data into the class before releasing our reference */
  klass->adapter = cdata->adapter;
  klass->device_id = cdata->device_id;
  klass->vendor_id = cdata->vendor_id;
  klass->device_caps = cdata->device_caps;

  gst_d3d11_deinterlace_class_data_unref (cdata);

  gst_type_mark_as_plugin_api (GST_TYPE_D3D11_DEINTERLACE_METHOD,
      (GstPluginAPIFlags) 0);
}
427
428 static void
gst_d3d11_deinterlace_init(GstD3D11Deinterlace * self)429 gst_d3d11_deinterlace_init (GstD3D11Deinterlace * self)
430 {
431 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
432
433 self->method = klass->device_caps.default_method;
434 self->default_buffer_duration = GST_CLOCK_TIME_NONE;
435 gst_d3d11_deinterlace_update_method (self);
436
437 g_queue_init (&self->past_frame_queue);
438 g_queue_init (&self->future_frame_queue);
439 g_rec_mutex_init (&self->lock);
440 }
441
442 static void
gst_d3d11_deinterlace_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)443 gst_d3d11_deinterlace_get_property (GObject * object, guint prop_id,
444 GValue * value, GParamSpec * pspec)
445 {
446 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);
447 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (object);
448
449 switch (prop_id) {
450 case PROP_ADAPTER:
451 g_value_set_uint (value, klass->adapter);
452 break;
453 case PROP_DEVICE_ID:
454 g_value_set_uint (value, klass->device_id);
455 break;
456 case PROP_VENDOR_ID:
457 g_value_set_uint (value, klass->vendor_id);
458 break;
459 case PROP_METHOD:
460 g_value_set_flags (value, self->method);
461 break;
462 case PROP_SUPPORTED_METHODS:
463 g_value_set_flags (value, klass->device_caps.supported_methods);
464 break;
465 default:
466 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
467 break;
468 }
469 }
470
471 static gboolean
gst_d3d11_deinterlace_update_method(GstD3D11Deinterlace * self)472 gst_d3d11_deinterlace_update_method (GstD3D11Deinterlace * self)
473 {
474 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
475 GstD3D11DeinterlaceMethod requested_method = self->method;
476 gboolean updated = TRUE;
477
478 /* Verify whether requested method is supported */
479 if ((self->method & klass->device_caps.supported_methods) == 0) {
480 #ifndef GST_DISABLE_GST_DEBUG
481 gchar *supported, *requested;
482
483 supported = g_flags_to_string (GST_TYPE_D3D11_DEINTERLACE_METHOD,
484 klass->device_caps.supported_methods);
485 requested = g_flags_to_string (GST_TYPE_D3D11_DEINTERLACE_METHOD,
486 klass->device_caps.supported_methods);
487
488 GST_WARNING_OBJECT (self,
489 "Requested method %s is not supported (supported: %s)",
490 requested, supported);
491
492 g_free (supported);
493 g_free (requested);
494 #endif
495
496 self->method = klass->device_caps.default_method;
497
498 goto done;
499 }
500
501 /* Drop not supported methods */
502 self->method = (GstD3D11DeinterlaceMethod)
503 (klass->device_caps.supported_methods & self->method);
504
505 /* Single method was requested? */
506 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND ||
507 self->method == GST_D3D11_DEINTERLACE_METHOD_BOB ||
508 self->method == GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE ||
509 self->method == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
510 if (self->method == requested_method)
511 updated = FALSE;
512 } else {
513 /* Pick single method from requested */
514 if ((self->method & GST_D3D11_DEINTERLACE_METHOD_BOB) ==
515 GST_D3D11_DEINTERLACE_METHOD_BOB) {
516 self->method = GST_D3D11_DEINTERLACE_METHOD_BOB;
517 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE) ==
518 GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE) {
519 self->method = GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE;
520 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION)
521 == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
522 self->method = GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION;
523 } else if ((self->method & GST_D3D11_DEINTERLACE_METHOD_BLEND) ==
524 GST_D3D11_DEINTERLACE_METHOD_BLEND) {
525 self->method = GST_D3D11_DEINTERLACE_METHOD_BLEND;
526 } else {
527 self->method = klass->device_caps.default_method;
528 g_assert_not_reached ();
529 }
530 }
531
532 done:
533 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND) {
534 /* Both methods don't use reference frame for deinterlacing */
535 self->max_past_frames = self->max_future_frames = 0;
536 } else if (self->method == GST_D3D11_DEINTERLACE_METHOD_BOB) {
537 /* To calculate timestamp and duration of output fraems, we will hold one
538 * future frame even though processor device will not use reference */
539 self->max_past_frames = 0;
540 self->max_future_frames = 1;
541 } else {
542 /* FIXME: how many frames should be allowed? also, this needs to be
543 * configurable */
544 self->max_past_frames = MIN (klass->device_caps.max_past_frames,
545 MAX_NUM_REFERENCES);
546
547 /* Likewise Bob, we need at least one future frame for timestamp/duration
548 * calculation */
549 self->max_future_frames =
550 MAX (MIN (klass->device_caps.max_future_frames, MAX_NUM_REFERENCES), 1);
551 }
552
553 return updated;
554 }
555
556 static void
gst_d3d11_deinterlace_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)557 gst_d3d11_deinterlace_set_property (GObject * object, guint prop_id,
558 const GValue * value, GParamSpec * pspec)
559 {
560 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);
561
562 switch (prop_id) {
563 case PROP_METHOD:{
564 gboolean notify_update = FALSE;
565
566 GST_OBJECT_LOCK (self);
567 self->method = (GstD3D11DeinterlaceMethod) g_value_get_flags (value);
568 notify_update = gst_d3d11_deinterlace_update_method (self);
569 GST_OBJECT_UNLOCK (self);
570
571 if (notify_update)
572 g_object_notify (object, "method");
573 break;
574 }
575 default:
576 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
577 break;
578 }
579 }
580
581 static void
gst_d3d11_deinterlace_finalize(GObject * object)582 gst_d3d11_deinterlace_finalize (GObject * object)
583 {
584 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (object);
585
586 g_rec_mutex_clear (&self->lock);
587
588 G_OBJECT_CLASS (parent_class)->finalize (object);
589 }
590
591 static void
gst_d3d11_deinterlace_set_context(GstElement * element,GstContext * context)592 gst_d3d11_deinterlace_set_context (GstElement * element, GstContext * context)
593 {
594 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (element);
595 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
596
597 gst_d3d11_handle_set_context (element, context, klass->adapter,
598 &self->device);
599
600 GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
601 }
602
603 static gboolean
gst_d3d11_deinterlace_open(GstD3D11Deinterlace * self)604 gst_d3d11_deinterlace_open (GstD3D11Deinterlace * self)
605 {
606 ID3D11VideoDevice *video_device;
607 ID3D11VideoContext *video_context;
608
609 video_device = gst_d3d11_device_get_video_device_handle (self->device);
610 if (!video_device) {
611 GST_ERROR_OBJECT (self, "ID3D11VideoDevice is not availale");
612 return FALSE;
613 }
614
615 video_context = gst_d3d11_device_get_video_context_handle (self->device);
616 if (!video_context) {
617 GST_ERROR_OBJECT (self, "ID3D11VideoContext is not available");
618 return FALSE;
619 }
620
621 self->video_device = video_device;
622 video_device->AddRef ();
623
624 self->video_context = video_context;
625 video_context->AddRef ();
626
627 return TRUE;
628 }
629
630 /* Must be called with lock taken */
631 static void
gst_d3d11_deinterlace_reset_history(GstD3D11Deinterlace * self)632 gst_d3d11_deinterlace_reset_history (GstD3D11Deinterlace * self)
633 {
634 self->input_index = 0;
635 self->num_output_per_input = 1;
636 self->num_transformed = 0;
637 self->first_output = TRUE;
638
639 g_queue_clear_full (&self->past_frame_queue,
640 (GDestroyNotify) gst_buffer_unref);
641 g_queue_clear_full (&self->future_frame_queue,
642 (GDestroyNotify) gst_buffer_unref);
643 gst_clear_buffer (&self->to_process);
644 }
645
646 static void
gst_d3d11_deinterlace_reset(GstD3D11Deinterlace * self)647 gst_d3d11_deinterlace_reset (GstD3D11Deinterlace * self)
648 {
649 GST_D3D11_DEINTERLACE_LOCK (self);
650 if (self->fallback_in_pool) {
651 gst_buffer_pool_set_active (self->fallback_in_pool, FALSE);
652 gst_object_unref (self->fallback_in_pool);
653 self->fallback_in_pool = NULL;
654 }
655
656 if (self->fallback_out_pool) {
657 gst_buffer_pool_set_active (self->fallback_out_pool, FALSE);
658 gst_object_unref (self->fallback_out_pool);
659 self->fallback_out_pool = NULL;
660 }
661
662 GST_D3D11_CLEAR_COM (self->video_enum);
663 GST_D3D11_CLEAR_COM (self->video_proc);
664
665 gst_d3d11_deinterlace_reset_history (self);
666 self->default_buffer_duration = GST_CLOCK_TIME_NONE;
667
668 GST_D3D11_DEINTERLACE_UNLOCK (self);
669 }
670
671 static void
gst_d3d11_deinterlace_close(GstD3D11Deinterlace * self)672 gst_d3d11_deinterlace_close (GstD3D11Deinterlace * self)
673 {
674 gst_d3d11_deinterlace_reset (self);
675
676 GST_D3D11_CLEAR_COM (self->video_device);
677 GST_D3D11_CLEAR_COM (self->video_context);
678
679 gst_clear_object (&self->device);
680 }
681
682 static gboolean
gst_d3d11_deinterlace_start(GstBaseTransform * trans)683 gst_d3d11_deinterlace_start (GstBaseTransform * trans)
684 {
685 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
686 GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
687
688 if (!gst_d3d11_ensure_element_data (GST_ELEMENT_CAST (self), klass->adapter,
689 &self->device)) {
690 GST_ERROR_OBJECT (self, "Couldn't create d3d11device");
691 return FALSE;
692 }
693
694 if (!gst_d3d11_deinterlace_open (self)) {
695 GST_ERROR_OBJECT (self, "Couldn't open video device");
696 gst_d3d11_deinterlace_close (self);
697 return FALSE;
698 }
699
700 return TRUE;
701 }
702
703 static gboolean
gst_d3d11_deinterlace_stop(GstBaseTransform * trans)704 gst_d3d11_deinterlace_stop (GstBaseTransform * trans)
705 {
706 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
707
708 gst_d3d11_deinterlace_close (self);
709
710 return TRUE;
711 }
712
713 static gboolean
gst_d3d11_deinterlace_query(GstBaseTransform * trans,GstPadDirection direction,GstQuery * query)714 gst_d3d11_deinterlace_query (GstBaseTransform * trans,
715 GstPadDirection direction, GstQuery * query)
716 {
717 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
718
719 switch (GST_QUERY_TYPE (query)) {
720 case GST_QUERY_CONTEXT:
721 if (gst_d3d11_handle_context_query (GST_ELEMENT_CAST (self),
722 query, self->device)) {
723 return TRUE;
724 }
725 break;
726 default:
727 break;
728 }
729
730 return GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
731 query);
732 }
733
/* Returns a new caps set derived from @caps where, for each structure
 * carrying the D3D11 memory caps feature, the "interlace-mode" and
 * "field-order" fields (plus "framerate" when @remove_framerate is set,
 * since non-blend methods double the framerate) are removed. Structures
 * with ANY or other features are copied through unchanged. */
static GstCaps *
gst_d3d11_deinterlace_remove_interlace_info (GstCaps * caps,
    gboolean remove_framerate)
{
  GstStructure *st;
  GstCapsFeatures *f;
  gint i, n;
  GstCaps *res;
  GstCapsFeatures *feature =
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY);

  res = gst_caps_new_empty ();

  n = gst_caps_get_size (caps);
  for (i = 0; i < n; i++) {
    st = gst_caps_get_structure (caps, i);
    f = gst_caps_get_features (caps, i);

    /* If this is already expressed by the existing caps
     * skip this structure */
    if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
      continue;

    st = gst_structure_copy (st);
    /* Only remove format info for the cases when we can actually convert */
    if (!gst_caps_features_is_any (f)
        && gst_caps_features_is_equal (f, feature)) {
      if (remove_framerate) {
        gst_structure_remove_fields (st, "interlace-mode", "field-order",
            "framerate", NULL);
      } else {
        gst_structure_remove_fields (st, "interlace-mode", "field-order", NULL);
      }
    }

    gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
  }

  gst_caps_features_free (feature);

  return res;
}
776
777 static GstCaps *
gst_d3d11_deinterlace_transform_caps(GstBaseTransform * trans,GstPadDirection direction,GstCaps * caps,GstCaps * filter)778 gst_d3d11_deinterlace_transform_caps (GstBaseTransform * trans,
779 GstPadDirection direction, GstCaps * caps, GstCaps * filter)
780 {
781 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
782 GstCaps *tmp, *tmp2;
783 GstCaps *result;
784
785 /* Get all possible caps that we can transform to */
786 tmp = gst_d3d11_deinterlace_remove_interlace_info (caps,
787 /* Non-blend mode will double framerate */
788 self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND);
789
790 if (filter) {
791 tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
792 gst_caps_unref (tmp);
793 tmp = tmp2;
794 }
795
796 result = tmp;
797
798 GST_DEBUG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " into %"
799 GST_PTR_FORMAT, caps, result);
800
801 return result;
802 }
803
/* Fixates @othercaps against @caps. For the sink-to-src direction, doubles
 * the output framerate for non-blend methods on interlaced input, and
 * forwards interlace-mode=progressive so that basetransform can enable
 * passthrough for progressive streams. */
static GstCaps *
gst_d3d11_deinterlace_fixate_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstStructure *s;
  GstCaps *tmp;
  gint fps_n, fps_d;
  GstVideoInfo info;
  const gchar *interlace_mode;

  othercaps = gst_caps_truncate (othercaps);
  othercaps = gst_caps_make_writable (othercaps);

  /* Upstream (sink side) caps need no adjustment */
  if (direction == GST_PAD_SRC)
    return gst_caps_fixate (othercaps);

  tmp = gst_caps_copy (caps);
  tmp = gst_caps_fixate (tmp);

  if (!gst_video_info_from_caps (&info, tmp)) {
    GST_WARNING_OBJECT (self, "Invalid caps %" GST_PTR_FORMAT, caps);
    gst_caps_unref (tmp);

    return gst_caps_fixate (othercaps);
  }

  s = gst_caps_get_structure (tmp, 0);
  if (gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
    /* for non-blend method, output framerate will be doubled */
    if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND &&
        GST_VIDEO_INFO_IS_INTERLACED (&info)) {
      fps_n *= 2;
    }

    gst_caps_set_simple (othercaps,
        "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
  }

  interlace_mode = gst_structure_get_string (s, "interlace-mode");
  if (g_strcmp0 ("progressive", interlace_mode) == 0) {
    /* Just forward interlace-mode=progressive.
     * By this way, basetransform will enable passthrough for non-interlaced
     * stream*/
    gst_caps_set_simple (othercaps,
        "interlace-mode", G_TYPE_STRING, "progressive", NULL);
  }

  gst_caps_unref (tmp);

  return gst_caps_fixate (othercaps);
}
856
857 static gboolean
gst_d3d11_deinterlace_propose_allocation(GstBaseTransform * trans,GstQuery * decide_query,GstQuery * query)858 gst_d3d11_deinterlace_propose_allocation (GstBaseTransform * trans,
859 GstQuery * decide_query, GstQuery * query)
860 {
861 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
862 GstVideoInfo info;
863 GstBufferPool *pool = NULL;
864 GstCaps *caps;
865 guint n_pools, i;
866 GstStructure *config;
867 guint size;
868 GstD3D11AllocationParams *d3d11_params;
869 guint min_buffers = 0;
870
871 if (!GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
872 decide_query, query))
873 return FALSE;
874
875 /* passthrough, we're done */
876 if (decide_query == NULL)
877 return TRUE;
878
879 gst_query_parse_allocation (query, &caps, NULL);
880
881 if (caps == NULL)
882 return FALSE;
883
884 if (!gst_video_info_from_caps (&info, caps))
885 return FALSE;
886
887 n_pools = gst_query_get_n_allocation_pools (query);
888 for (i = 0; i < n_pools; i++) {
889 gst_query_parse_nth_allocation_pool (query, i, &pool, NULL, NULL, NULL);
890 if (pool) {
891 if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
892 gst_clear_object (&pool);
893 } else {
894 GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
895 if (dpool->device != self->device)
896 gst_clear_object (&pool);
897 }
898 }
899 }
900
901 if (!pool)
902 pool = gst_d3d11_buffer_pool_new (self->device);
903
904 config = gst_buffer_pool_get_config (pool);
905 gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
906
907 d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
908 if (!d3d11_params) {
909 d3d11_params = gst_d3d11_allocation_params_new (self->device, &info,
910 (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);
911 } else {
912 d3d11_params->desc[0].BindFlags |= D3D11_BIND_RENDER_TARGET;
913 }
914
915 gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
916 gst_d3d11_allocation_params_free (d3d11_params);
917
918 if (self->method == GST_D3D11_DEINTERLACE_METHOD_BOB) {
919 /* For non-blend methods, we will produce two progressive frames from
920 * a single interlaced frame. To determine timestamp and duration,
921 * we might need to hold one past frame if buffer duration is unknown */
922 min_buffers = 2;
923 } else if (self->method == GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE ||
924 self->method == GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION) {
925 /* For advanced deinterlacing methods, we will hold more frame so that
926 * device can use them as reference frames */
927
928 min_buffers += self->max_past_frames;
929 min_buffers += self->max_future_frames;
930 /* And one for current frame */
931 min_buffers++;
932
933 /* we will hold at least one frame for timestamp/duration calculation */
934 min_buffers = MAX (min_buffers, 2);
935 }
936
937 /* size will be updated by d3d11 buffer pool */
938 gst_buffer_pool_config_set_params (config, caps, 0, min_buffers, 0);
939
940 if (!gst_buffer_pool_set_config (pool, config))
941 goto config_failed;
942
943 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
944 gst_query_add_allocation_meta (query,
945 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL);
946
947 /* d3d11 buffer pool will update buffer size based on allocated texture,
948 * get size from config again */
949 config = gst_buffer_pool_get_config (pool);
950 gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
951 gst_structure_free (config);
952
953 gst_query_add_allocation_pool (query, pool, size, min_buffers, 0);
954
955 gst_object_unref (pool);
956
957 return TRUE;
958
959 /* ERRORS */
960 config_failed:
961 {
962 GST_ERROR_OBJECT (self, "failed to set config");
963 gst_object_unref (pool);
964 return FALSE;
965 }
966 }
967
/* Decides the downstream allocation: reuses the first proposed pool when it
 * is a D3D11 pool belonging to our device, otherwise creates a new one, and
 * forces render-target capable textures so buffers can serve as video
 * processor output. Chains up to the baseclass at the end.
 *
 * Returns: result of the baseclass decide_allocation, or FALSE on bad caps. */
static gboolean
gst_d3d11_deinterlace_decide_allocation (GstBaseTransform * trans,
    GstQuery * query)
{
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstCaps *outcaps = NULL;
  GstBufferPool *pool = NULL;
  guint size, min = 0, max = 0;
  GstStructure *config;
  GstD3D11AllocationParams *d3d11_params;
  gboolean update_pool = FALSE;
  GstVideoInfo info;

  gst_query_parse_allocation (query, &outcaps, NULL);

  if (!outcaps)
    return FALSE;

  if (!gst_video_info_from_caps (&info, outcaps))
    return FALSE;

  size = GST_VIDEO_INFO_SIZE (&info);

  /* Only the first proposed pool is considered; it is discarded unless it is
   * a D3D11 pool allocated on the same device */
  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    if (pool) {
      if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
        gst_clear_object (&pool);
      } else {
        GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
        if (dpool->device != self->device)
          gst_clear_object (&pool);
      }
    }

    /* Even if the proposed pool was rejected, a pool entry exists in the
     * query, so it must be updated rather than added */
    update_pool = TRUE;
  }

  if (!pool)
    pool = gst_d3d11_buffer_pool_new (self->device);

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

  /* Processor output requires the render-target bind flag on the texture */
  d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
  if (!d3d11_params) {
    d3d11_params = gst_d3d11_allocation_params_new (self->device, &info,
        (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);
  } else {
    d3d11_params->desc[0].BindFlags |= D3D11_BIND_RENDER_TARGET;
  }

  gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
  gst_d3d11_allocation_params_free (d3d11_params);

  gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  /* d3d11 buffer pool will update buffer size based on allocated texture,
   * get size from config again */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
  gst_structure_free (config);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
      query);
}
1042
1043 static gboolean
gst_d3d11_deinterlace_prepare_fallback_pool(GstD3D11Deinterlace * self,GstCaps * in_caps,GstVideoInfo * in_info,GstCaps * out_caps,GstVideoInfo * out_info)1044 gst_d3d11_deinterlace_prepare_fallback_pool (GstD3D11Deinterlace * self,
1045 GstCaps * in_caps, GstVideoInfo * in_info, GstCaps * out_caps,
1046 GstVideoInfo * out_info)
1047 {
1048 GstD3D11AllocationParams *d3d11_params;
1049
1050 /* Clearing potentially remaining resource here would be redundant.
1051 * Just to be safe enough */
1052 g_queue_clear_full (&self->past_frame_queue,
1053 (GDestroyNotify) gst_buffer_unref);
1054 g_queue_clear_full (&self->future_frame_queue,
1055 (GDestroyNotify) gst_buffer_unref);
1056
1057 if (self->fallback_in_pool) {
1058 gst_buffer_pool_set_active (self->fallback_in_pool, FALSE);
1059 gst_object_unref (self->fallback_in_pool);
1060 self->fallback_in_pool = NULL;
1061 }
1062
1063 if (self->fallback_out_pool) {
1064 gst_buffer_pool_set_active (self->fallback_out_pool, FALSE);
1065 gst_object_unref (self->fallback_out_pool);
1066 self->fallback_out_pool = NULL;
1067 }
1068
1069 /* Empty bind flag is allowed for video processor input */
1070 d3d11_params = gst_d3d11_allocation_params_new (self->device, in_info,
1071 (GstD3D11AllocationFlags) 0, 0);
1072 self->fallback_in_pool = gst_d3d11_buffer_pool_new_with_options (self->device,
1073 in_caps, d3d11_params, 0, 0);
1074 gst_d3d11_allocation_params_free (d3d11_params);
1075
1076 if (!self->fallback_in_pool) {
1077 GST_ERROR_OBJECT (self, "Failed to create input fallback buffer pool");
1078 return FALSE;
1079 }
1080
1081 /* For processor output, render target bind flag is required */
1082 d3d11_params = gst_d3d11_allocation_params_new (self->device, out_info,
1083 (GstD3D11AllocationFlags) 0, D3D11_BIND_RENDER_TARGET);
1084 self->fallback_out_pool =
1085 gst_d3d11_buffer_pool_new_with_options (self->device,
1086 out_caps, d3d11_params, 0, 0);
1087 gst_d3d11_allocation_params_free (d3d11_params);
1088
1089 if (!self->fallback_out_pool) {
1090 GST_ERROR_OBJECT (self, "Failed to create output fallback buffer pool");
1091 gst_clear_object (&self->fallback_out_pool);
1092 return FALSE;
1093 }
1094
1095 return TRUE;
1096 }
1097
/* Caps negotiation: validates in/out caps, computes the expected per-frame
 * duration, creates a D3D11 video processor (enumerator + processor) that
 * supports the selected deinterlacing method, prepares fallback pools, and
 * configures the processor stream/output state. Enables passthrough for
 * progressive input.
 *
 * Returns: TRUE on success. */
static gboolean
gst_d3d11_deinterlace_set_caps (GstBaseTransform * trans,
    GstCaps * incaps, GstCaps * outcaps)
{
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstVideoInfo in_info, out_info;
  /* *INDENT-OFF* */
  ComPtr<ID3D11VideoProcessorEnumerator> video_enum;
  ComPtr<ID3D11VideoProcessor> video_proc;
  /* *INDENT-ON* */
  D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
  D3D11_VIDEO_PROCESSOR_CAPS proc_caps;
  D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rate_conv_caps;
  D3D11_VIDEO_PROCESSOR_OUTPUT_RATE output_rate =
      D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL;
  HRESULT hr;
  RECT rect;
  guint i;

  /* Nothing to (re)configure while passthrough is active */
  if (gst_base_transform_is_passthrough (trans))
    return TRUE;

  if (!gst_video_info_from_caps (&in_info, incaps)) {
    GST_ERROR_OBJECT (self, "Invalid input caps %" GST_PTR_FORMAT, incaps);
    return FALSE;
  }

  if (!gst_video_info_from_caps (&out_info, outcaps)) {
    GST_ERROR_OBJECT (self, "Invalid output caps %" GST_PTR_FORMAT, outcaps);
    return FALSE;
  }

  self->in_info = in_info;
  self->out_info = out_info;

  /* Calculate expected buffer duration. We might need to reference this value
   * when buffer duration is unknown */
  if (GST_VIDEO_INFO_FPS_N (&in_info) > 0 &&
      GST_VIDEO_INFO_FPS_D (&in_info) > 0) {
    self->default_buffer_duration =
        gst_util_uint64_scale_int (GST_SECOND, GST_VIDEO_INFO_FPS_D (&in_info),
        GST_VIDEO_INFO_FPS_N (&in_info));
  } else {
    /* Assume 25 fps. We need this for reporting latency at least */
    self->default_buffer_duration =
        gst_util_uint64_scale_int (GST_SECOND, 1, 25);
  }

  gst_d3d11_deinterlace_reset (self);

  /* Nothing to do */
  if (!GST_VIDEO_INFO_IS_INTERLACED (&in_info)) {
    gst_base_transform_set_passthrough (trans, TRUE);

    return TRUE;
  }

  /* TFF or BFF is not important here, this is just for enumerating
   * available deinterlace devices */
  memset (&desc, 0, sizeof (D3D11_VIDEO_PROCESSOR_CONTENT_DESC));

  desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  if (GST_VIDEO_INFO_FIELD_ORDER (&in_info) ==
      GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST)
    desc.InputFrameFormat =
        D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
  desc.InputWidth = GST_VIDEO_INFO_WIDTH (&in_info);
  desc.InputHeight = GST_VIDEO_INFO_HEIGHT (&in_info);
  desc.OutputWidth = GST_VIDEO_INFO_WIDTH (&out_info);
  desc.OutputHeight = GST_VIDEO_INFO_HEIGHT (&out_info);
  desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;

  hr = self->video_device->CreateVideoProcessorEnumerator (&desc, &video_enum);
  if (!gst_d3d11_result (hr, self->device)) {
    GST_ERROR_OBJECT (self, "Couldn't create VideoProcessorEnumerator");
    return FALSE;
  }

  hr = video_enum->GetVideoProcessorCaps (&proc_caps);
  if (!gst_d3d11_result (hr, self->device)) {
    GST_ERROR_OBJECT (self, "Couldn't query processor caps");
    return FALSE;
  }

  /* Shouldn't happen, we checked this already during plugin_init */
  if (proc_caps.RateConversionCapsCount == 0) {
    GST_ERROR_OBJECT (self, "Deinterlacing is not supported");
    return FALSE;
  }

  /* Find the first rate-conversion caps entry whose ProcessorCaps bitmask
   * contains the requested method (self->method is used as a bitmask here) */
  for (i = 0; i < proc_caps.RateConversionCapsCount; i++) {
    hr = video_enum->GetVideoProcessorRateConversionCaps (i, &rate_conv_caps);
    if (FAILED (hr))
      continue;

    if ((rate_conv_caps.ProcessorCaps & self->method) == self->method)
      break;
  }

  if (i >= proc_caps.RateConversionCapsCount) {
    GST_ERROR_OBJECT (self, "Deinterlacing method 0x%x is not supported",
        self->method);
    return FALSE;
  }

  /* The index of the matching rate-conversion caps selects the processor */
  hr = self->video_device->CreateVideoProcessor (video_enum.Get (),
      i, &video_proc);
  if (!gst_d3d11_result (hr, self->device)) {
    GST_ERROR_OBJECT (self, "Couldn't create processor");
    return FALSE;
  }

  if (!gst_d3d11_deinterlace_prepare_fallback_pool (self, incaps, &in_info,
          outcaps, &out_info)) {
    GST_ERROR_OBJECT (self, "Couldn't prepare fallback buffer pool");
    return FALSE;
  }

  /* Transfer ownership from the ComPtr wrappers to the instance */
  self->video_enum = video_enum.Detach ();
  self->video_proc = video_proc.Detach ();

  rect.left = 0;
  rect.top = 0;
  rect.right = GST_VIDEO_INFO_WIDTH (&self->in_info);
  rect.bottom = GST_VIDEO_INFO_HEIGHT (&self->in_info);

  /* Blending seems to be considered as half rate. See also
   * https://docs.microsoft.com/en-us/windows/win32/api/d3d12video/ns-d3d12video-d3d12_video_process_input_stream_rate */
  if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND)
    output_rate = D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_HALF;

  /* Configure the processor under the device lock */
  gst_d3d11_device_lock (self->device);
  self->video_context->VideoProcessorSetStreamSourceRect (self->video_proc,
      0, TRUE, &rect);
  self->video_context->VideoProcessorSetStreamDestRect (self->video_proc,
      0, TRUE, &rect);
  self->video_context->VideoProcessorSetOutputTargetRect (self->video_proc,
      TRUE, &rect);
  self->video_context->
      VideoProcessorSetStreamAutoProcessingMode (self->video_proc, 0, FALSE);
  self->video_context->VideoProcessorSetStreamOutputRate (self->video_proc, 0,
      output_rate, TRUE, NULL);
  gst_d3d11_device_unlock (self->device);

  return TRUE;
}
1244
1245 static ID3D11VideoProcessorInputView *
gst_d3d11_deinterace_get_piv_from_buffer(GstD3D11Deinterlace * self,GstBuffer * buffer)1246 gst_d3d11_deinterace_get_piv_from_buffer (GstD3D11Deinterlace * self,
1247 GstBuffer * buffer)
1248 {
1249 GstMemory *mem;
1250 GstD3D11Memory *dmem;
1251 ID3D11VideoProcessorInputView *piv;
1252
1253 if (gst_buffer_n_memory (buffer) != 1) {
1254 GST_WARNING_OBJECT (self, "Input buffer has more than one memory");
1255 return NULL;
1256 }
1257
1258 mem = gst_buffer_peek_memory (buffer, 0);
1259 if (!gst_is_d3d11_memory (mem)) {
1260 GST_WARNING_OBJECT (self, "Input buffer is holding non-D3D11 memory");
1261 return NULL;
1262 }
1263
1264 dmem = (GstD3D11Memory *) mem;
1265 if (dmem->device != self->device) {
1266 GST_WARNING_OBJECT (self,
1267 "Input D3D11 memory was allocated by other device");
1268 return NULL;
1269 }
1270
1271 piv = gst_d3d11_memory_get_processor_input_view (dmem,
1272 self->video_device, self->video_enum);
1273 if (!piv) {
1274 GST_WARNING_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
1275 return NULL;
1276 }
1277
1278 return piv;
1279 }
1280
1281 static GstBuffer *
gst_d3d11_deinterlace_ensure_input_buffer(GstD3D11Deinterlace * self,GstBuffer * input)1282 gst_d3d11_deinterlace_ensure_input_buffer (GstD3D11Deinterlace * self,
1283 GstBuffer * input)
1284 {
1285 GstD3D11Memory *dmem;
1286 ID3D11VideoProcessorInputView *piv;
1287 GstBuffer *new_buf = NULL;
1288
1289 if (!input)
1290 return NULL;
1291
1292 piv = gst_d3d11_deinterace_get_piv_from_buffer (self, input);
1293 if (piv)
1294 return input;
1295
1296 if (!self->fallback_in_pool ||
1297 !gst_buffer_pool_set_active (self->fallback_in_pool, TRUE) ||
1298 gst_buffer_pool_acquire_buffer (self->fallback_in_pool, &new_buf,
1299 NULL) != GST_FLOW_OK) {
1300 GST_ERROR_OBJECT (self, "Fallback input buffer is unavailable");
1301 gst_buffer_unref (input);
1302
1303 return NULL;
1304 }
1305
1306 if (!gst_d3d11_buffer_copy_into (new_buf, input, &self->in_info)) {
1307 GST_ERROR_OBJECT (self, "Couldn't copy input buffer to fallback buffer");
1308 gst_buffer_unref (new_buf);
1309 gst_buffer_unref (input);
1310
1311 return NULL;
1312 }
1313
1314 dmem = (GstD3D11Memory *) gst_buffer_peek_memory (new_buf, 0);
1315 piv = gst_d3d11_memory_get_processor_input_view (dmem,
1316 self->video_device, self->video_enum);
1317 if (!piv) {
1318 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
1319 gst_buffer_unref (new_buf);
1320 gst_buffer_unref (input);
1321
1322 return NULL;
1323 }
1324
1325 /* copy metadata, default implemenation of baseclass will copy everything
1326 * what we need */
1327 GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata
1328 (GST_BASE_TRANSFORM_CAST (self), input, new_buf);
1329
1330 gst_buffer_unref (input);
1331
1332 return new_buf;
1333 }
1334
/* Queues @buffer as a future reference frame; when enough future frames are
 * queued (or when @buffer is NULL, meaning drain), pops the oldest queued
 * frame into self->to_process and fixes up its PTS/duration so that two
 * output fields can be timestamped from one input frame (non-blend methods).
 *
 * Takes ownership of @buffer. Returns: GST_FLOW_OK always. */
static GstFlowReturn
gst_d3d11_deinterlace_submit_future_frame (GstD3D11Deinterlace * self,
    GstBuffer * buffer)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (self);
  guint len;

  /* push tail and pop head, so that head frame can be the nearest frame
   * of current frame */
  if (buffer)
    g_queue_push_tail (&self->future_frame_queue, buffer);

  len = g_queue_get_length (&self->future_frame_queue);

  g_assert (len <= self->max_future_frames + 1);

  /* to_process should have been consumed by generate_output() already */
  if (self->to_process) {
    GST_WARNING_OBJECT (self, "Found uncleared processing buffer");
    gst_clear_buffer (&self->to_process);
  }

  if (len > self->max_future_frames ||
      /* NULL means drain */
      (buffer == NULL && len > 0)) {
    GstClockTime cur_timestmap = GST_CLOCK_TIME_NONE;
    GstClockTime duration = GST_CLOCK_TIME_NONE;
    GstBuffer *next_buf;

    self->to_process =
        (GstBuffer *) g_queue_pop_head (&self->future_frame_queue);

    /* For non-blend methods, we will produce two frames from a single
     * interlaced frame. So, sufficiently correct buffer duration is required
     * to set timestamp for the second output frame */
    if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND) {
      /* Prefer PTS; fall back to DTS when PTS is unknown */
      if (GST_BUFFER_PTS_IS_VALID (self->to_process)) {
        cur_timestmap = GST_BUFFER_PTS (self->to_process);
      } else {
        cur_timestmap = GST_BUFFER_DTS (self->to_process);
      }

      /* Ensure buffer duration */
      /* NOTE(review): GST_CLOCK_STIME_IS_VALID (signed-time validity) is
       * used on GstClockTime values here and below — presumably intentional,
       * but worth confirming against GST_CLOCK_TIME_IS_VALID */
      next_buf = (GstBuffer *) g_queue_peek_head (&self->future_frame_queue);
      if (next_buf && GST_CLOCK_STIME_IS_VALID (cur_timestmap)) {
        GstClockTime next_timestamp;

        if (GST_BUFFER_PTS_IS_VALID (next_buf)) {
          next_timestamp = GST_BUFFER_PTS (next_buf);
        } else {
          next_timestamp = GST_BUFFER_DTS (next_buf);
        }

        /* Derive the duration from the gap to the next frame, respecting
         * playback direction */
        if (GST_CLOCK_STIME_IS_VALID (next_timestamp)) {
          if (trans->segment.rate >= 0.0 && next_timestamp > cur_timestmap) {
            duration = next_timestamp - cur_timestmap;
          } else if (trans->segment.rate < 0.0
              && next_timestamp < cur_timestmap) {
            duration = cur_timestmap - next_timestamp;
          }
        }
      }

      /* Make sure that we can update buffer duration safely */
      self->to_process = gst_buffer_make_writable (self->to_process);
      if (GST_CLOCK_TIME_IS_VALID (duration)) {
        GST_BUFFER_DURATION (self->to_process) = duration;
      } else {
        GST_BUFFER_DURATION (self->to_process) = self->default_buffer_duration;
      }

      /* Bonus points, DTS doesn't make sense for raw video frame */
      GST_BUFFER_PTS (self->to_process) = cur_timestmap;
      GST_BUFFER_DTS (self->to_process) = GST_CLOCK_TIME_NONE;

      /* And mark the number of output frames for this input frame */
      self->num_output_per_input = 2;
    } else {
      self->num_output_per_input = 1;
    }

    self->first_output = TRUE;
  }

  return GST_FLOW_OK;
}
1420
1421 static GstFlowReturn
gst_d3d11_deinterlace_submit_input_buffer(GstBaseTransform * trans,gboolean is_discont,GstBuffer * input)1422 gst_d3d11_deinterlace_submit_input_buffer (GstBaseTransform * trans,
1423 gboolean is_discont, GstBuffer * input)
1424 {
1425 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1426 GstFlowReturn ret;
1427 GstBuffer *buf;
1428
1429 /* Let baseclass handle QoS first */
1430 ret = GST_BASE_TRANSFORM_CLASS (parent_class)->submit_input_buffer (trans,
1431 is_discont, input);
1432 if (ret != GST_FLOW_OK)
1433 return ret;
1434
1435 if (gst_base_transform_is_passthrough (trans))
1436 return ret;
1437
1438 /* at this moment, baseclass must hold queued_buf */
1439 g_assert (trans->queued_buf != NULL);
1440
1441 /* Check if we can use this buffer directly. If not, copy this into
1442 * our fallback buffer */
1443 buf = trans->queued_buf;
1444 trans->queued_buf = NULL;
1445
1446 buf = gst_d3d11_deinterlace_ensure_input_buffer (self, buf);
1447 if (!buf) {
1448 GST_ERROR_OBJECT (self, "Invalid input buffer");
1449 return GST_FLOW_ERROR;
1450 }
1451
1452 return gst_d3d11_deinterlace_submit_future_frame (self, buf);
1453 }
1454
1455 static ID3D11VideoProcessorOutputView *
gst_d3d11_deinterace_get_pov_from_buffer(GstD3D11Deinterlace * self,GstBuffer * buffer)1456 gst_d3d11_deinterace_get_pov_from_buffer (GstD3D11Deinterlace * self,
1457 GstBuffer * buffer)
1458 {
1459 GstMemory *mem;
1460 GstD3D11Memory *dmem;
1461 ID3D11VideoProcessorOutputView *pov;
1462
1463 if (gst_buffer_n_memory (buffer) != 1) {
1464 GST_WARNING_OBJECT (self, "Output buffer has more than one memory");
1465 return NULL;
1466 }
1467
1468 mem = gst_buffer_peek_memory (buffer, 0);
1469 if (!gst_is_d3d11_memory (mem)) {
1470 GST_WARNING_OBJECT (self, "Output buffer is holding non-D3D11 memory");
1471 return NULL;
1472 }
1473
1474 dmem = (GstD3D11Memory *) mem;
1475 if (dmem->device != self->device) {
1476 GST_WARNING_OBJECT (self,
1477 "Output D3D11 memory was allocated by other device");
1478 return NULL;
1479 }
1480
1481 pov = gst_d3d11_memory_get_processor_output_view (dmem,
1482 self->video_device, self->video_enum);
1483 if (!pov) {
1484 GST_WARNING_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
1485 return NULL;
1486 }
1487
1488 return pov;
1489 }
1490
1491 static GstBuffer *
gst_d3d11_deinterlace_ensure_output_buffer(GstD3D11Deinterlace * self,GstBuffer * output)1492 gst_d3d11_deinterlace_ensure_output_buffer (GstD3D11Deinterlace * self,
1493 GstBuffer * output)
1494 {
1495 GstD3D11Memory *dmem;
1496 ID3D11VideoProcessorOutputView *pov;
1497 GstBuffer *new_buf = NULL;
1498
1499 pov = gst_d3d11_deinterace_get_pov_from_buffer (self, output);
1500 if (pov)
1501 return output;
1502
1503 if (!self->fallback_out_pool ||
1504 !gst_buffer_pool_set_active (self->fallback_out_pool, TRUE) ||
1505 gst_buffer_pool_acquire_buffer (self->fallback_out_pool, &new_buf,
1506 NULL) != GST_FLOW_OK) {
1507 GST_ERROR_OBJECT (self, "Fallback output buffer is unavailable");
1508 gst_buffer_unref (output);
1509
1510 return NULL;
1511 }
1512
1513 dmem = (GstD3D11Memory *) gst_buffer_peek_memory (new_buf, 0);
1514 pov = gst_d3d11_memory_get_processor_output_view (dmem,
1515 self->video_device, self->video_enum);
1516 if (!pov) {
1517 GST_ERROR_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
1518 gst_buffer_unref (new_buf);
1519 gst_buffer_unref (output);
1520
1521 return NULL;
1522 }
1523
1524 /* copy metadata, default implemenation of baseclass will copy everything
1525 * what we need */
1526 GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata
1527 (GST_BASE_TRANSFORM_CAST (self), output, new_buf);
1528
1529 gst_buffer_unref (output);
1530
1531 return new_buf;
1532 }
1533
1534 static GstFlowReturn
gst_d3d11_deinterlace_submit_past_frame(GstD3D11Deinterlace * self,GstBuffer * buffer)1535 gst_d3d11_deinterlace_submit_past_frame (GstD3D11Deinterlace * self,
1536 GstBuffer * buffer)
1537 {
1538 /* push head and pop tail, so that head frame can be the nearest frame
1539 * of current frame */
1540 g_queue_push_head (&self->past_frame_queue, buffer);
1541 while (g_queue_get_length (&self->past_frame_queue) > self->max_past_frames) {
1542 GstBuffer *to_drop =
1543 (GstBuffer *) g_queue_pop_tail (&self->past_frame_queue);
1544
1545 if (to_drop)
1546 gst_buffer_unref (to_drop);
1547 }
1548
1549 return GST_FLOW_OK;
1550 }
1551
/* Produces one output buffer per call from the pending input frame
 * (self->to_process). For non-blend methods an input frame yields two output
 * frames (one per field); this function tracks which field is being emitted,
 * splits the input duration across both fields, and moves the input frame to
 * the past-reference queue once fully consumed.
 *
 * Returns: GST_FLOW_OK with *outbuf set, GST_FLOW_OK with *outbuf == NULL
 * when nothing is pending, or an error. */
static GstFlowReturn
gst_d3d11_deinterlace_generate_output (GstBaseTransform * trans,
    GstBuffer ** outbuf)
{
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *inbuf;
  GstBuffer *buf = NULL;

  if (gst_base_transform_is_passthrough (trans)) {
    return GST_BASE_TRANSFORM_CLASS (parent_class)->generate_output (trans,
        outbuf);
  }

  /* Nothing pending to process */
  *outbuf = NULL;
  inbuf = self->to_process;
  if (inbuf == NULL)
    return GST_FLOW_OK;

  ret =
      GST_BASE_TRANSFORM_CLASS (parent_class)->prepare_output_buffer (trans,
      inbuf, &buf);

  if (ret != GST_FLOW_OK || !buf) {
    GST_WARNING_OBJECT (trans, "could not get buffer from pool: %s",
        gst_flow_get_name (ret));

    return ret;
  }

  g_assert (inbuf != buf);

  /* Replace with a fallback buffer if downstream's buffer is not directly
   * usable as processor output */
  buf = gst_d3d11_deinterlace_ensure_output_buffer (self, buf);
  if (!buf) {
    GST_ERROR_OBJECT (self, "Failed to allocate output buffer to process");

    return GST_FLOW_ERROR;
  }

  ret = gst_d3d11_deinterlace_transform (trans, inbuf, buf);
  if (ret != GST_FLOW_OK) {
    gst_buffer_unref (buf);
    return ret;
  }

  g_assert (self->num_output_per_input == 1 || self->num_output_per_input == 2);

  /* Update timestamp and buffer duration.
   * Here, PTS and duration of inbuf must be valid,
   * unless there's programing error, since we updated timestamp and duration
   * already around submit_input_buffer() */
  if (self->num_output_per_input == 2) {
    if (!GST_BUFFER_DURATION_IS_VALID (inbuf)) {
      GST_LOG_OBJECT (self, "Input buffer duration is unknown");
    } else if (!GST_BUFFER_PTS_IS_VALID (inbuf)) {
      GST_LOG_OBJECT (self, "Input buffer timestamp is unknown");
    } else {
      /* Each output field covers half of the input frame duration */
      GstClockTime duration = GST_BUFFER_DURATION (inbuf) / 2;
      gboolean second_field = FALSE;

      if (self->first_output) {
        /* For reverse playback, first output is the second field */
        if (trans->segment.rate < 0)
          second_field = TRUE;
        else
          second_field = FALSE;
      } else {
        if (trans->segment.rate < 0)
          second_field = FALSE;
        else
          second_field = TRUE;
      }

      GST_BUFFER_DURATION (buf) = duration;
      /* Second field starts half a frame later */
      if (second_field) {
        GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (buf) + duration;
      }
    }
  }

  *outbuf = buf;
  self->first_output = FALSE;
  self->num_transformed++;
  /* https://docs.microsoft.com/en-us/windows/win32/api/d3d12video/ns-d3d12video-d3d12_video_process_input_stream_rate */
  if (self->method == GST_D3D11_DEINTERLACE_METHOD_BLEND) {
    self->input_index += 2;
  } else {
    self->input_index++;
  }

  if (self->num_output_per_input <= self->num_transformed) {
    /* Move processed frame to past_frame queue */
    gst_d3d11_deinterlace_submit_past_frame (self, self->to_process);
    self->to_process = NULL;
  }

  return ret;
}
1650
/* Runs the actual deinterlacing: determines the D3D11 frame format from the
 * negotiated caps and per-buffer flags, gathers past/future reference views
 * for advanced methods, and performs VideoProcessorBlt under the device lock.
 *
 * Returns: GST_FLOW_OK on success, GST_FLOW_ERROR when views are unavailable
 * or the blit fails. */
static GstFlowReturn
gst_d3d11_deinterlace_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  ID3D11VideoProcessorInputView *piv;
  ID3D11VideoProcessorOutputView *pov;
  /* NOTE(review): "frame_foramt" is a typo'd identifier kept as-is */
  D3D11_VIDEO_FRAME_FORMAT frame_foramt = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
  D3D11_VIDEO_PROCESSOR_STREAM proc_stream = { 0, };
  ID3D11VideoProcessorInputView *future_surfaces[MAX_NUM_REFERENCES] =
      { NULL, };
  ID3D11VideoProcessorInputView *past_surfaces[MAX_NUM_REFERENCES] = { NULL, };
  guint future_frames = 0;
  guint past_frames = 0;
  HRESULT hr;
  guint i;

  /* Input/output buffer must be holding valid D3D11 memory here,
   * as we checked it already in submit_input_buffer() and generate_output() */
  piv = gst_d3d11_deinterace_get_piv_from_buffer (self, inbuf);
  if (!piv) {
    GST_ERROR_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
    return GST_FLOW_ERROR;
  }

  pov = gst_d3d11_deinterace_get_pov_from_buffer (self, outbuf);
  if (!pov) {
    GST_ERROR_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
    return GST_FLOW_ERROR;
  }

  /* Check field order */
  if (GST_VIDEO_INFO_INTERLACE_MODE (&self->in_info) ==
      GST_VIDEO_INTERLACE_MODE_MIXED ||
      (GST_VIDEO_INFO_INTERLACE_MODE (&self->in_info) ==
          GST_VIDEO_INTERLACE_MODE_INTERLEAVED &&
          GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
          GST_VIDEO_FIELD_ORDER_UNKNOWN)) {
    /* Caps don't pin the field order, so decide per buffer from flags */
    if (!GST_BUFFER_FLAG_IS_SET (inbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED)) {
      frame_foramt = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    } else if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_VIDEO_BUFFER_FLAG_TFF)) {
      frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
    } else {
      frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
    }
  } else if (GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
      GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST) {
    frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  } else if (GST_VIDEO_INFO_FIELD_ORDER (&self->in_info) ==
      GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST) {
    frame_foramt = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_BOTTOM_FIELD_FIRST;
  }

  if (frame_foramt == D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE) {
    /* Progressive stream will produce only one frame per frame */
    self->num_output_per_input = 1;
  } else if (self->method != GST_D3D11_DEINTERLACE_METHOD_BLEND &&
      self->method != GST_D3D11_DEINTERLACE_METHOD_BOB) {
    /* Fill reference frames */
    for (i = 0; i < g_queue_get_length (&self->future_frame_queue) &&
        i < G_N_ELEMENTS (future_surfaces); i++) {
      GstBuffer *future_buf;
      ID3D11VideoProcessorInputView *future_piv;

      future_buf =
          (GstBuffer *) g_queue_peek_nth (&self->future_frame_queue, i);
      future_piv = gst_d3d11_deinterace_get_piv_from_buffer (self, future_buf);
      if (!future_piv) {
        GST_WARNING_OBJECT (self,
            "Couldn't get ID3D11VideoProcessorInputView from future "
            "reference %d", i);
        break;
      }

      future_surfaces[i] = future_piv;
      future_frames++;
    }

    for (i = 0; i < g_queue_get_length (&self->past_frame_queue) &&
        i < G_N_ELEMENTS (past_surfaces); i++) {
      GstBuffer *past_buf;
      ID3D11VideoProcessorInputView *past_piv;

      past_buf = (GstBuffer *) g_queue_peek_nth (&self->past_frame_queue, i);
      past_piv = gst_d3d11_deinterace_get_piv_from_buffer (self, past_buf);
      if (!past_piv) {
        GST_WARNING_OBJECT (self,
            "Couldn't get ID3D11VideoProcessorInputView from past "
            "reference %d", i);
        break;
      }

      past_surfaces[i] = past_piv;
      past_frames++;
    }
  }

  proc_stream.Enable = TRUE;
  proc_stream.pInputSurface = piv;
  proc_stream.InputFrameOrField = self->input_index;
  /* FIXME: This is wrong for inverse telecine case */
  /* OutputIndex == 0 for the first field, and 1 for the second field */
  if (self->num_output_per_input == 2) {
    if (trans->segment.rate < 0.0) {
      /* Process the second frame first in case of reverse playback */
      proc_stream.OutputIndex = self->first_output ? 1 : 0;
    } else {
      proc_stream.OutputIndex = self->first_output ? 0 : 1;
    }
  } else {
    proc_stream.OutputIndex = 0;
  }

  if (future_frames) {
    proc_stream.FutureFrames = future_frames;
    proc_stream.ppFutureSurfaces = future_surfaces;
  }

  if (past_frames) {
    proc_stream.PastFrames = past_frames;
    proc_stream.ppPastSurfaces = past_surfaces;
  }

  /* Submit the blit under the device lock */
  gst_d3d11_device_lock (self->device);
  self->video_context->VideoProcessorSetStreamFrameFormat (self->video_proc, 0,
      frame_foramt);

  hr = self->video_context->VideoProcessorBlt (self->video_proc, pov, 0,
      1, &proc_stream);
  gst_d3d11_device_unlock (self->device);

  if (!gst_d3d11_result (hr, self->device)) {
    GST_ERROR_OBJECT (self, "Failed to perform deinterlacing");
    return GST_FLOW_ERROR;
  }

  return GST_FLOW_OK;
}
1789
1790 static gboolean
gst_d3d11_deinterlace_sink_event(GstBaseTransform * trans,GstEvent * event)1791 gst_d3d11_deinterlace_sink_event (GstBaseTransform * trans, GstEvent * event)
1792 {
1793 GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
1794
1795 switch (GST_EVENT_TYPE (event)) {
1796 case GST_EVENT_STREAM_START:
1797 /* stream-start means discont stream from previous one. Drain pending
1798 * frame if any */
1799 GST_DEBUG_OBJECT (self, "Have stream-start, drain frames if any");
1800 gst_d3d11_deinterlace_drain (self);
1801 break;
1802 case GST_EVENT_CAPS:{
1803 GstPad *sinkpad = GST_BASE_TRANSFORM_SINK_PAD (trans);
1804 GstCaps *prev_caps;
1805
1806 prev_caps = gst_pad_get_current_caps (sinkpad);
1807 if (prev_caps) {
1808 GstCaps *caps;
1809 gst_event_parse_caps (event, &caps);
1810 /* If caps is updated, drain pending frames */
1811 if (!gst_caps_is_equal (prev_caps, caps)) {
1812 GST_DEBUG_OBJECT (self, "Caps updated from %" GST_PTR_FORMAT " to %"
1813 GST_PTR_FORMAT, prev_caps, caps);
1814 gst_d3d11_deinterlace_drain (self);
1815 }
1816
1817 gst_caps_unref (prev_caps);
1818 }
1819 break;
1820 }
1821 case GST_EVENT_SEGMENT:
1822 /* new segment would mean that temporal discontinuity */
1823 case GST_EVENT_SEGMENT_DONE:
1824 case GST_EVENT_EOS:
1825 GST_DEBUG_OBJECT (self, "Have event %s, drain frames if any",
1826 GST_EVENT_TYPE_NAME (event));
1827 gst_d3d11_deinterlace_drain (self);
1828 break;
1829 case GST_EVENT_FLUSH_STOP:
1830 GST_D3D11_DEINTERLACE_LOCK (self);
1831 gst_d3d11_deinterlace_reset_history (self);
1832 GST_D3D11_DEINTERLACE_UNLOCK (self);
1833 break;
1834 default:
1835 break;
1836 }
1837
1838 return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
1839 }
1840
/* Inspects each input buffer before processing. When the buffer's memory
 * lives on a different GstD3D11Device object that belongs to the same DXGI
 * adapter, this element migrates to that device (draining first) and
 * re-applies the current caps so processing objects are recreated. */
static void
gst_d3d11_deinterlace_before_transform (GstBaseTransform * trans,
    GstBuffer * buffer)
{
  GstD3D11Deinterlace *self = GST_D3D11_DEINTERLACE (trans);
  GstD3D11DeinterlaceClass *klass = GST_D3D11_DEINTERLACE_GET_CLASS (self);
  GstD3D11Memory *dmem;
  GstMemory *mem;
  GstCaps *in_caps = NULL;
  GstCaps *out_caps = NULL;
  guint adapter = 0;

  /* Only the first memory is checked; non-D3D11 memory is a hard error here */
  mem = gst_buffer_peek_memory (buffer, 0);
  if (!gst_is_d3d11_memory (mem)) {
    GST_ELEMENT_ERROR (self, CORE, FAILED, (NULL), ("Invalid memory"));
    return;
  }

  dmem = GST_D3D11_MEMORY_CAST (mem);
  /* Same device, nothing to do */
  if (dmem->device == self->device)
    return;

  g_object_get (dmem->device, "adapter", &adapter, NULL);
  /* We have per-GPU deinterlace elements because of different capability
   * per GPU. so, cannot accept other GPU at the moment */
  if (adapter != klass->adapter)
    return;

  GST_INFO_OBJECT (self, "Updating device %" GST_PTR_FORMAT " -> %"
      GST_PTR_FORMAT, self->device, dmem->device);

  /* Drain buffers before updating device */
  gst_d3d11_deinterlace_drain (self);

  /* Swap device reference; old device was owned by us */
  gst_object_unref (self->device);
  self->device = (GstD3D11Device *) gst_object_ref (dmem->device);

  /* Without negotiated caps on both pads there is nothing to re-configure */
  in_caps = gst_pad_get_current_caps (GST_BASE_TRANSFORM_SINK_PAD (trans));
  if (!in_caps) {
    GST_WARNING_OBJECT (self, "sinkpad has null caps");
    goto out;
  }

  out_caps = gst_pad_get_current_caps (GST_BASE_TRANSFORM_SRC_PAD (trans));
  if (!out_caps) {
    GST_WARNING_OBJECT (self, "Has no configured output caps");
    goto out;
  }

  /* Recreates the video processor objects on the new device */
  gst_d3d11_deinterlace_set_caps (trans, in_caps, out_caps);

  /* Mark reconfigure so that we can update pool */
  gst_base_transform_reconfigure_src (trans);

out:
  gst_clear_caps (&in_caps);
  gst_clear_caps (&out_caps);

  return;
}
1902
/* FIXME: might be job of basetransform */
/* Flushes all queued future-reference frames downstream and resets the
 * deinterlacing history. Holds the object lock, but drops it around
 * gst_pad_push() so downstream can re-enter without deadlocking.
 * Returns the last flow return from output generation/pushing. */
static GstFlowReturn
gst_d3d11_deinterlace_drain (GstD3D11Deinterlace * self)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (self);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *outbuf = NULL;

  GST_D3D11_DEINTERLACE_LOCK (self);
  if (gst_base_transform_is_passthrough (trans)) {
    /* If we were passthrough, nothing to do */
    goto done;
  } else if (!g_queue_get_length (&self->future_frame_queue)) {
    /* No pending data, nothing to do */
    goto done;
  }

  while (g_queue_get_length (&self->future_frame_queue)) {
    /* NULL input promotes the oldest queued future frame to "current" */
    gst_d3d11_deinterlace_submit_future_frame (self, NULL);
    if (!self->to_process)
      break;

    /* Generate and push every output field/frame produced by this input */
    do {
      outbuf = NULL;

      ret = gst_d3d11_deinterlace_generate_output (trans, &outbuf);
      if (outbuf != NULL) {
        /* Release lock during push buffer */
        GST_D3D11_DEINTERLACE_UNLOCK (self);
        ret = gst_pad_push (trans->srcpad, outbuf);
        GST_D3D11_DEINTERLACE_LOCK (self);
      }
    } while (ret == GST_FLOW_OK && outbuf != NULL);
  }

done:
  /* History is cleared even on early-out so the next stream starts fresh */
  gst_d3d11_deinterlace_reset_history (self);
  GST_D3D11_DEINTERLACE_UNLOCK (self);

  return ret;
}
1944
1945 /**
1946 * SECTION:element-d3d11deinterlace
1947 * @title: d3d11deinterlace
1948 * @short_description: A Direct3D11 based deinterlace element
1949 *
1950 * Deinterlacing interlaced video frames to progressive video frames by using
1951 * ID3D11VideoProcessor API.
1952 *
1953 * ## Example launch line
1954 * ```
1955 * gst-launch-1.0 filesrc location=/path/to/h264/file ! parsebin ! d3d11h264dec ! d3d11deinterlace ! d3d11videosink
1956 * ```
1957 *
1958 * Since: 1.20
1959 *
1960 */
1961
/* GstD3D11DeinterlaceBin */
/* Property ids exposed by the bin; each one is forwarded to the wrapped
 * deinterlace element (see the bin's set/get_property below) */
enum
{
  PROP_BIN_0,
  /* basetransform */
  PROP_BIN_QOS,
  /* deinterlace */
  PROP_BIN_ADAPTER,
  PROP_BIN_DEVICE_ID,
  PROP_BIN_VENDOR_ID,
  PROP_BIN_METHOD,
  PROP_BIN_SUPPORTED_METHODS,
};

/* Wrapper bin: upload -> convert -> deinterlace -> convert -> download,
 * so the deinterlace element only ever sees formats it supports */
typedef struct _GstD3D11DeinterlaceBin
{
  GstBin parent;

  /* ghost pads proxying upload's sink and download's src */
  GstPad *sinkpad;
  GstPad *srcpad;

  GstElement *deinterlace;
  GstElement *in_convert;
  GstElement *out_convert;
  GstElement *upload;
  GstElement *download;
} GstD3D11DeinterlaceBin;

typedef struct _GstD3D11DeinterlaceBinClass
{
  GstBinClass parent_class;

  /* DXGI adapter index this per-GPU class is bound to */
  guint adapter;
  /* GType of the wrapped per-GPU deinterlace element */
  GType child_type;
} GstD3D11DeinterlaceBinClass;

static GstElementClass *bin_parent_class = NULL;
#define GST_D3D11_DEINTERLACE_BIN(object) ((GstD3D11DeinterlaceBin *) (object))
#define GST_D3D11_DEINTERLACE_BIN_GET_CLASS(object) \
    (G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object), \
    GstD3D11DeinterlaceBinClass))

/* Caps template helpers; resolution range mirrors the element's probing
 * limit (see gst_d3d11_deinterlace_register) */
#define GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE(format) \
    "video/x-raw, " \
    "format = (string) " format ", " \
    "width = (int) [1, 8192], " \
    "height = (int) [1, 8192] "

#define GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES(features,format) \
    "video/x-raw(" features "), " \
    "format = (string) " format ", " \
    "width = (int) [1, 8192], " \
    "height = (int) [1, 8192] "
2015
/* Bin sink template: accepts D3D11 memory (with or without overlay
 * composition meta) as well as plain system memory */
static GstStaticPadTemplate bin_sink_template_caps =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SINK_FORMATS) "; "
        GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
            GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
            GST_D3D11_SINK_FORMATS) "; "
        GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE (GST_D3D11_SINK_FORMATS) "; "
        GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY ","
            GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
            GST_D3D11_SINK_FORMATS)
    ));

/* Bin src template: same feature combinations with the src format set */
static GstStaticPadTemplate bin_src_template_caps =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SRC_FORMATS) "; "
        GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
            GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
            GST_D3D11_SRC_FORMATS) "; "
        GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE (GST_D3D11_SRC_FORMATS) "; "
        GST_D3D11_DEINTERLACE_BIN_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY ","
            GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
            GST_D3D11_SRC_FORMATS)
    ));

static void gst_d3d11_deinterlace_bin_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_d3d11_deinterlace_bin_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
2054
2055 static void
gst_d3d11_deinterlace_bin_class_init(GstD3D11DeinterlaceBinClass * klass,gpointer data)2056 gst_d3d11_deinterlace_bin_class_init (GstD3D11DeinterlaceBinClass * klass,
2057 gpointer data)
2058 {
2059 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
2060 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
2061 GstD3D11DeinterlaceClassData *cdata = (GstD3D11DeinterlaceClassData *) data;
2062 gchar *long_name;
2063
2064 bin_parent_class = (GstElementClass *) g_type_class_peek_parent (klass);
2065
2066 gobject_class->get_property = gst_d3d11_deinterlace_bin_get_property;
2067 gobject_class->set_property = gst_d3d11_deinterlace_bin_set_property;
2068
2069 /* basetransform */
2070 g_object_class_install_property (gobject_class, PROP_BIN_QOS,
2071 g_param_spec_boolean ("qos", "QoS", "Handle Quality-of-Service events",
2072 FALSE, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
2073
2074 /* deinterlace */
2075 g_object_class_install_property (gobject_class, PROP_BIN_ADAPTER,
2076 g_param_spec_uint ("adapter", "Adapter",
2077 "DXGI Adapter index for creating device",
2078 0, G_MAXUINT32, cdata->adapter,
2079 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2080 g_object_class_install_property (gobject_class, PROP_BIN_DEVICE_ID,
2081 g_param_spec_uint ("device-id", "Device Id",
2082 "DXGI Device ID", 0, G_MAXUINT32, 0,
2083 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2084 g_object_class_install_property (gobject_class, PROP_BIN_VENDOR_ID,
2085 g_param_spec_uint ("vendor-id", "Vendor Id",
2086 "DXGI Vendor ID", 0, G_MAXUINT32, 0,
2087 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2088 g_object_class_install_property (gobject_class, PROP_BIN_METHOD,
2089 g_param_spec_flags ("method", "Method",
2090 "Deinterlace Method. Use can set multiple methods as a flagset "
2091 "and element will select one of method automatically. "
2092 "If deinterlacing device failed to deinterlace with given mode, "
2093 "fallback might happen by the device",
2094 GST_TYPE_D3D11_DEINTERLACE_METHOD, cdata->device_caps.default_method,
2095 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
2096 GST_PARAM_MUTABLE_READY)));
2097 g_object_class_install_property (gobject_class, PROP_BIN_SUPPORTED_METHODS,
2098 g_param_spec_flags ("supported-methods", "Supported Methods",
2099 "Set of supported deinterlace methods by device",
2100 GST_TYPE_D3D11_DEINTERLACE_METHOD,
2101 cdata->device_caps.supported_methods,
2102 (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
2103
2104 long_name = g_strdup_printf ("Direct3D11 %s Deinterlacer Bin",
2105 cdata->description);
2106 gst_element_class_set_metadata (element_class, long_name,
2107 "Filter/Effect/Video/Deinterlace/Hardware",
2108 "A Direct3D11 based deinterlacer bin",
2109 "Seungha Yang <seungha@centricular.com>");
2110 g_free (long_name);
2111
2112 gst_element_class_add_static_pad_template (element_class,
2113 &bin_sink_template_caps);
2114 gst_element_class_add_static_pad_template (element_class,
2115 &bin_src_template_caps);
2116
2117 klass->adapter = cdata->adapter;
2118 klass->child_type = cdata->deinterlace_type;
2119
2120 gst_d3d11_deinterlace_class_data_unref (cdata);
2121 }
2122
2123 static void
gst_d3d11_deinterlace_bin_init(GstD3D11DeinterlaceBin * self)2124 gst_d3d11_deinterlace_bin_init (GstD3D11DeinterlaceBin * self)
2125 {
2126 GstD3D11DeinterlaceBinClass *klass =
2127 GST_D3D11_DEINTERLACE_BIN_GET_CLASS (self);
2128 GstPad *pad;
2129
2130 self->deinterlace = (GstElement *) g_object_new (klass->child_type,
2131 "name", "deinterlace", NULL);
2132 self->in_convert = gst_element_factory_make ("d3d11colorconvert", NULL);
2133 self->out_convert = gst_element_factory_make ("d3d11colorconvert", NULL);
2134 self->upload = gst_element_factory_make ("d3d11upload", NULL);
2135 self->download = gst_element_factory_make ("d3d11download", NULL);
2136
2137 /* Specify DXGI adapter index to use */
2138 g_object_set (G_OBJECT (self->in_convert), "adapter", klass->adapter, NULL);
2139 g_object_set (G_OBJECT (self->out_convert), "adapter", klass->adapter, NULL);
2140 g_object_set (G_OBJECT (self->upload), "adapter", klass->adapter, NULL);
2141 g_object_set (G_OBJECT (self->download), "adapter", klass->adapter, NULL);
2142
2143 gst_bin_add_many (GST_BIN_CAST (self), self->upload, self->in_convert,
2144 self->deinterlace, self->out_convert, self->download, NULL);
2145 gst_element_link_many (self->upload, self->in_convert, self->deinterlace,
2146 self->out_convert, self->download, NULL);
2147
2148 pad = gst_element_get_static_pad (self->upload, "sink");
2149 self->sinkpad = gst_ghost_pad_new ("sink", pad);
2150 gst_element_add_pad (GST_ELEMENT_CAST (self), self->sinkpad);
2151 gst_object_unref (pad);
2152
2153 pad = gst_element_get_static_pad (self->download, "src");
2154 self->srcpad = gst_ghost_pad_new ("src", pad);
2155 gst_element_add_pad (GST_ELEMENT_CAST (self), self->srcpad);
2156 gst_object_unref (pad);
2157 }
2158
2159 static void
gst_d3d11_deinterlace_bin_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)2160 gst_d3d11_deinterlace_bin_set_property (GObject * object, guint prop_id,
2161 const GValue * value, GParamSpec * pspec)
2162 {
2163 GstD3D11DeinterlaceBin *self = GST_D3D11_DEINTERLACE_BIN (object);
2164
2165 g_object_set_property (G_OBJECT (self->deinterlace), pspec->name, value);
2166 }
2167
2168 static void
gst_d3d11_deinterlace_bin_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)2169 gst_d3d11_deinterlace_bin_get_property (GObject * object, guint prop_id,
2170 GValue * value, GParamSpec * pspec)
2171 {
2172 GstD3D11DeinterlaceBin *self = GST_D3D11_DEINTERLACE_BIN (object);
2173
2174 g_object_get_property (G_OBJECT (self->deinterlace), pspec->name, value);
2175 }
2176
/* Probes the given device's ID3D11VideoProcessor deinterlacing capability
 * and, when usable, registers two features into @plugin:
 * - a per-GPU "d3d11deinterlaceelement" (GST_RANK_NONE), and
 * - a "d3d11deinterlace" wrapper bin at @rank (lowered by one for
 *   non-default adapters).
 * Returns early (registering nothing) whenever a required interface,
 * method, or format is unavailable. */
void
gst_d3d11_deinterlace_register (GstPlugin * plugin, GstD3D11Device * device,
    guint rank)
{
  GType type;
  GType bin_type;
  gchar *type_name;
  gchar *feature_name;
  guint index = 0;
  GTypeInfo type_info = {
    sizeof (GstD3D11DeinterlaceClass),
    NULL,
    NULL,
    (GClassInitFunc) gst_d3d11_deinterlace_class_init,
    NULL,
    NULL,
    sizeof (GstD3D11Deinterlace),
    0,
    (GInstanceInitFunc) gst_d3d11_deinterlace_init,
  };
  GTypeInfo bin_type_info = {
    sizeof (GstD3D11DeinterlaceBinClass),
    NULL,
    NULL,
    (GClassInitFunc) gst_d3d11_deinterlace_bin_class_init,
    NULL,
    NULL,
    sizeof (GstD3D11DeinterlaceBin),
    0,
    (GInstanceInitFunc) gst_d3d11_deinterlace_bin_init,
  };
  GstCaps *sink_caps = NULL;
  GstCaps *src_caps = NULL;
  GstCaps *caps = NULL;
  GstCapsFeatures *caps_features;
  ID3D11Device *device_handle;
  ID3D11DeviceContext *context_handle;
  /* *INDENT-OFF* */
  ComPtr<ID3D11VideoDevice> video_device;
  ComPtr<ID3D11VideoContext> video_context;
  ComPtr<ID3D11VideoProcessorEnumerator> video_proc_enum;
  ComPtr<ID3D11VideoProcessorEnumerator1> video_proc_enum1;
  /* *INDENT-ON* */
  HRESULT hr;
  D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
  D3D11_VIDEO_PROCESSOR_CAPS proc_caps = { 0, };
  UINT supported_methods = 0;
  GstD3D11DeinterlaceMethod default_method;
  gboolean blend;
  gboolean bob;
  gboolean adaptive;
  gboolean mocomp;
  /* NOTE: processor might be able to handle other formats.
   * However, not all YUV formats can be used for render target.
   * For instance, DXGI_FORMAT_Y210 and DXGI_FORMAT_Y410 formats cannot be
   * render target. In practice, interlaced stream would output of video
   * decoders, so NV12/P010/P016 can cover most of real-world use case.
   */
  DXGI_FORMAT formats_to_check[] = {
    DXGI_FORMAT_NV12,           /* NV12 */
    DXGI_FORMAT_P010,           /* P010_10LE */
    DXGI_FORMAT_P016,           /* P016_LE */
  };
  GValue *supported_formats = NULL;
  GstD3D11DeinterlaceClassData *cdata;
  guint max_past_frames = 0;
  guint max_future_frames = 0;
  guint i;

  /* Query the video interfaces off the plain device/context handles;
   * absence of either means no video processing support at all */
  device_handle = gst_d3d11_device_get_device_handle (device);
  context_handle = gst_d3d11_device_get_device_context_handle (device);

  hr = device_handle->QueryInterface (IID_PPV_ARGS (&video_device));
  if (!gst_d3d11_result (hr, device))
    return;

  hr = context_handle->QueryInterface (IID_PPV_ARGS (&video_context));
  if (!gst_d3d11_result (hr, device))
    return;

  /* Dummy 320x240 interlaced description, only used to obtain an
   * enumerator for capability queries */
  memset (&desc, 0, sizeof (D3D11_VIDEO_PROCESSOR_CONTENT_DESC));
  desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
  desc.InputWidth = 320;
  desc.InputHeight = 240;
  desc.OutputWidth = 320;
  desc.OutputHeight = 240;
  desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;

  hr = video_device->CreateVideoProcessorEnumerator (&desc, &video_proc_enum);
  if (!gst_d3d11_result (hr, device))
    return;

  /* We need ID3D11VideoProcessorEnumerator1 interface to check conversion
   * capability of device via CheckVideoProcessorFormatConversion() */
  hr = video_proc_enum.As (&video_proc_enum1);
  if (!gst_d3d11_result (hr, device))
    return;

  hr = video_proc_enum->GetVideoProcessorCaps (&proc_caps);
  if (!gst_d3d11_result (hr, device))
    return;

  /* Union the processor caps over all rate converters, and remember the
   * largest past/future reference frame requirements */
  for (i = 0; i < proc_caps.RateConversionCapsCount; i++) {
    D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rate_conv_caps = { 0, };

    hr = video_proc_enum->GetVideoProcessorRateConversionCaps (i,
        &rate_conv_caps);
    if (FAILED (hr))
      continue;

    supported_methods |= rate_conv_caps.ProcessorCaps;
    max_past_frames = MAX (max_past_frames, rate_conv_caps.PastFrames);
    max_future_frames = MAX (max_future_frames, rate_conv_caps.FutureFrames);
  }

  if (supported_methods == 0)
    return;

#define IS_SUPPORTED_METHOD(flags,val) (flags & val) == val
  blend = IS_SUPPORTED_METHOD (supported_methods,
      GST_D3D11_DEINTERLACE_METHOD_BLEND);
  bob = IS_SUPPORTED_METHOD (supported_methods,
      GST_D3D11_DEINTERLACE_METHOD_BOB);
  adaptive = IS_SUPPORTED_METHOD (supported_methods,
      GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE);
  mocomp = IS_SUPPORTED_METHOD (supported_methods,
      GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION);
#undef IS_SUPPORTED_METHOD

  if (!blend && !bob && !adaptive && !mocomp)
    return;

  /* Drop all not supported methods from flags */
  supported_methods = supported_methods &
      (GST_D3D11_DEINTERLACE_METHOD_BLEND | GST_D3D11_DEINTERLACE_METHOD_BOB |
      GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE |
      GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION);

  /* Prefer bob, it's equivalent to "linear" which is default mode of
   * software deinterlace element, also it's fallback mode
   * for our "adaptive" and "mocomp" modes. Note that since Direct3D12, "blend"
   * mode is no more supported, instead "bob" and "custom" mode are supported
   * by Direct3D12 */
  if (bob) {
    default_method = GST_D3D11_DEINTERLACE_METHOD_BOB;
  } else if (adaptive) {
    default_method = GST_D3D11_DEINTERLACE_METHOD_ADAPTVIE;
  } else if (mocomp) {
    default_method = GST_D3D11_DEINTERLACE_METHOD_MOTION_COMPENSATION;
  } else if (blend) {
    default_method = GST_D3D11_DEINTERLACE_METHOD_BLEND;
  } else {
    /* Programming error */
    g_return_if_reached ();
  }

  /* Build the list of formats the processor can both read and write,
   * and that support identity (same-format) conversion */
  for (i = 0; i < G_N_ELEMENTS (formats_to_check); i++) {
    UINT flags = 0;
    GValue val = G_VALUE_INIT;
    GstVideoFormat format;
    BOOL supported = FALSE;

    hr = video_proc_enum->CheckVideoProcessorFormat (formats_to_check[i],
        &flags);
    if (FAILED (hr))
      continue;

    /* D3D11 video processor can support other conversion at once,
     * including color format conversion.
     * But not all combinations of in/out pairs can be supported.
     * To make things simple, this element will do only deinterlacing
     * (might not be optimal in terms of processing power/resource though) */

    /* D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_INPUT = 0x1,
     * D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_OUTPUT = 0x2,
     * MinGW header might not be defining the above enum values */
    if ((flags & 0x3) != 0x3)
      continue;

    format = gst_d3d11_dxgi_format_to_gst (formats_to_check[i]);
    /* This is programming error! */
    if (format == GST_VIDEO_FORMAT_UNKNOWN) {
      GST_ERROR ("Couldn't convert DXGI format %d to video format",
          formats_to_check[i]);
      continue;
    }

    hr = video_proc_enum1->CheckVideoProcessorFormatConversion
        (formats_to_check[i], DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709,
        formats_to_check[i], DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709,
        &supported);
    if (FAILED (hr) || !supported)
      continue;

    /* Lazily create the GST_TYPE_LIST holder on first supported format */
    if (!supported_formats) {
      supported_formats = g_new0 (GValue, 1);
      g_value_init (supported_formats, GST_TYPE_LIST);
    }

    if (formats_to_check[i] == DXGI_FORMAT_P016) {
      /* This is used for P012 as well */
      g_value_init (&val, G_TYPE_STRING);
      g_value_set_static_string (&val,
          gst_video_format_to_string (GST_VIDEO_FORMAT_P012_LE));
      gst_value_list_append_and_take_value (supported_formats, &val);
    }

    g_value_init (&val, G_TYPE_STRING);
    g_value_set_static_string (&val, gst_video_format_to_string (format));
    gst_value_list_append_and_take_value (supported_formats, &val);
  }

  if (!supported_formats)
    return;

  caps = gst_caps_new_empty_simple ("video/x-raw");
  /* FIXME: Check supported resolution, it would be different from
   * supported max texture dimension */
  gst_caps_set_simple (caps,
      "width", GST_TYPE_INT_RANGE, 1, 8192,
      "height", GST_TYPE_INT_RANGE, 1, 8192, NULL);
  gst_caps_set_value (caps, "format", supported_formats);
  g_value_unset (supported_formats);
  g_free (supported_formats);

  /* TODO: Add alternating deinterlace */
  /* Element caps: D3D11 memory, with and without overlay composition meta.
   * Sink caps are identical to src caps */
  src_caps = gst_caps_copy (caps);
  caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
      NULL);
  gst_caps_set_features_simple (src_caps, caps_features);

  caps_features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY,
      GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION, NULL);
  gst_caps_set_features_simple (caps, caps_features);
  gst_caps_append (src_caps, caps);

  sink_caps = gst_caps_copy (src_caps);

  /* Caps live for the process lifetime inside class data */
  GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
  GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);

  cdata = gst_d3d11_deinterlace_class_data_new ();
  cdata->sink_caps = sink_caps;
  cdata->src_caps = src_caps;
  cdata->device_caps.supported_methods =
      (GstD3D11DeinterlaceMethod) supported_methods;
  cdata->device_caps.default_method = default_method;
  cdata->device_caps.max_past_frames = max_past_frames;
  cdata->device_caps.max_future_frames = max_future_frames;

  g_object_get (device, "adapter", &cdata->adapter,
      "device-id", &cdata->device_id, "vendor-id", &cdata->vendor_id,
      "description", &cdata->description, NULL);
  /* The element class consumes one ref, the bin class another */
  type_info.class_data = cdata;
  bin_type_info.class_data = gst_d3d11_deinterlace_class_data_ref (cdata);

  type_name = g_strdup ("GstD3D11Deinterlace");
  feature_name = g_strdup ("d3d11deinterlaceelement");

  /* Disambiguate type/feature names for secondary adapters */
  while (g_type_from_name (type_name)) {
    index++;
    g_free (type_name);
    g_free (feature_name);
    type_name = g_strdup_printf ("GstD3D11Device%dDeinterlace", index);
    feature_name = g_strdup_printf ("d3d11device%ddeinterlaceelement", index);
  }

  type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
      type_name, &type_info, (GTypeFlags) 0);
  cdata->deinterlace_type = type;

  /* Only the default-adapter variants appear in documentation */
  if (index != 0)
    gst_element_type_set_skip_documentation (type);

  if (!gst_element_register (plugin, feature_name, GST_RANK_NONE, type))
    GST_WARNING ("Failed to register plugin '%s'", type_name);

  g_free (type_name);
  g_free (feature_name);

  /* Register wrapper bin */
  index = 0;
  type_name = g_strdup ("GstD3D11DeinterlaceBin");
  feature_name = g_strdup ("d3d11deinterlace");

  while (g_type_from_name (type_name)) {
    index++;
    g_free (type_name);
    g_free (feature_name);
    type_name = g_strdup_printf ("GstD3D11Device%dDeinterlaceBin", index);
    feature_name = g_strdup_printf ("d3d11device%ddeinterlace", index);
  }

  bin_type = g_type_register_static (GST_TYPE_BIN,
      type_name, &bin_type_info, (GTypeFlags) 0);

  /* make lower rank than default device */
  if (rank > 0 && index != 0)
    rank--;

  if (index != 0)
    gst_element_type_set_skip_documentation (bin_type);

  if (!gst_element_register (plugin, feature_name, rank, bin_type))
    GST_WARNING ("Failed to register plugin '%s'", type_name);

  g_free (type_name);
  g_free (feature_name);
}
2486