1 /* GStreamer
2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) 2005-2012 David Schleef <ds@schleef.org>
4 * Copyright (C) 2012-2014 Matthew Waters <ystree00@gmail.com>
5 * Copyright (C) <2019> Seungha Yang <seungha.yang@navercorp.com>
6 * Copyright (C) <2019> Jeongki Kim <jeongki.kim@jeongki.kim>
7 * Copyright (C) 2020 Thibault Saunier <tsaunier@igalia.com>
8 *
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Library General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
13 *
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Library General Public License for more details.
18 *
19 * You should have received a copy of the GNU Library General Public
20 * License along with this library; if not, write to the
21 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
22 * Boston, MA 02110-1301, USA.
23 */
24
25 #ifdef HAVE_CONFIG_H
26 # include <config.h>
27 #endif
28
29 #include "gstd3d11convert.h"
30 #include "gstd3d11converter.h"
31 #include "gstd3d11videoprocessor.h"
32 #include "gstd3d11pluginutils.h"
33
34 GST_DEBUG_CATEGORY_STATIC (gst_d3d11_convert_debug);
35 #define GST_CAT_DEFAULT gst_d3d11_convert_debug
36
37 static GstStaticCaps sink_template_caps =
38 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
39 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SINK_FORMATS) "; "
40 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
41 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
42 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
43 GST_D3D11_SINK_FORMATS));
44
45 static GstStaticCaps src_template_caps =
46 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES
47 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY, GST_D3D11_SRC_FORMATS) "; "
48 GST_VIDEO_CAPS_MAKE_WITH_FEATURES
49 (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY ","
50 GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
51 GST_D3D11_SRC_FORMATS));
52
53 #define DEFAULT_ADD_BORDERS TRUE
54
55 struct _GstD3D11BaseConvert
56 {
57 GstD3D11BaseFilter parent;
58
59 const GstD3D11Format *in_d3d11_format;
60 const GstD3D11Format *out_d3d11_format;
61
62 ID3D11Texture2D *in_texture[GST_VIDEO_MAX_PLANES];
63 ID3D11ShaderResourceView *shader_resource_view[GST_VIDEO_MAX_PLANES];
64 guint num_input_view;
65
66 ID3D11Texture2D *out_texture[GST_VIDEO_MAX_PLANES];
67 ID3D11RenderTargetView *render_target_view[GST_VIDEO_MAX_PLANES];
68 guint num_output_view;
69
70 GstD3D11Converter *converter;
71 GstD3D11VideoProcessor *processor;
72 gboolean processor_in_use;
73
74 /* used for border rendering */
75 RECT in_rect;
76 RECT out_rect;
77
78 gint borders_h;
79 gint borders_w;
80
81 /* Updated by subclass */
82 gboolean add_borders;
83 };
84
85 /**
86 * GstD3D11BaseConvert:
87 *
88 * A baseclass implementation for d3d11 convert elements
89 *
90 * Since: 1.20
91 */
92 #define gst_d3d11_base_convert_parent_class parent_class
93 G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstD3D11BaseConvert, gst_d3d11_base_convert,
94 GST_TYPE_D3D11_BASE_FILTER,
95 GST_DEBUG_CATEGORY_INIT (gst_d3d11_convert_debug, "d3d11convert", 0,
96 "d3d11convert"));
97
98 static void gst_d3d11_base_convert_dispose (GObject * object);
99 static GstCaps *gst_d3d11_base_convert_transform_caps (GstBaseTransform *
100 trans, GstPadDirection direction, GstCaps * caps, GstCaps * filter);
101 static GstCaps *gst_d3d11_base_convert_fixate_caps (GstBaseTransform *
102 base, GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
103 static gboolean gst_d3d11_base_convert_filter_meta (GstBaseTransform * trans,
104 GstQuery * query, GType api, const GstStructure * params);
105 static gboolean
106 gst_d3d11_base_convert_propose_allocation (GstBaseTransform * trans,
107 GstQuery * decide_query, GstQuery * query);
108 static gboolean
109 gst_d3d11_base_convert_decide_allocation (GstBaseTransform * trans,
110 GstQuery * query);
111
112 static GstFlowReturn gst_d3d11_base_convert_transform (GstBaseTransform *
113 trans, GstBuffer * inbuf, GstBuffer * outbuf);
114 static gboolean gst_d3d11_base_convert_set_info (GstD3D11BaseFilter * filter,
115 GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
116 GstVideoInfo * out_info);
117
118 /* copies the given caps */
119 static GstCaps *
gst_d3d11_base_convert_caps_remove_format_info (GstCaps * caps)
121 {
122 GstStructure *st;
123 GstCapsFeatures *f;
124 gint i, n;
125 GstCaps *res;
126 GstCapsFeatures *feature =
127 gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY);
128
129 res = gst_caps_new_empty ();
130
131 n = gst_caps_get_size (caps);
132 for (i = 0; i < n; i++) {
133 st = gst_caps_get_structure (caps, i);
134 f = gst_caps_get_features (caps, i);
135
136 /* If this is already expressed by the existing caps
137 * skip this structure */
138 if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
139 continue;
140
141 st = gst_structure_copy (st);
142 /* Only remove format info for the cases when we can actually convert */
143 if (!gst_caps_features_is_any (f)
144 && gst_caps_features_is_equal (f, feature)) {
145 gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
146 NULL);
147 }
148
149 gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
150 }
151 gst_caps_features_free (feature);
152
153 return res;
154 }
155
156 static GstCaps *
gst_d3d11_base_convert_caps_rangify_size_info (GstCaps * caps)
158 {
159 GstStructure *st;
160 GstCapsFeatures *f;
161 gint i, n;
162 GstCaps *res;
163 GstCapsFeatures *feature =
164 gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY);
165
166 res = gst_caps_new_empty ();
167
168 n = gst_caps_get_size (caps);
169 for (i = 0; i < n; i++) {
170 st = gst_caps_get_structure (caps, i);
171 f = gst_caps_get_features (caps, i);
172
173 /* If this is already expressed by the existing caps
174 * skip this structure */
175 if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
176 continue;
177
178 st = gst_structure_copy (st);
/* Only rangify size info for the cases when we can actually convert */
180 if (!gst_caps_features_is_any (f)
181 && gst_caps_features_is_equal (f, feature)) {
182 gst_structure_set (st, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
183 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
184
185 /* if pixel aspect ratio, make a range of it */
186 if (gst_structure_has_field (st, "pixel-aspect-ratio")) {
187 gst_structure_set (st, "pixel-aspect-ratio",
188 GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1, NULL);
189 }
190 }
191
192 gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
193 }
194 gst_caps_features_free (feature);
195
196 return res;
197 }
198
199 static GstCaps *
gst_d3d11_base_convert_caps_remove_format_and_rangify_size_info (GstCaps * caps)
201 {
202 GstStructure *st;
203 GstCapsFeatures *f;
204 gint i, n;
205 GstCaps *res;
206 GstCapsFeatures *feature =
207 gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_D3D11_MEMORY);
208
209 res = gst_caps_new_empty ();
210
211 n = gst_caps_get_size (caps);
212 for (i = 0; i < n; i++) {
213 st = gst_caps_get_structure (caps, i);
214 f = gst_caps_get_features (caps, i);
215
216 /* If this is already expressed by the existing caps
217 * skip this structure */
218 if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
219 continue;
220
221 st = gst_structure_copy (st);
222 /* Only remove format info for the cases when we can actually convert */
223 if (!gst_caps_features_is_any (f)
224 && gst_caps_features_is_equal (f, feature)) {
225 gst_structure_set (st, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
226 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
227 /* if pixel aspect ratio, make a range of it */
228 if (gst_structure_has_field (st, "pixel-aspect-ratio")) {
229 gst_structure_set (st, "pixel-aspect-ratio",
230 GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1, NULL);
231 }
232 gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
233 NULL);
234 }
235
236 gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
237 }
238 gst_caps_features_free (feature);
239
240 return res;
241 }
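
/*
 * Illustrative example (hedged, not from the original source): for D3D11
 * memory caps such as
 *   video/x-raw(memory:D3D11Memory), format=NV12, width=1280, height=720
 * the helper above drops format/colorimetry/chroma-site, rangifies width and
 * height to [1, MAX] and, if present, rangifies pixel-aspect-ratio as well,
 * while structures with other caps features are left untouched.
 */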
242
243 static void
gst_d3d11_base_convert_class_init (GstD3D11BaseConvertClass * klass)
245 {
246 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
247 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
248 GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
249 GstD3D11BaseFilterClass *bfilter_class = GST_D3D11_BASE_FILTER_CLASS (klass);
250 GstCaps *caps;
251
252 gobject_class->dispose = gst_d3d11_base_convert_dispose;
253
254 caps = gst_d3d11_get_updated_template_caps (&sink_template_caps);
255 gst_element_class_add_pad_template (element_class,
256 gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps));
257 gst_caps_unref (caps);
258
259 caps = gst_d3d11_get_updated_template_caps (&src_template_caps);
260 gst_element_class_add_pad_template (element_class,
261 gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps));
262 gst_caps_unref (caps);
263
264 trans_class->passthrough_on_same_caps = TRUE;
265
266 trans_class->transform_caps =
267 GST_DEBUG_FUNCPTR (gst_d3d11_base_convert_transform_caps);
268 trans_class->fixate_caps =
269 GST_DEBUG_FUNCPTR (gst_d3d11_base_convert_fixate_caps);
270 trans_class->filter_meta =
271 GST_DEBUG_FUNCPTR (gst_d3d11_base_convert_filter_meta);
272 trans_class->propose_allocation =
273 GST_DEBUG_FUNCPTR (gst_d3d11_base_convert_propose_allocation);
274 trans_class->decide_allocation =
275 GST_DEBUG_FUNCPTR (gst_d3d11_base_convert_decide_allocation);
276 trans_class->transform = GST_DEBUG_FUNCPTR (gst_d3d11_base_convert_transform);
277
278 bfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_d3d11_base_convert_set_info);
279
280 gst_type_mark_as_plugin_api (GST_TYPE_D3D11_BASE_CONVERT,
281 (GstPluginAPIFlags) 0);
282 }
283
284 static void
gst_d3d11_base_convert_init (GstD3D11BaseConvert * self)
286 {
287 self->add_borders = DEFAULT_ADD_BORDERS;
288 }
289
290 static void
gst_d3d11_base_convert_clear_shader_resource (GstD3D11BaseConvert * self)
292 {
293 gint i;
294
295 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
296 GST_D3D11_CLEAR_COM (self->shader_resource_view[i]);
297 GST_D3D11_CLEAR_COM (self->render_target_view[i]);
298 }
299
300 self->num_input_view = 0;
301 self->num_output_view = 0;
302
303 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
304 GST_D3D11_CLEAR_COM (self->in_texture[i]);
305 GST_D3D11_CLEAR_COM (self->out_texture[i]);
306 }
307
308 g_clear_pointer (&self->converter, gst_d3d11_converter_free);
309 g_clear_pointer (&self->processor, gst_d3d11_video_processor_free);
310
311 self->processor_in_use = FALSE;
312 }
313
314 static void
gst_d3d11_base_convert_dispose (GObject * object)
316 {
317 GstD3D11BaseConvert *self = GST_D3D11_BASE_CONVERT (object);
318
319 gst_d3d11_base_convert_clear_shader_resource (self);
320
321 G_OBJECT_CLASS (parent_class)->dispose (object);
322 }
323
324 static GstCaps *
gst_d3d11_base_convert_transform_caps (GstBaseTransform *
326 trans, GstPadDirection direction, GstCaps * caps, GstCaps * filter)
327 {
328 GstCaps *tmp, *tmp2;
329 GstCaps *result;
330
331 /* Get all possible caps that we can transform to */
332 tmp = gst_d3d11_base_convert_caps_remove_format_and_rangify_size_info (caps);
333
334 if (filter) {
335 tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
336 gst_caps_unref (tmp);
337 tmp = tmp2;
338 }
339
340 result = tmp;
341
342 GST_DEBUG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " into %"
343 GST_PTR_FORMAT, caps, result);
344
345 return result;
346 }
347
348 /*
 * This is an incomplete matrix of input formats and a score for the preferred output
350 * format.
351 *
352 * out: RGB24 RGB16 ARGB AYUV YUV444 YUV422 YUV420 YUV411 YUV410 PAL GRAY
353 * in
354 * RGB24 0 2 1 2 2 3 4 5 6 7 8
355 * RGB16 1 0 1 2 2 3 4 5 6 7 8
356 * ARGB 2 3 0 1 4 5 6 7 8 9 10
357 * AYUV 3 4 1 0 2 5 6 7 8 9 10
358 * YUV444 2 4 3 1 0 5 6 7 8 9 10
359 * YUV422 3 5 4 2 1 0 6 7 8 9 10
360 * YUV420 4 6 5 3 2 1 0 7 8 9 10
361 * YUV411 4 6 5 3 2 1 7 0 8 9 10
362 * YUV410 6 8 7 5 4 3 2 1 0 9 10
363 * PAL 1 3 2 6 4 6 7 8 9 0 10
364 * GRAY 1 4 3 2 1 5 6 7 8 9 0
365 *
 * PAL or GRAY are never preferred; if we can, we would convert to PAL instead
 * of GRAY, though.
 * Less subsampling is preferred and, if any, preferably horizontal.
 * We would like to keep the alpha, even if we would need to do colorspace
 * conversion or lose depth.
371 */
372 #define SCORE_FORMAT_CHANGE 1
373 #define SCORE_DEPTH_CHANGE 1
374 #define SCORE_ALPHA_CHANGE 1
375 #define SCORE_CHROMA_W_CHANGE 1
376 #define SCORE_CHROMA_H_CHANGE 1
377 #define SCORE_PALETTE_CHANGE 1
378
379 #define SCORE_COLORSPACE_LOSS 2 /* RGB <-> YUV */
380 #define SCORE_DEPTH_LOSS 4 /* change bit depth */
381 #define SCORE_ALPHA_LOSS 8 /* lose the alpha channel */
#define SCORE_CHROMA_W_LOSS 16 /* horizontal subsample */
#define SCORE_CHROMA_H_LOSS 32 /* vertical subsample */
384 #define SCORE_PALETTE_LOSS 64 /* convert to palette format */
385 #define SCORE_COLOR_LOSS 128 /* convert to GRAY */
386
387 #define COLORSPACE_MASK (GST_VIDEO_FORMAT_FLAG_YUV | \
388 GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_GRAY)
389 #define ALPHA_MASK (GST_VIDEO_FORMAT_FLAG_ALPHA)
390 #define PALETTE_MASK (GST_VIDEO_FORMAT_FLAG_PALETTE)
391
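/*
 * Worked example (illustrative only, added for clarity): scoring I420 input
 * against an RGBA candidate accumulates SCORE_FORMAT_CHANGE (1) +
 * SCORE_COLORSPACE_LOSS (2) + SCORE_ALPHA_CHANGE (1) + SCORE_CHROMA_W_CHANGE
 * (1) + SCORE_CHROMA_H_CHANGE (1) = 6, while keeping I420 scores 0, so an
 * identical format always wins whenever it is available downstream.
 */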
392 /* calculate how much loss a conversion would be */
393 static void
score_value (GstBaseTransform * base, const GstVideoFormatInfo * in_info,
395 const GValue * val, gint * min_loss, const GstVideoFormatInfo ** out_info)
396 {
397 const gchar *fname;
398 const GstVideoFormatInfo *t_info;
399 guint in_flags, t_flags;
400 gint loss;
401
402 fname = g_value_get_string (val);
403 t_info = gst_video_format_get_info (gst_video_format_from_string (fname));
404 if (!t_info || t_info->format == GST_VIDEO_FORMAT_UNKNOWN)
405 return;
406
407 /* accept input format immediately without loss */
408 if (in_info == t_info) {
409 *min_loss = 0;
410 *out_info = t_info;
411 return;
412 }
413
414 loss = SCORE_FORMAT_CHANGE;
415
416 in_flags = GST_VIDEO_FORMAT_INFO_FLAGS (in_info);
417 in_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
418 in_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
419 in_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
420
421 t_flags = GST_VIDEO_FORMAT_INFO_FLAGS (t_info);
422 t_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
423 t_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
424 t_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
425
426 if ((t_flags & PALETTE_MASK) != (in_flags & PALETTE_MASK)) {
427 loss += SCORE_PALETTE_CHANGE;
428 if (t_flags & PALETTE_MASK)
429 loss += SCORE_PALETTE_LOSS;
430 }
431
432 if ((t_flags & COLORSPACE_MASK) != (in_flags & COLORSPACE_MASK)) {
433 loss += SCORE_COLORSPACE_LOSS;
434 if (t_flags & GST_VIDEO_FORMAT_FLAG_GRAY)
435 loss += SCORE_COLOR_LOSS;
436 }
437
438 if ((t_flags & ALPHA_MASK) != (in_flags & ALPHA_MASK)) {
439 loss += SCORE_ALPHA_CHANGE;
440 if (in_flags & ALPHA_MASK)
441 loss += SCORE_ALPHA_LOSS;
442 }
443
444 if ((in_info->h_sub[1]) != (t_info->h_sub[1])) {
445 loss += SCORE_CHROMA_H_CHANGE;
446 if ((in_info->h_sub[1]) < (t_info->h_sub[1]))
447 loss += SCORE_CHROMA_H_LOSS;
448 }
449 if ((in_info->w_sub[1]) != (t_info->w_sub[1])) {
450 loss += SCORE_CHROMA_W_CHANGE;
451 if ((in_info->w_sub[1]) < (t_info->w_sub[1]))
452 loss += SCORE_CHROMA_W_LOSS;
453 }
454
455 if ((in_info->bits) != (t_info->bits)) {
456 loss += SCORE_DEPTH_CHANGE;
457 if ((in_info->bits) > (t_info->bits))
458 loss += SCORE_DEPTH_LOSS + (in_info->bits - t_info->bits);
459 }
460
461 GST_DEBUG_OBJECT (base, "score %s -> %s = %d",
462 GST_VIDEO_FORMAT_INFO_NAME (in_info),
463 GST_VIDEO_FORMAT_INFO_NAME (t_info), loss);
464
465 if (loss < *min_loss) {
466 GST_DEBUG_OBJECT (base, "found new best %d", loss);
467 *out_info = t_info;
468 *min_loss = loss;
469 }
470 }
471
472 static void
gst_d3d11_base_convert_fixate_format (GstBaseTransform * trans,
474 GstCaps * caps, GstCaps * result)
475 {
476 GstStructure *ins, *outs;
477 const gchar *in_format;
478 const GstVideoFormatInfo *in_info, *out_info = NULL;
479 gint min_loss = G_MAXINT;
480 guint i, capslen;
481
482 ins = gst_caps_get_structure (caps, 0);
483 in_format = gst_structure_get_string (ins, "format");
484 if (!in_format) {
485 return;
486 }
487
488 GST_DEBUG_OBJECT (trans, "source format %s", in_format);
489
490 in_info =
491 gst_video_format_get_info (gst_video_format_from_string (in_format));
492 if (!in_info)
493 return;
494
495 outs = gst_caps_get_structure (result, 0);
496
497 capslen = gst_caps_get_size (result);
498 GST_DEBUG ("iterate %d structures", capslen);
499 for (i = 0; i < capslen; i++) {
500 GstStructure *tests;
501 const GValue *format;
502
503 tests = gst_caps_get_structure (result, i);
504 format = gst_structure_get_value (tests, "format");
505
506 /* should not happen */
507 if (format == NULL)
508 continue;
509
510 if (GST_VALUE_HOLDS_LIST (format)) {
511 gint j, len;
512
513 len = gst_value_list_get_size (format);
514 GST_DEBUG_OBJECT (trans, "have %d formats", len);
515 for (j = 0; j < len; j++) {
516 const GValue *val;
517
518 val = gst_value_list_get_value (format, j);
519 if (G_VALUE_HOLDS_STRING (val)) {
520 score_value (trans, in_info, val, &min_loss, &out_info);
521 if (min_loss == 0)
522 break;
523 }
524 }
525 } else if (G_VALUE_HOLDS_STRING (format)) {
526 score_value (trans, in_info, format, &min_loss, &out_info);
527 }
528 }
529 if (out_info)
530 gst_structure_set (outs, "format", G_TYPE_STRING,
531 GST_VIDEO_FORMAT_INFO_NAME (out_info), NULL);
532 }
533
534 static gboolean
subsampling_unchanged (GstVideoInfo * in_info, GstVideoInfo * out_info)
536 {
537 guint i;
538 const GstVideoFormatInfo *in_format, *out_format;
539
540 if (GST_VIDEO_INFO_N_COMPONENTS (in_info) !=
541 GST_VIDEO_INFO_N_COMPONENTS (out_info))
542 return FALSE;
543
544 in_format = in_info->finfo;
545 out_format = out_info->finfo;
546
547 for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (in_info); i++) {
548 if (GST_VIDEO_FORMAT_INFO_W_SUB (in_format,
549 i) != GST_VIDEO_FORMAT_INFO_W_SUB (out_format, i))
550 return FALSE;
551 if (GST_VIDEO_FORMAT_INFO_H_SUB (in_format,
552 i) != GST_VIDEO_FORMAT_INFO_H_SUB (out_format, i))
553 return FALSE;
554 }
555
556 return TRUE;
557 }
558
559 static void
transfer_colorimetry_from_input (GstBaseTransform * trans, GstCaps * in_caps,
561 GstCaps * out_caps)
562 {
563 GstStructure *out_caps_s = gst_caps_get_structure (out_caps, 0);
564 GstStructure *in_caps_s = gst_caps_get_structure (in_caps, 0);
565 gboolean have_colorimetry =
566 gst_structure_has_field (out_caps_s, "colorimetry");
567 gboolean have_chroma_site =
568 gst_structure_has_field (out_caps_s, "chroma-site");
569
570 /* If the output already has colorimetry and chroma-site, stop,
571 * otherwise try and transfer what we can from the input caps */
572 if (have_colorimetry && have_chroma_site)
573 return;
574
575 {
576 GstVideoInfo in_info, out_info;
577 const GValue *in_colorimetry =
578 gst_structure_get_value (in_caps_s, "colorimetry");
579
580 if (!gst_video_info_from_caps (&in_info, in_caps)) {
581 GST_WARNING_OBJECT (trans,
582 "Failed to convert sink pad caps to video info");
583 return;
584 }
585 if (!gst_video_info_from_caps (&out_info, out_caps)) {
586 GST_WARNING_OBJECT (trans,
587 "Failed to convert src pad caps to video info");
588 return;
589 }
590
591 if (!have_colorimetry && in_colorimetry != NULL) {
592 if ((GST_VIDEO_INFO_IS_YUV (&out_info)
593 && GST_VIDEO_INFO_IS_YUV (&in_info))
594 || (GST_VIDEO_INFO_IS_RGB (&out_info)
595 && GST_VIDEO_INFO_IS_RGB (&in_info))
596 || (GST_VIDEO_INFO_IS_GRAY (&out_info)
597 && GST_VIDEO_INFO_IS_GRAY (&in_info))) {
598 /* Can transfer the colorimetry intact from the input if it has it */
599 gst_structure_set_value (out_caps_s, "colorimetry", in_colorimetry);
600 } else {
601 gchar *colorimetry_str;
602
603 /* Changing between YUV/RGB - forward primaries and transfer function, but use
604 * default range and matrix.
 * The primaries are used for conversion between RGB and XYZ (CIE 1931 coordinates).
 * The transfer function could be another reference (e.g., HDR).
607 */
608 out_info.colorimetry.primaries = in_info.colorimetry.primaries;
609 out_info.colorimetry.transfer = in_info.colorimetry.transfer;
610
611 colorimetry_str =
612 gst_video_colorimetry_to_string (&out_info.colorimetry);
613 gst_caps_set_simple (out_caps, "colorimetry", G_TYPE_STRING,
614 colorimetry_str, NULL);
615 g_free (colorimetry_str);
616 }
617 }
618
619 /* Only YUV output needs chroma-site. If the input was also YUV and had the same chroma
620 * subsampling, transfer the siting. If the sub-sampling is changing, then the planes get
621 * scaled anyway so there's no real reason to prefer the input siting. */
622 if (!have_chroma_site && GST_VIDEO_INFO_IS_YUV (&out_info)) {
623 if (GST_VIDEO_INFO_IS_YUV (&in_info)) {
624 const GValue *in_chroma_site =
625 gst_structure_get_value (in_caps_s, "chroma-site");
626 if (in_chroma_site != NULL
627 && subsampling_unchanged (&in_info, &out_info))
628 gst_structure_set_value (out_caps_s, "chroma-site", in_chroma_site);
629 }
630 }
631 }
632 }
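
/*
 * Illustrative example (hedged, added for clarity): converting NV12 with
 * bt709 colorimetry to a fixated RGB output that has no colorimetry would,
 * per the logic above, keep the bt709 primaries and transfer characteristics
 * but fall back to the default range and matrix for RGB, and no chroma-site
 * would be transferred since the output is not YUV.
 */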
633
634 static GstCaps *
gst_d3d11_base_convert_get_fixed_format (GstBaseTransform * trans,
636 GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
637 {
638 GstCaps *result;
639
640 result = gst_caps_intersect (othercaps, caps);
641 if (gst_caps_is_empty (result)) {
642 gst_caps_unref (result);
643 result = gst_caps_copy (othercaps);
644 }
645
646 gst_d3d11_base_convert_fixate_format (trans, caps, result);
647
648 /* fixate remaining fields */
649 result = gst_caps_fixate (result);
650
651 if (direction == GST_PAD_SINK) {
652 if (gst_caps_is_subset (caps, result)) {
653 gst_caps_replace (&result, caps);
654 } else {
655 /* Try and preserve input colorimetry / chroma information */
656 transfer_colorimetry_from_input (trans, caps, result);
657 }
658 }
659
660 return result;
661 }
662
663 static GstCaps *
gst_d3d11_base_convert_fixate_size (GstBaseTransform * base,
665 GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
666 {
667 GstStructure *ins, *outs;
668 const GValue *from_par, *to_par;
669 GValue fpar = G_VALUE_INIT, tpar = G_VALUE_INIT;
670
671 othercaps = gst_caps_truncate (othercaps);
672 othercaps = gst_caps_make_writable (othercaps);
673 ins = gst_caps_get_structure (caps, 0);
674 outs = gst_caps_get_structure (othercaps, 0);
675
676 from_par = gst_structure_get_value (ins, "pixel-aspect-ratio");
677 to_par = gst_structure_get_value (outs, "pixel-aspect-ratio");
678
679 /* If we're fixating from the sinkpad we always set the PAR and
680 * assume that missing PAR on the sinkpad means 1/1 and
681 * missing PAR on the srcpad means undefined
682 */
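/* Worked example (illustrative, added for clarity): a 720x480 input with
 * PAR 8/9 has a DAR of 4/3; if the output height ends up fixed to 480 with
 * PAR 1/1, the width is fixated to 640 below to preserve that DAR. */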
683 if (direction == GST_PAD_SINK) {
684 if (!from_par) {
685 g_value_init (&fpar, GST_TYPE_FRACTION);
686 gst_value_set_fraction (&fpar, 1, 1);
687 from_par = &fpar;
688 }
689 if (!to_par) {
690 g_value_init (&tpar, GST_TYPE_FRACTION_RANGE);
691 gst_value_set_fraction_range_full (&tpar, 1, G_MAXINT, G_MAXINT, 1);
692 to_par = &tpar;
693 }
694 } else {
695 if (!to_par) {
696 g_value_init (&tpar, GST_TYPE_FRACTION);
697 gst_value_set_fraction (&tpar, 1, 1);
698 to_par = &tpar;
699
700 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
701 NULL);
702 }
703 if (!from_par) {
704 g_value_init (&fpar, GST_TYPE_FRACTION);
705 gst_value_set_fraction (&fpar, 1, 1);
706 from_par = &fpar;
707 }
708 }
709
710 /* we have both PAR but they might not be fixated */
711 {
712 gint from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d;
713 gint w = 0, h = 0;
714 gint from_dar_n, from_dar_d;
715 gint num, den;
716
717 /* from_par should be fixed */
718 g_return_val_if_fail (gst_value_is_fixed (from_par), othercaps);
719
720 from_par_n = gst_value_get_fraction_numerator (from_par);
721 from_par_d = gst_value_get_fraction_denominator (from_par);
722
723 gst_structure_get_int (ins, "width", &from_w);
724 gst_structure_get_int (ins, "height", &from_h);
725
726 gst_structure_get_int (outs, "width", &w);
727 gst_structure_get_int (outs, "height", &h);
728
729 /* if both width and height are already fixed, we can't do anything
730 * about it anymore */
731 if (w && h) {
732 guint n, d;
733
734 GST_DEBUG_OBJECT (base, "dimensions already set to %dx%d, not fixating",
735 w, h);
736 if (!gst_value_is_fixed (to_par)) {
737 if (gst_video_calculate_display_ratio (&n, &d, from_w, from_h,
738 from_par_n, from_par_d, w, h)) {
739 GST_DEBUG_OBJECT (base, "fixating to_par to %dx%d", n, d);
740 if (gst_structure_has_field (outs, "pixel-aspect-ratio"))
741 gst_structure_fixate_field_nearest_fraction (outs,
742 "pixel-aspect-ratio", n, d);
743 else if (n != d)
744 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
745 n, d, NULL);
746 }
747 }
748 goto done;
749 }
750
751 /* Calculate input DAR */
752 if (!gst_util_fraction_multiply (from_w, from_h, from_par_n, from_par_d,
753 &from_dar_n, &from_dar_d)) {
754 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
755 ("Error calculating the output scaled size - integer overflow"));
756 goto done;
757 }
758
759 GST_DEBUG_OBJECT (base, "Input DAR is %d/%d", from_dar_n, from_dar_d);
760
/* If either width or height is fixed there's not much we
 * can do either except choosing a height or width and PAR
 * that matches the DAR as well as possible
764 */
765 if (h) {
766 GstStructure *tmp;
767 gint set_w, set_par_n, set_par_d;
768
769 GST_DEBUG_OBJECT (base, "height is fixed (%d)", h);
770
771 /* If the PAR is fixed too, there's not much to do
772 * except choosing the width that is nearest to the
773 * width with the same DAR */
774 if (gst_value_is_fixed (to_par)) {
775 to_par_n = gst_value_get_fraction_numerator (to_par);
776 to_par_d = gst_value_get_fraction_denominator (to_par);
777
778 GST_DEBUG_OBJECT (base, "PAR is fixed %d/%d", to_par_n, to_par_d);
779
780 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
781 to_par_n, &num, &den)) {
782 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
783 ("Error calculating the output scaled size - integer overflow"));
784 goto done;
785 }
786
787 w = (guint) gst_util_uint64_scale_int_round (h, num, den);
788 gst_structure_fixate_field_nearest_int (outs, "width", w);
789
790 goto done;
791 }
792
793 /* The PAR is not fixed and it's quite likely that we can set
794 * an arbitrary PAR. */
795
796 /* Check if we can keep the input width */
797 tmp = gst_structure_copy (outs);
798 gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
799 gst_structure_get_int (tmp, "width", &set_w);
800
801 /* Might have failed but try to keep the DAR nonetheless by
802 * adjusting the PAR */
803 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, h, set_w,
804 &to_par_n, &to_par_d)) {
805 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
806 ("Error calculating the output scaled size - integer overflow"));
807 gst_structure_free (tmp);
808 goto done;
809 }
810
811 if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
812 gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
813 gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
814 to_par_n, to_par_d);
815 gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
816 &set_par_d);
817 gst_structure_free (tmp);
818
819 /* Check if the adjusted PAR is accepted */
820 if (set_par_n == to_par_n && set_par_d == to_par_d) {
821 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
822 set_par_n != set_par_d)
823 gst_structure_set (outs, "width", G_TYPE_INT, set_w,
824 "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
825 NULL);
826 goto done;
827 }
828
/* Otherwise scale the width to the new PAR and check if the
 * adjusted width is accepted. If all that fails we can't keep
 * the DAR */
832 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
833 set_par_n, &num, &den)) {
834 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
835 ("Error calculating the output scaled size - integer overflow"));
836 goto done;
837 }
838
839 w = (guint) gst_util_uint64_scale_int_round (h, num, den);
840 gst_structure_fixate_field_nearest_int (outs, "width", w);
841 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
842 set_par_n != set_par_d)
843 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
844 set_par_n, set_par_d, NULL);
845
846 goto done;
847 } else if (w) {
848 GstStructure *tmp;
849 gint set_h, set_par_n, set_par_d;
850
851 GST_DEBUG_OBJECT (base, "width is fixed (%d)", w);
852
853 /* If the PAR is fixed too, there's not much to do
854 * except choosing the height that is nearest to the
855 * height with the same DAR */
856 if (gst_value_is_fixed (to_par)) {
857 to_par_n = gst_value_get_fraction_numerator (to_par);
858 to_par_d = gst_value_get_fraction_denominator (to_par);
859
860 GST_DEBUG_OBJECT (base, "PAR is fixed %d/%d", to_par_n, to_par_d);
861
862 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
863 to_par_n, &num, &den)) {
864 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
865 ("Error calculating the output scaled size - integer overflow"));
866 goto done;
867 }
868
869 h = (guint) gst_util_uint64_scale_int_round (w, den, num);
870 gst_structure_fixate_field_nearest_int (outs, "height", h);
871
872 goto done;
873 }
874
875 /* The PAR is not fixed and it's quite likely that we can set
876 * an arbitrary PAR. */
877
878 /* Check if we can keep the input height */
879 tmp = gst_structure_copy (outs);
880 gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
881 gst_structure_get_int (tmp, "height", &set_h);
882
883 /* Might have failed but try to keep the DAR nonetheless by
884 * adjusting the PAR */
885 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, w,
886 &to_par_n, &to_par_d)) {
887 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
888 ("Error calculating the output scaled size - integer overflow"));
889 gst_structure_free (tmp);
890 goto done;
891 }
892 if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
893 gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
894 gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
895 to_par_n, to_par_d);
896 gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
897 &set_par_d);
898 gst_structure_free (tmp);
899
900 /* Check if the adjusted PAR is accepted */
901 if (set_par_n == to_par_n && set_par_d == to_par_d) {
902 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
903 set_par_n != set_par_d)
904 gst_structure_set (outs, "height", G_TYPE_INT, set_h,
905 "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
906 NULL);
907 goto done;
908 }
909
/* Otherwise scale the height to the new PAR and check if the
 * adjusted height is accepted. If all that fails we can't keep
 * the DAR */
913 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
914 set_par_n, &num, &den)) {
915 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
("Error calculating the output scaled size - integer overflow"));
917 goto done;
918 }
919
920 h = (guint) gst_util_uint64_scale_int_round (w, den, num);
921 gst_structure_fixate_field_nearest_int (outs, "height", h);
922 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
923 set_par_n != set_par_d)
924 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
925 set_par_n, set_par_d, NULL);
926
927 goto done;
928 } else if (gst_value_is_fixed (to_par)) {
929 GstStructure *tmp;
930 gint set_h, set_w, f_h, f_w;
931
932 to_par_n = gst_value_get_fraction_numerator (to_par);
933 to_par_d = gst_value_get_fraction_denominator (to_par);
934
935 /* Calculate scale factor for the PAR change */
936 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_n,
937 to_par_d, &num, &den)) {
938 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
939 ("Error calculating the output scaled size - integer overflow"));
940 goto done;
941 }
942
943 /* Try to keep the input height (because of interlacing) */
944 tmp = gst_structure_copy (outs);
945 gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
946 gst_structure_get_int (tmp, "height", &set_h);
947
948 /* This might have failed but try to scale the width
949 * to keep the DAR nonetheless */
950 w = (guint) gst_util_uint64_scale_int_round (set_h, num, den);
951 gst_structure_fixate_field_nearest_int (tmp, "width", w);
952 gst_structure_get_int (tmp, "width", &set_w);
953 gst_structure_free (tmp);
954
955 /* We kept the DAR and the height is nearest to the original height */
956 if (set_w == w) {
957 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
958 G_TYPE_INT, set_h, NULL);
959 goto done;
960 }
961
962 f_h = set_h;
963 f_w = set_w;
964
965 /* If the former failed, try to keep the input width at least */
966 tmp = gst_structure_copy (outs);
967 gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
968 gst_structure_get_int (tmp, "width", &set_w);
969
/* This might have failed but try to scale the height
 * to keep the DAR nonetheless */
972 h = (guint) gst_util_uint64_scale_int_round (set_w, den, num);
973 gst_structure_fixate_field_nearest_int (tmp, "height", h);
974 gst_structure_get_int (tmp, "height", &set_h);
975 gst_structure_free (tmp);
976
977 /* We kept the DAR and the width is nearest to the original width */
978 if (set_h == h) {
979 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
980 G_TYPE_INT, set_h, NULL);
981 goto done;
982 }
983
984 /* If all this failed, keep the dimensions with the DAR that was closest
985 * to the correct DAR. This changes the DAR but there's not much else to
986 * do here.
987 */
988 if (set_w * ABS (set_h - h) < ABS (f_w - w) * f_h) {
989 f_h = set_h;
990 f_w = set_w;
991 }
992 gst_structure_set (outs, "width", G_TYPE_INT, f_w, "height", G_TYPE_INT,
993 f_h, NULL);
994 goto done;
995 } else {
996 GstStructure *tmp;
997 gint set_h, set_w, set_par_n, set_par_d, tmp2;
998
999 /* width, height and PAR are not fixed but passthrough is not possible */
1000
1001 /* First try to keep the height and width as good as possible
1002 * and scale PAR */
1003 tmp = gst_structure_copy (outs);
1004 gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
1005 gst_structure_get_int (tmp, "height", &set_h);
1006 gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
1007 gst_structure_get_int (tmp, "width", &set_w);
1008
1009 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, set_w,
1010 &to_par_n, &to_par_d)) {
1011 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
1012 ("Error calculating the output scaled size - integer overflow"));
1013 gst_structure_free (tmp);
1014 goto done;
1015 }
1016
1017 if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
1018 gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
1019 gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
1020 to_par_n, to_par_d);
1021 gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
1022 &set_par_d);
1023 gst_structure_free (tmp);
1024
1025 if (set_par_n == to_par_n && set_par_d == to_par_d) {
1026 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1027 G_TYPE_INT, set_h, NULL);
1028
1029 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1030 set_par_n != set_par_d)
1031 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1032 set_par_n, set_par_d, NULL);
1033 goto done;
1034 }
1035
1036 /* Otherwise try to scale width to keep the DAR with the set
1037 * PAR and height */
1038 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
1039 set_par_n, &num, &den)) {
1040 GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
1041 ("Error calculating the output scaled size - integer overflow"));
1042 goto done;
1043 }
1044
1045 w = (guint) gst_util_uint64_scale_int_round (set_h, num, den);
1046 tmp = gst_structure_copy (outs);
1047 gst_structure_fixate_field_nearest_int (tmp, "width", w);
1048 gst_structure_get_int (tmp, "width", &tmp2);
1049 gst_structure_free (tmp);
1050
1051 if (tmp2 == w) {
1052 gst_structure_set (outs, "width", G_TYPE_INT, tmp2, "height",
1053 G_TYPE_INT, set_h, NULL);
1054 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1055 set_par_n != set_par_d)
1056 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1057 set_par_n, set_par_d, NULL);
1058 goto done;
1059 }
1060
1061 /* ... or try the same with the height */
1062 h = (guint) gst_util_uint64_scale_int_round (set_w, den, num);
1063 tmp = gst_structure_copy (outs);
1064 gst_structure_fixate_field_nearest_int (tmp, "height", h);
1065 gst_structure_get_int (tmp, "height", &tmp2);
1066 gst_structure_free (tmp);
1067
1068 if (tmp2 == h) {
1069 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1070 G_TYPE_INT, tmp2, NULL);
1071 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1072 set_par_n != set_par_d)
1073 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1074 set_par_n, set_par_d, NULL);
1075 goto done;
1076 }
1077
1078 /* If all fails we can't keep the DAR and take the nearest values
1079 * for everything from the first try */
1080 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1081 G_TYPE_INT, set_h, NULL);
1082 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1083 set_par_n != set_par_d)
1084 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1085 set_par_n, set_par_d, NULL);
1086 }
1087 }
1088
1089 done:
1090 if (from_par == &fpar)
1091 g_value_unset (&fpar);
1092 if (to_par == &tpar)
1093 g_value_unset (&tpar);
1094
1095 return othercaps;
1096 }
1097
1098 static GstCaps *
gst_d3d11_base_convert_fixate_caps (GstBaseTransform * base,
1100 GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
1101 {
1102 GstCaps *format = NULL;
1103
1104 GST_DEBUG_OBJECT (base,
1105 "trying to fixate othercaps %" GST_PTR_FORMAT " based on caps %"
1106 GST_PTR_FORMAT, othercaps, caps);
1107
1108 format = gst_d3d11_base_convert_get_fixed_format (base, direction, caps,
1109 othercaps);
1110
1111 if (gst_caps_is_empty (format)) {
1112 GST_ERROR_OBJECT (base, "Could not convert formats");
1113 return format;
1114 }
1115
1116 /* convert mode is "all" or "size" here */
1117 othercaps =
1118 gst_d3d11_base_convert_fixate_size (base, direction, caps, othercaps);
1119
1120 if (gst_caps_get_size (othercaps) == 1) {
1121 guint i;
1122 const gchar *format_fields[] = { "format", "colorimetry", "chroma-site" };
1123 GstStructure *format_struct = gst_caps_get_structure (format, 0);
1124 GstStructure *fixated_struct;
1125
1126 othercaps = gst_caps_make_writable (othercaps);
1127 fixated_struct = gst_caps_get_structure (othercaps, 0);
1128
1129 for (i = 0; i < G_N_ELEMENTS (format_fields); i++) {
1130 if (gst_structure_has_field (format_struct, format_fields[i])) {
1131 gst_structure_set (fixated_struct, format_fields[i], G_TYPE_STRING,
1132 gst_structure_get_string (format_struct, format_fields[i]), NULL);
1133 } else {
1134 gst_structure_remove_field (fixated_struct, format_fields[i]);
1135 }
1136 }
1137 }
1138 gst_caps_unref (format);
1139
1140 GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, othercaps);
1141
1142 return othercaps;
1143 }
1144
1145 static gboolean
gst_d3d11_base_convert_filter_meta (GstBaseTransform * trans,
1147 GstQuery * query, GType api, const GstStructure * params)
1148 {
1149 /* This element cannot passthrough the crop meta, because it would convert the
 * wrong sub-region of the image, and worse, our output image may not be large
1151 * enough for the crop to be applied later */
1152 if (api == GST_VIDEO_CROP_META_API_TYPE)
1153 return FALSE;
1154
1155 /* propose all other metadata upstream */
1156 return TRUE;
1157 }
1158
1159 static gboolean
gst_d3d11_base_convert_propose_allocation (GstBaseTransform * trans,
1161 GstQuery * decide_query, GstQuery * query)
1162 {
1163 GstD3D11BaseFilter *filter = GST_D3D11_BASE_FILTER (trans);
1164 GstVideoInfo info;
1165 GstBufferPool *pool = NULL;
1166 GstCaps *caps;
1167 guint n_pools, i;
1168 GstStructure *config;
1169 guint size;
1170 GstD3D11AllocationParams *d3d11_params;
1171 const GstD3D11Format *d3d11_format;
1172 guint bind_flags = D3D11_BIND_SHADER_RESOURCE;
1173 DXGI_FORMAT dxgi_format = DXGI_FORMAT_UNKNOWN;
1174 UINT supported = 0;
1175 HRESULT hr;
1176 ID3D11Device *device_handle;
1177
1178 if (!GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
1179 decide_query, query))
1180 return FALSE;
1181
1182 /* passthrough, we're done */
1183 if (decide_query == NULL)
1184 return TRUE;
1185
1186 gst_query_parse_allocation (query, &caps, NULL);
1187
1188 if (caps == NULL)
1189 return FALSE;
1190
1191 if (!gst_video_info_from_caps (&info, caps)) {
1192 GST_ERROR_OBJECT (filter, "Invalid caps %" GST_PTR_FORMAT, caps);
1193 return FALSE;
1194 }
1195
1196 d3d11_format = gst_d3d11_device_format_from_gst (filter->device,
1197 GST_VIDEO_INFO_FORMAT (&info));
1198 if (!d3d11_format) {
1199 GST_ERROR_OBJECT (filter, "Unknown format caps %" GST_PTR_FORMAT, caps);
1200 return FALSE;
1201 }
1202
1203 if (d3d11_format->dxgi_format == DXGI_FORMAT_UNKNOWN) {
1204 dxgi_format = d3d11_format->resource_format[0];
1205 } else {
1206 dxgi_format = d3d11_format->dxgi_format;
1207 }
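
/* Descriptive note (added for clarity): formats without a single DXGI
 * representation (e.g. planar YUV mapped to per-plane resource formats) use
 * the first plane's resource format as a representative when probing device
 * support below. */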
1208
1209 device_handle = gst_d3d11_device_get_device_handle (filter->device);
1210 hr = device_handle->CheckFormatSupport (dxgi_format, &supported);
1211 if (gst_d3d11_result (hr, filter->device) &&
1212 (supported & D3D11_FORMAT_SUPPORT_RENDER_TARGET) ==
1213 D3D11_FORMAT_SUPPORT_RENDER_TARGET) {
1214 bind_flags |= D3D11_BIND_RENDER_TARGET;
1215 }
1216
1217 n_pools = gst_query_get_n_allocation_pools (query);
1218 for (i = 0; i < n_pools; i++) {
1219 gst_query_parse_nth_allocation_pool (query, i, &pool, NULL, NULL, NULL);
1220 if (pool) {
1221 if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
1222 gst_clear_object (&pool);
1223 } else {
1224 GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
1225 if (dpool->device != filter->device)
1226 gst_clear_object (&pool);
1227 }
1228 }
1229 }
1230
1231 if (!pool)
1232 pool = gst_d3d11_buffer_pool_new (filter->device);
1233
1234 config = gst_buffer_pool_get_config (pool);
1235 gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
1236
1237 d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
1238 if (!d3d11_params) {
1239 d3d11_params = gst_d3d11_allocation_params_new (filter->device, &info,
1240 (GstD3D11AllocationFlags) 0, bind_flags);
1241 } else {
1242 /* Set bind flag */
1243 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&info); i++) {
1244 d3d11_params->desc[i].BindFlags |= bind_flags;
1245 }
1246 }
1247
1248 gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
1249 gst_d3d11_allocation_params_free (d3d11_params);
1250
1251 /* size will be updated by d3d11 buffer pool */
1252 gst_buffer_pool_config_set_params (config, caps, 0, 0, 0);
1253
1254 if (!gst_buffer_pool_set_config (pool, config))
1255 goto config_failed;
1256
1257 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
1258 gst_query_add_allocation_meta (query,
1259 GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL);
1260
1261 /* d3d11 buffer pool will update buffer size based on allocated texture,
1262 * get size from config again */
1263 config = gst_buffer_pool_get_config (pool);
1264 gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
1265 gst_structure_free (config);
1266
1267 gst_query_add_allocation_pool (query, pool, size, 0, 0);
1268
1269 gst_object_unref (pool);
1270
1271 return TRUE;
1272
1273 /* ERRORS */
1274 config_failed:
1275 {
1276 GST_ERROR_OBJECT (filter, "failed to set config");
1277 gst_object_unref (pool);
1278 return FALSE;
1279 }
1280 }
1281
1282 static gboolean
gst_d3d11_base_convert_decide_allocation (GstBaseTransform * trans,
1284 GstQuery * query)
1285 {
1286 GstD3D11BaseFilter *filter = GST_D3D11_BASE_FILTER (trans);
1287 GstCaps *outcaps = NULL;
1288 GstBufferPool *pool = NULL;
1289 guint size, min = 0, max = 0;
1290 GstStructure *config;
1291 GstD3D11AllocationParams *d3d11_params;
1292 gboolean update_pool = FALSE;
1293 GstVideoInfo info;
1294 guint i;
1295 const GstD3D11Format *d3d11_format;
1296 guint bind_flags = D3D11_BIND_RENDER_TARGET;
1297 DXGI_FORMAT dxgi_format = DXGI_FORMAT_UNKNOWN;
1298 UINT supported = 0;
1299 HRESULT hr;
1300 ID3D11Device *device_handle;
1301
1302 gst_query_parse_allocation (query, &outcaps, NULL);
1303
1304 if (!outcaps)
1305 return FALSE;
1306
1307 if (!gst_video_info_from_caps (&info, outcaps)) {
1308 GST_ERROR_OBJECT (filter, "Invalid caps %" GST_PTR_FORMAT, outcaps);
1309 return FALSE;
1310 }
1311
1312 d3d11_format = gst_d3d11_device_format_from_gst (filter->device,
1313 GST_VIDEO_INFO_FORMAT (&info));
1314 if (!d3d11_format) {
1315 GST_ERROR_OBJECT (filter, "Unknown format caps %" GST_PTR_FORMAT, outcaps);
1316 return FALSE;
1317 }
1318
1319 if (d3d11_format->dxgi_format == DXGI_FORMAT_UNKNOWN) {
1320 dxgi_format = d3d11_format->resource_format[0];
1321 } else {
1322 dxgi_format = d3d11_format->dxgi_format;
1323 }
1324
1325 device_handle = gst_d3d11_device_get_device_handle (filter->device);
1326 hr = device_handle->CheckFormatSupport (dxgi_format, &supported);
1327 if (gst_d3d11_result (hr, filter->device) &&
1328 (supported & D3D11_FORMAT_SUPPORT_SHADER_SAMPLE) ==
1329 D3D11_FORMAT_SUPPORT_SHADER_SAMPLE) {
1330 bind_flags |= D3D11_BIND_SHADER_RESOURCE;
1331 }
1332
1333 size = GST_VIDEO_INFO_SIZE (&info);
1334
1335 if (gst_query_get_n_allocation_pools (query) > 0) {
1336 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
1337 if (pool) {
1338 if (!GST_IS_D3D11_BUFFER_POOL (pool)) {
1339 gst_clear_object (&pool);
1340 } else {
1341 GstD3D11BufferPool *dpool = GST_D3D11_BUFFER_POOL (pool);
1342 if (dpool->device != filter->device)
1343 gst_clear_object (&pool);
1344 }
1345 }
1346
1347 update_pool = TRUE;
1348 }
1349
1350 if (!pool)
1351 pool = gst_d3d11_buffer_pool_new (filter->device);
1352
1353 config = gst_buffer_pool_get_config (pool);
1354 gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
1355
1356 d3d11_params = gst_buffer_pool_config_get_d3d11_allocation_params (config);
1357 if (!d3d11_params) {
1358 d3d11_params = gst_d3d11_allocation_params_new (filter->device, &info,
1359 (GstD3D11AllocationFlags) 0, bind_flags);
1360 } else {
1361 /* Set bind flag */
1362 for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&info); i++) {
1363 d3d11_params->desc[i].BindFlags |= bind_flags;
1364 }
1365 }
1366
1367 gst_buffer_pool_config_set_d3d11_allocation_params (config, d3d11_params);
1368 gst_d3d11_allocation_params_free (d3d11_params);
1369
1370 gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
1371 gst_buffer_pool_set_config (pool, config);
1372
1373 /* d3d11 buffer pool will update buffer size based on allocated texture,
1374 * get size from config again */
1375 config = gst_buffer_pool_get_config (pool);
1376 gst_buffer_pool_config_get_params (config, nullptr, &size, nullptr, nullptr);
1377 gst_structure_free (config);
1378
1379 if (update_pool)
1380 gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
1381 else
1382 gst_query_add_allocation_pool (query, pool, size, min, max);
1383
1384 gst_object_unref (pool);
1385
1386 return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
1387 query);
1388 }
1389
1390 static gboolean
create_shader_input_resource (GstD3D11BaseConvert * self,
1392 GstD3D11Device * device, const GstD3D11Format * format, GstVideoInfo * info)
1393 {
1394 D3D11_TEXTURE2D_DESC texture_desc;
1395 D3D11_SHADER_RESOURCE_VIEW_DESC view_desc;
1396 HRESULT hr;
1397 ID3D11Device *device_handle;
1398 ID3D11Texture2D *tex[GST_VIDEO_MAX_PLANES] = { NULL, };
1399 ID3D11ShaderResourceView *view[GST_VIDEO_MAX_PLANES] = { NULL, };
1400 gint i;
1401
1402 if (self->num_input_view)
1403 return TRUE;
1404
1405 memset (&texture_desc, 0, sizeof (texture_desc));
1406 memset (&view_desc, 0, sizeof (view_desc));
1407
1408 device_handle = gst_d3d11_device_get_device_handle (device);
1409
1410 texture_desc.MipLevels = 1;
1411 texture_desc.ArraySize = 1;
1412 texture_desc.SampleDesc.Count = 1;
1413 texture_desc.SampleDesc.Quality = 0;
1414 texture_desc.Usage = D3D11_USAGE_DEFAULT;
1415 texture_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
1416
1417 if (format->dxgi_format == DXGI_FORMAT_UNKNOWN) {
1418 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
1419 if (format->resource_format[i] == DXGI_FORMAT_UNKNOWN)
1420 break;
1421
1422 texture_desc.Width = GST_VIDEO_INFO_COMP_WIDTH (info, i);
1423 texture_desc.Height = GST_VIDEO_INFO_COMP_HEIGHT (info, i);
1424 texture_desc.Format = format->resource_format[i];
1425
1426 hr = device_handle->CreateTexture2D (&texture_desc, NULL, &tex[i]);
1427 if (!gst_d3d11_result (hr, device)) {
1428 GST_ERROR_OBJECT (self, "Failed to create texture (0x%x)", (guint) hr);
1429 goto error;
1430 }
1431 }
1432 } else {
1433 gboolean is_semiplanar = FALSE;
1434
1435 if (format->dxgi_format == DXGI_FORMAT_NV12 ||
1436 format->dxgi_format == DXGI_FORMAT_P010 ||
1437 format->dxgi_format == DXGI_FORMAT_P016)
1438 is_semiplanar = TRUE;
1439
1440 texture_desc.Width = GST_VIDEO_INFO_WIDTH (info);
1441 texture_desc.Height = GST_VIDEO_INFO_HEIGHT (info);
1442 texture_desc.Format = format->dxgi_format;
1443
/* semiplanar formats require an even resolution */
1445 if (is_semiplanar) {
1446 texture_desc.Width = GST_ROUND_UP_2 (texture_desc.Width);
1447 texture_desc.Height = GST_ROUND_UP_2 (texture_desc.Height);
1448 }
1449
1450 hr = device_handle->CreateTexture2D (&texture_desc, NULL, &tex[0]);
1451 if (!gst_d3d11_result (hr, device)) {
1452 GST_ERROR_OBJECT (self, "Failed to create texture (0x%x)", (guint) hr);
1453 goto error;
1454 }
1455
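/* Descriptive note (added for clarity): for semi-planar formats the chroma
 * plane aliases the same texture as the luma plane, so the texture is ref'ed
 * once more and stored in both slots; the per-plane shader resource views
 * created below then use the per-plane resource formats (e.g. R8 and R8G8
 * for NV12). */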
1456 if (is_semiplanar) {
1457 tex[0]->AddRef ();
1458 tex[1] = tex[0];
1459 }
1460 }
1461
1462 view_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
1463 view_desc.Texture2D.MipLevels = 1;
1464
1465 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
1466 if (format->resource_format[i] == DXGI_FORMAT_UNKNOWN)
1467 break;
1468
1469 view_desc.Format = format->resource_format[i];
1470 hr = device_handle->CreateShaderResourceView (tex[i], &view_desc, &view[i]);
1471
1472 if (!gst_d3d11_result (hr, device)) {
1473 GST_ERROR_OBJECT (self,
1474 "Failed to create resource view (0x%x)", (guint) hr);
1475 goto error;
1476 }
1477 }
1478
1479 self->num_input_view = i;
1480
1481 GST_DEBUG_OBJECT (self,
"%d shader resource views created", self->num_input_view);
1483
1484 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
1485 self->in_texture[i] = tex[i];
1486 self->shader_resource_view[i] = view[i];
1487 }
1488
1489 return TRUE;
1490
1491 error:
1492 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
1493 GST_D3D11_CLEAR_COM (view[i]);
1494 }
1495
1496 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
1497 GST_D3D11_CLEAR_COM (tex[i]);
1498 }
1499
1500 return FALSE;
1501 }
1502
1503 /* 16.0 / 255.0 ~= 0.062745 */
1504 static const float luma_black_level_limited = 0.062745f;
1505
1506 static inline void
clear_rtv_color_rgb (GstD3D11BaseConvert * self,
1508 ID3D11DeviceContext * context_handle, ID3D11RenderTargetView * rtv,
1509 gboolean full_range)
1510 {
1511 const FLOAT clear_color_full[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
1512 const FLOAT clear_color_limited[4] =
1513 { luma_black_level_limited, luma_black_level_limited,
1514 luma_black_level_limited, 1.0f
1515 };
1516 const FLOAT *target;
1517
1518 if (full_range)
1519 target = clear_color_full;
1520 else
1521 target = clear_color_limited;
1522
1523 context_handle->ClearRenderTargetView (rtv, target);
1524 }
1525
1526 static inline void
clear_rtv_color_vuya (GstD3D11BaseConvert * self,
1528 ID3D11DeviceContext * context_handle, ID3D11RenderTargetView * rtv,
1529 gboolean full_range)
1530 {
1531 const FLOAT clear_color_full[4] = { 0.5f, 0.5f, 0.0f, 1.0f };
1532 const FLOAT clear_color_limited[4] =
1533 { 0.5f, 0.5f, luma_black_level_limited, 1.0f };
1534 const FLOAT *target;
1535
1536 if (full_range)
1537 target = clear_color_full;
1538 else
1539 target = clear_color_limited;
1540
1541 context_handle->ClearRenderTargetView (rtv, target);
1542 }
1543
1544 static inline void
clear_rtv_color_luma (GstD3D11BaseConvert * self,
1546 ID3D11DeviceContext * context_handle, ID3D11RenderTargetView * rtv,
1547 gboolean full_range)
1548 {
1549 const FLOAT clear_color_full[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
1550 const FLOAT clear_color_limited[4] =
1551 { luma_black_level_limited, luma_black_level_limited,
1552 luma_black_level_limited, 1.0f
1553 };
1554 const FLOAT *target;
1555
1556 if (full_range)
1557 target = clear_color_full;
1558 else
1559 target = clear_color_limited;
1560
1561 context_handle->ClearRenderTargetView (rtv, target);
1562 }
1563
1564 static inline void
clear_rtv_color_chroma (GstD3D11BaseConvert * self,
1566 ID3D11DeviceContext * context_handle, ID3D11RenderTargetView * rtv)
1567 {
1568 const FLOAT clear_color[4] = { 0.5f, 0.5f, 0.5f, 1.0f };
1569
1570 context_handle->ClearRenderTargetView (rtv, clear_color);
1571 }
1572
1573 static void
clear_rtv_color_all (GstD3D11BaseConvert * self, GstVideoInfo * info,
1575 ID3D11DeviceContext * context_handle,
1576 ID3D11RenderTargetView * rtv[GST_VIDEO_MAX_PLANES])
1577 {
1578 gint i;
1579 gboolean full_range = info->colorimetry.range == GST_VIDEO_COLOR_RANGE_0_255;
1580
1581 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
1582 if (!rtv[i])
1583 break;
1584
1585 if (GST_VIDEO_INFO_IS_RGB (info)) {
1586 clear_rtv_color_rgb (self, context_handle, rtv[i], full_range);
1587 } else {
1588 if (GST_VIDEO_INFO_N_PLANES (info) == 1) {
1589 clear_rtv_color_vuya (self, context_handle, rtv[i], full_range);
1590 } else {
1591 if (i == 0)
1592 clear_rtv_color_luma (self, context_handle, rtv[i], full_range);
1593 else
1594 clear_rtv_color_chroma (self, context_handle, rtv[i]);
1595 }
1596 }
1597 }
1598 }
1599
1600 static gboolean
create_shader_output_resource (GstD3D11BaseConvert * self,
1602 GstD3D11Device * device, const GstD3D11Format * format, GstVideoInfo * info)
1603 {
1604 D3D11_TEXTURE2D_DESC texture_desc;
1605 D3D11_RENDER_TARGET_VIEW_DESC view_desc;
1606 HRESULT hr;
1607 ID3D11Device *device_handle;
1608 ID3D11DeviceContext *context_handle;
1609 ID3D11Texture2D *tex[GST_VIDEO_MAX_PLANES] = { NULL, };
1610 ID3D11RenderTargetView *view[GST_VIDEO_MAX_PLANES] = { NULL, };
1611 gint i;
1612
1613 if (self->num_output_view)
1614 return TRUE;
1615
1616 memset (&texture_desc, 0, sizeof (texture_desc));
1617 memset (&view_desc, 0, sizeof (view_desc));
1618
1619 device_handle = gst_d3d11_device_get_device_handle (device);
1620 context_handle = gst_d3d11_device_get_device_context_handle (device);
1621
1622 texture_desc.MipLevels = 1;
1623 texture_desc.ArraySize = 1;
1624 texture_desc.SampleDesc.Count = 1;
1625 texture_desc.SampleDesc.Quality = 0;
1626 texture_desc.Usage = D3D11_USAGE_DEFAULT;
1627 texture_desc.BindFlags =
1628 D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
1629
1630 if (format->dxgi_format == DXGI_FORMAT_UNKNOWN) {
1631 for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
1632 if (format->resource_format[i] == DXGI_FORMAT_UNKNOWN)
1633 break;
1634
1635 texture_desc.Width = GST_VIDEO_INFO_COMP_WIDTH (info, i);
1636 texture_desc.Height = GST_VIDEO_INFO_COMP_HEIGHT (info, i);
1637 texture_desc.Format = format->resource_format[i];
1638
1639 hr = device_handle->CreateTexture2D (&texture_desc, NULL, &tex[i]);
1640 if (!gst_d3d11_result (hr, device)) {
1641 GST_ERROR_OBJECT (self, "Failed to create texture (0x%x)", (guint) hr);
1642 goto error;
1643 }
1644 }
1645 } else {
1646 gboolean is_semiplanar = FALSE;
1647
1648 if (format->dxgi_format == DXGI_FORMAT_NV12 ||
1649 format->dxgi_format == DXGI_FORMAT_P010 ||
1650 format->dxgi_format == DXGI_FORMAT_P016)
1651 is_semiplanar = TRUE;
1652
1653 texture_desc.Width = GST_VIDEO_INFO_WIDTH (info);
1654 texture_desc.Height = GST_VIDEO_INFO_HEIGHT (info);
1655 texture_desc.Format = format->dxgi_format;
1656
    /* Resolution of semi-planar formats should be an even number */
    if (is_semiplanar) {
      texture_desc.Width = GST_ROUND_UP_2 (texture_desc.Width);
      texture_desc.Height = GST_ROUND_UP_2 (texture_desc.Height);
    }

    hr = device_handle->CreateTexture2D (&texture_desc, NULL, &tex[0]);
    if (!gst_d3d11_result (hr, device)) {
      GST_ERROR_OBJECT (self, "Failed to create texture (0x%x)", (guint) hr);
      goto error;
    }

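    /* NV12/P010/P016 pack luma and chroma into the same texture, so the
     * chroma plane view created below must reference the same
     * ID3D11Texture2D. Take an extra reference which is released together
     * with the view */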
    if (is_semiplanar) {
      tex[0]->AddRef ();
      tex[1] = tex[0];
    }
  }

  view_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
  view_desc.Texture2D.MipSlice = 0;
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
    if (format->resource_format[i] == DXGI_FORMAT_UNKNOWN)
      break;

    view_desc.Format = format->resource_format[i];
    hr = device_handle->CreateRenderTargetView (tex[i], &view_desc, &view[i]);
    if (!gst_d3d11_result (hr, device)) {
      GST_ERROR_OBJECT (self,
          "Failed to create %dth render target view (0x%x)", i, (guint) hr);
      goto error;
    }
  }

  gst_d3d11_device_lock (device);
  clear_rtv_color_all (self, info, context_handle, view);
  gst_d3d11_device_unlock (device);

  self->num_output_view = i;

  GST_DEBUG_OBJECT (self, "%d render view created", self->num_output_view);

  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
    self->out_texture[i] = tex[i];
    self->render_target_view[i] = view[i];
  }

  return TRUE;

error:
  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
    GST_D3D11_CLEAR_COM (view[i]);
  }

  for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
    GST_D3D11_CLEAR_COM (tex[i]);
  }

  return FALSE;
}

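/* Configure conversion for new input/output caps: compute the display aspect
 * ratios and border size, decide whether passthrough is possible, create the
 * shader based converter and, when both formats are native DXGI formats,
 * an optional ID3D11VideoProcessor based path */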
static gboolean
gst_d3d11_base_convert_set_info (GstD3D11BaseFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
    GstVideoInfo * out_info)
{
  GstD3D11BaseConvert *self = GST_D3D11_BASE_CONVERT (filter);
  const GstVideoInfo *unknown_info;
  gint from_dar_n, from_dar_d, to_dar_n, to_dar_d;
  D3D11_VIEWPORT view_port;
  gint border_offset_x = 0;
  gint border_offset_y = 0;

  if (gst_base_transform_is_passthrough (GST_BASE_TRANSFORM (filter)))
    return TRUE;

  if (!gst_util_fraction_multiply (in_info->width,
          in_info->height, in_info->par_n, in_info->par_d, &from_dar_n,
          &from_dar_d)) {
    from_dar_n = from_dar_d = -1;
  }

  if (!gst_util_fraction_multiply (out_info->width,
          out_info->height, out_info->par_n, out_info->par_d, &to_dar_n,
          &to_dar_d)) {
    to_dar_n = to_dar_d = -1;
  }

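  /* Worked example (illustrative numbers, not taken from any caller):
   * converting 640x480 with 1/1 PAR (DAR 4:3) to 1280x720 with 1/1 PAR
   * (DAR 16:9) while add-borders is enabled gives
   * to_h = 1280 * 3 / 4 = 960 > 720, so instead
   * to_w = 720 * 4 / 3 = 960 and borders_w = 1280 - 960 = 320,
   * i.e. a 160 pixel pillar-box on each side */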
  self->borders_w = self->borders_h = 0;
  if (to_dar_n != from_dar_n || to_dar_d != from_dar_d) {
    if (self->add_borders) {
      gint n, d, to_h, to_w;

      if (from_dar_n != -1 && from_dar_d != -1
          && gst_util_fraction_multiply (from_dar_n, from_dar_d,
              out_info->par_d, out_info->par_n, &n, &d)) {
        to_h = gst_util_uint64_scale_int (out_info->width, d, n);
        if (to_h <= out_info->height) {
          self->borders_h = out_info->height - to_h;
          self->borders_w = 0;
        } else {
          to_w = gst_util_uint64_scale_int (out_info->height, n, d);
          g_assert (to_w <= out_info->width);
          self->borders_h = 0;
          self->borders_w = out_info->width - to_w;
        }
      } else {
        GST_WARNING_OBJECT (self, "Can't calculate borders");
      }
    } else {
      GST_INFO_OBJECT (self, "Display aspect ratio update %d/%d -> %d/%d",
          from_dar_n, from_dar_d, to_dar_n, to_dar_d);
    }
  }

  gst_d3d11_base_convert_clear_shader_resource (self);

  GST_DEBUG_OBJECT (self, "Setup convert with format %s -> %s",
      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (in_info)),
      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (out_info)));

  /* if present, these must match */
  if (in_info->interlace_mode != out_info->interlace_mode)
    goto format_mismatch;

  if (in_info->width == out_info->width && in_info->height == out_info->height
      && in_info->finfo == out_info->finfo && self->borders_w == 0 &&
      self->borders_h == 0) {
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
    return TRUE;
  } else {
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), FALSE);
  }

  self->in_d3d11_format =
      gst_d3d11_device_format_from_gst (filter->device,
      GST_VIDEO_INFO_FORMAT (in_info));
  if (!self->in_d3d11_format) {
    unknown_info = in_info;
    goto format_unknown;
  }

  self->out_d3d11_format =
      gst_d3d11_device_format_from_gst (filter->device,
      GST_VIDEO_INFO_FORMAT (out_info));
  if (!self->out_d3d11_format) {
    unknown_info = out_info;
    goto format_unknown;
  }

  self->converter =
      gst_d3d11_converter_new (filter->device, in_info, out_info, nullptr);

  if (!self->converter) {
    GST_ERROR_OBJECT (self, "couldn't set converter");
    return FALSE;
  }
#if (GST_D3D11_DXGI_HEADER_VERSION >= 4)
  /* If both input and output formats are native DXGI format */
  if (self->in_d3d11_format->dxgi_format != DXGI_FORMAT_UNKNOWN &&
      self->out_d3d11_format->dxgi_format != DXGI_FORMAT_UNKNOWN) {
    gboolean hardware = FALSE;
    GstD3D11VideoProcessor *processor = NULL;

    gst_d3d11_device_lock (filter->device);
    g_object_get (filter->device, "hardware", &hardware, NULL);
    if (hardware) {
      processor = gst_d3d11_video_processor_new (filter->device,
          in_info->width, in_info->height, out_info->width, out_info->height);
    }

    if (processor) {
      const GstDxgiColorSpace *in_color_space;
      const GstDxgiColorSpace *out_color_space;

      in_color_space = gst_d3d11_video_info_to_dxgi_color_space (in_info);
      out_color_space = gst_d3d11_video_info_to_dxgi_color_space (out_info);

      if (in_color_space && out_color_space) {
        DXGI_FORMAT in_dxgi_format = self->in_d3d11_format->dxgi_format;
        DXGI_FORMAT out_dxgi_format = self->out_d3d11_format->dxgi_format;
        DXGI_COLOR_SPACE_TYPE in_dxgi_color_space =
            (DXGI_COLOR_SPACE_TYPE) in_color_space->dxgi_color_space_type;
        DXGI_COLOR_SPACE_TYPE out_dxgi_color_space =
            (DXGI_COLOR_SPACE_TYPE) out_color_space->dxgi_color_space_type;

        if (!gst_d3d11_video_processor_check_format_conversion (processor,
                in_dxgi_format, in_dxgi_color_space, out_dxgi_format,
                out_dxgi_color_space)) {
          GST_DEBUG_OBJECT (self, "Conversion is not supported by device");
          gst_d3d11_video_processor_free (processor);
          processor = NULL;
        } else {
          GST_DEBUG_OBJECT (self, "video processor supports conversion");
          gst_d3d11_video_processor_set_input_dxgi_color_space (processor,
              in_dxgi_color_space);
          gst_d3d11_video_processor_set_output_dxgi_color_space (processor,
              out_dxgi_color_space);
        }
      } else {
        GST_WARNING_OBJECT (self,
            "Couldn't determine input and/or output dxgi colorspace");
        gst_d3d11_video_processor_free (processor);
        processor = NULL;
      }
    }

    self->processor = processor;
    gst_d3d11_device_unlock (filter->device);
  }
#endif

  GST_DEBUG_OBJECT (self, "from=%dx%d (par=%d/%d dar=%d/%d), size %"
      G_GSIZE_FORMAT " -> to=%dx%d (par=%d/%d dar=%d/%d borders=%d:%d), "
      "size %" G_GSIZE_FORMAT,
      in_info->width, in_info->height, in_info->par_n, in_info->par_d,
      from_dar_n, from_dar_d, in_info->size, out_info->width,
      out_info->height, out_info->par_n, out_info->par_d, to_dar_n, to_dar_d,
      self->borders_w, self->borders_h, out_info->size);

  self->in_rect.left = 0;
  self->in_rect.top = 0;
  self->in_rect.right = GST_VIDEO_INFO_WIDTH (in_info);
  self->in_rect.bottom = GST_VIDEO_INFO_HEIGHT (in_info);

  if (self->borders_w) {
    border_offset_x = self->borders_w / 2;
    self->out_rect.left = border_offset_x;
    self->out_rect.right = GST_VIDEO_INFO_WIDTH (out_info) - border_offset_x;
  } else {
    self->out_rect.left = 0;
    self->out_rect.right = GST_VIDEO_INFO_WIDTH (out_info);
  }

  if (self->borders_h) {
    border_offset_y = self->borders_h / 2;
    self->out_rect.top = border_offset_y;
    self->out_rect.bottom = GST_VIDEO_INFO_HEIGHT (out_info) - border_offset_y;
  } else {
    self->out_rect.top = 0;
    self->out_rect.bottom = GST_VIDEO_INFO_HEIGHT (out_info);
  }

  view_port.TopLeftX = border_offset_x;
  view_port.TopLeftY = border_offset_y;
  view_port.Width = GST_VIDEO_INFO_WIDTH (out_info) - self->borders_w;
  view_port.Height = GST_VIDEO_INFO_HEIGHT (out_info) - self->borders_h;
  view_port.MinDepth = 0.0f;
  view_port.MaxDepth = 1.0f;

  gst_d3d11_converter_update_viewport (self->converter, &view_port);

  return TRUE;

  /* ERRORS */
format_mismatch:
  {
    GST_ERROR_OBJECT (self, "input and output formats do not match");
    return FALSE;
  }
format_unknown:
  {
    GST_ERROR_OBJECT (self,
        "%s couldn't be converted to d3d11 format",
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (unknown_info)));
    return FALSE;
  }
}

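/* Decide whether the ID3D11VideoProcessor path should be used for this
 * buffer pair. The shader path is preferred whenever shader resource views
 * are available, since it behaves identically across vendors; the processor
 * is only chosen when both buffers consist of a single memory, belong to our
 * device and expose processor input/output views */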
static gboolean
gst_d3d11_base_convert_prefer_video_processor (GstD3D11BaseConvert * self,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstD3D11BaseFilter *filter = GST_D3D11_BASE_FILTER (self);
  GstMemory *mem;
  GstD3D11Memory *dmem;

  if (!self->processor) {
    GST_TRACE_OBJECT (self, "Processor is unavailable");
    return FALSE;
  }

  if (gst_buffer_n_memory (inbuf) != 1 || gst_buffer_n_memory (outbuf) != 1) {
    GST_TRACE_OBJECT (self, "Num memory objects is mismatched, in: %d, out: %d",
        gst_buffer_n_memory (inbuf), gst_buffer_n_memory (outbuf));
    return FALSE;
  }

  mem = gst_buffer_peek_memory (inbuf, 0);
  g_assert (gst_is_d3d11_memory (mem));

  dmem = (GstD3D11Memory *) mem;
  if (dmem->device != filter->device) {
    GST_TRACE_OBJECT (self, "Input memory belongs to different device");
    return FALSE;
  }

  /* If we can use shader, and video processor was not used previously,
   * we prefer to use shader instead of video processor
   * because video processor implementation is vendor dependent
   * and not flexible */
  if (!self->processor_in_use &&
      gst_d3d11_memory_get_shader_resource_view_size (dmem)) {
    GST_TRACE_OBJECT (self, "SRV is available");
    return FALSE;
  }

  if (!gst_d3d11_video_processor_get_input_view (self->processor, dmem)) {
    GST_TRACE_OBJECT (self, "PIV is unavailable");
    return FALSE;
  }

  mem = gst_buffer_peek_memory (outbuf, 0);
  g_assert (gst_is_d3d11_memory (mem));

  dmem = (GstD3D11Memory *) mem;
  if (dmem->device != filter->device) {
    GST_TRACE_OBJECT (self, "Output memory belongs to different device");
    return FALSE;
  }

  if (!gst_d3d11_video_processor_get_output_view (self->processor, dmem)) {
    GST_TRACE_OBJECT (self, "POV is unavailable");
    return FALSE;
  }

  return TRUE;
}

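/* Convert inbuf into outbuf using the ID3D11VideoProcessor. When letter- or
 * pillar-boxing is active the whole output is cleared to black first, since
 * the processor only writes the destination rectangle */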
static gboolean
gst_d3d11_base_convert_transform_using_processor (GstD3D11BaseConvert * self,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstD3D11Memory *in_mem, *out_mem;
  ID3D11VideoProcessorInputView *piv;
  ID3D11VideoProcessorOutputView *pov;

  in_mem = (GstD3D11Memory *) gst_buffer_peek_memory (inbuf, 0);
  out_mem = (GstD3D11Memory *) gst_buffer_peek_memory (outbuf, 0);

  piv = gst_d3d11_video_processor_get_input_view (self->processor, in_mem);
  if (!piv) {
    GST_ERROR_OBJECT (self, "ID3D11VideoProcessorInputView is unavailable");
    return FALSE;
  }

  pov = gst_d3d11_video_processor_get_output_view (self->processor, out_mem);
  if (!pov) {
    GST_ERROR_OBJECT (self, "ID3D11VideoProcessorOutputView is unavailable");
    return FALSE;
  }

  /* Clear background color with black */
  if (self->borders_w || self->borders_h) {
    GstD3D11BaseFilter *bfilter = GST_D3D11_BASE_FILTER_CAST (self);
    ID3D11DeviceContext *context_handle =
        gst_d3d11_device_get_device_context_handle (bfilter->device);
    ID3D11RenderTargetView *render_view[GST_VIDEO_MAX_PLANES] = { NULL, };

    if (!gst_d3d11_buffer_get_render_target_view (outbuf, render_view)) {
      GST_ERROR_OBJECT (self, "ID3D11RenderTargetView is unavailable");
      return FALSE;
    }

    gst_d3d11_device_lock (bfilter->device);
    clear_rtv_color_all (self, &bfilter->out_info, context_handle, render_view);
    gst_d3d11_device_unlock (bfilter->device);
  }

  return gst_d3d11_video_processor_render (self->processor,
      &self->in_rect, piv, &self->out_rect, pov);
}

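/* Main conversion entry point: map both buffers, try the video processor
 * path first and otherwise run the shader based converter, staging through
 * fallback textures whenever the buffers cannot provide the required shader
 * resource or render target views */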
static GstFlowReturn
gst_d3d11_base_convert_transform (GstBaseTransform * trans,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstD3D11BaseFilter *filter = GST_D3D11_BASE_FILTER (trans);
  GstD3D11BaseConvert *self = GST_D3D11_BASE_CONVERT (trans);
  GstD3D11Device *device = filter->device;
  ID3D11Device *device_handle;
  ID3D11DeviceContext *context_handle;
  ID3D11ShaderResourceView *resource_view[GST_VIDEO_MAX_PLANES] = { NULL, };
  ID3D11RenderTargetView *render_view[GST_VIDEO_MAX_PLANES] = { NULL, };
  ID3D11RenderTargetView **target_rtv;
  guint i;
  gboolean copy_input = FALSE;
  gboolean copy_output = FALSE;
  GstMapInfo in_map[GST_VIDEO_MAX_PLANES];
  GstMapInfo out_map[GST_VIDEO_MAX_PLANES];

  device_handle = gst_d3d11_device_get_device_handle (device);
  context_handle = gst_d3d11_device_get_device_context_handle (device);

  if (!gst_d3d11_buffer_map (inbuf, device_handle, in_map, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Couldn't map input buffer");
    goto invalid_memory;
  }

  if (!gst_d3d11_buffer_map (outbuf, device_handle, out_map, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (self, "Couldn't map output buffer");
    gst_d3d11_buffer_unmap (inbuf, in_map);
    goto invalid_memory;
  }

  if (gst_d3d11_base_convert_prefer_video_processor (self, inbuf, outbuf)) {
    gboolean ret =
        gst_d3d11_base_convert_transform_using_processor (self, inbuf, outbuf);

    if (!ret) {
      GST_ERROR_OBJECT (self, "Couldn't convert using video processor");
      goto conversion_failed;
    }

    self->processor_in_use = TRUE;

    GST_TRACE_OBJECT (self, "Conversion done by video processor");

    gst_d3d11_buffer_unmap (inbuf, in_map);
    gst_d3d11_buffer_unmap (outbuf, out_map);

    return GST_FLOW_OK;
  }

  /* Ensure shader resource views */
  if (!gst_d3d11_buffer_get_shader_resource_view (inbuf, resource_view)) {
    if (!create_shader_input_resource (self, device,
            self->in_d3d11_format, &filter->in_info)) {
      GST_ERROR_OBJECT (self, "Failed to configure fallback input texture");
      goto fallback_failed;
    }

    copy_input = TRUE;
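    /* Copy each plane of the input buffer into our fallback textures so the
     * converter can sample from shader resource views that we own */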
    gst_d3d11_device_lock (device);
    for (i = 0; i < gst_buffer_n_memory (inbuf); i++) {
      GstD3D11Memory *mem =
          (GstD3D11Memory *) gst_buffer_peek_memory (inbuf, i);
      guint subidx;
      D3D11_BOX src_box = { 0, };
      D3D11_TEXTURE2D_DESC src_desc;
      D3D11_TEXTURE2D_DESC dst_desc;

      subidx = gst_d3d11_memory_get_subresource_index (mem);
      gst_d3d11_memory_get_texture_desc (mem, &src_desc);

      self->in_texture[i]->GetDesc (&dst_desc);

      src_box.left = 0;
      src_box.top = 0;
      src_box.front = 0;
      src_box.back = 1;
      src_box.right = MIN (src_desc.Width, dst_desc.Width);
      src_box.bottom = MIN (src_desc.Height, dst_desc.Height);

      context_handle->CopySubresourceRegion (self->in_texture[i], 0, 0, 0, 0,
          (ID3D11Resource *) in_map[i].data, subidx, &src_box);
    }
    gst_d3d11_device_unlock (device);
  }

  /* Ensure render target views */
  if (!gst_d3d11_buffer_get_render_target_view (outbuf, render_view)) {
    if (!create_shader_output_resource (self, device,
            self->out_d3d11_format, &filter->out_info)) {
      GST_ERROR_OBJECT (self, "Failed to configure fallback output texture");
      goto fallback_failed;
    }

    copy_output = TRUE;
  }

  /* Render into our fallback textures when the output buffer could not
   * provide render target views, otherwise render into the buffer directly */
  if (copy_output) {
    target_rtv = self->render_target_view;
  } else {
    target_rtv = render_view;
  }

  /* The shader only touches the viewport, so the border area must be cleared
   * explicitly. The output texture was likely initialized with zeros, which
   * is black for RGB but not for YUV formats */
  if (self->borders_w || self->borders_h) {
    gst_d3d11_device_lock (device);
    clear_rtv_color_all (self, &filter->out_info, context_handle, target_rtv);
    gst_d3d11_device_unlock (device);
  }

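  /* Run the shader based conversion; when the input had to be staged, sample
   * from our fallback shader resource views instead of the buffer's own */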
  if (!gst_d3d11_converter_convert (self->converter,
          copy_input ? self->shader_resource_view : resource_view,
          target_rtv, NULL, NULL)) {
    goto conversion_failed;
  }

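  /* Copy the conversion result from the fallback textures back into each
   * plane of the output buffer */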
  if (copy_output) {
    gst_d3d11_device_lock (device);
    for (i = 0; i < gst_buffer_n_memory (outbuf); i++) {
      GstD3D11Memory *mem =
          (GstD3D11Memory *) gst_buffer_peek_memory (outbuf, i);
      guint subidx;
      D3D11_BOX src_box = { 0, };
      D3D11_TEXTURE2D_DESC src_desc;
      D3D11_TEXTURE2D_DESC dst_desc;

      self->out_texture[i]->GetDesc (&src_desc);
      subidx = gst_d3d11_memory_get_subresource_index (mem);
      gst_d3d11_memory_get_texture_desc (mem, &dst_desc);

      src_box.left = 0;
      src_box.top = 0;
      src_box.front = 0;
      src_box.back = 1;
      src_box.right = MIN (src_desc.Width, dst_desc.Width);
      src_box.bottom = MIN (src_desc.Height, dst_desc.Height);

      context_handle->CopySubresourceRegion ((ID3D11Resource *) out_map[i].data,
          subidx, 0, 0, 0, self->out_texture[i], 0, &src_box);
    }
    gst_d3d11_device_unlock (device);
  }

  gst_d3d11_buffer_unmap (inbuf, in_map);
  gst_d3d11_buffer_unmap (outbuf, out_map);

  return GST_FLOW_OK;

invalid_memory:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, (NULL), ("Invalid memory"));
    return GST_FLOW_ERROR;
  }
fallback_failed:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, (NULL),
        ("Couldn't prepare fallback memory"));
    gst_d3d11_buffer_unmap (inbuf, in_map);
    gst_d3d11_buffer_unmap (outbuf, out_map);

    return GST_FLOW_ERROR;
  }
conversion_failed:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, (NULL),
        ("Couldn't convert texture"));
    gst_d3d11_buffer_unmap (inbuf, in_map);
    gst_d3d11_buffer_unmap (outbuf, out_map);

    return GST_FLOW_ERROR;
  }
}

static void
gst_d3d11_base_convert_set_add_border (GstD3D11BaseConvert * self,
    gboolean add_border)
{
  gboolean prev = self->add_borders;

  self->add_borders = add_border;
  if (prev != self->add_borders)
    gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM_CAST (self));
}

/**
 * SECTION:element-d3d11convert
 * @title: d3d11convert
 * @short_description: A Direct3D11 based color conversion and video resizing element
 *
 * This element resizes video frames and converts between color spaces.
 * By default the element will try to negotiate to the same size on the source
 * and sinkpad so that no scaling is needed.
 * It is therefore safe to insert this element in a pipeline to
 * get more robust behaviour without any cost if no scaling is needed.
 *
 * ## Example launch line
 * ```
 * gst-launch-1.0 videotestsrc ! video/x-raw,format=NV12 ! d3d11upload ! d3d11convert ! d3d11videosink
 * ```
 * This will output a test video (generated in NV12 format) in a video
 * window. If the video sink selected does not support NV12,
 * d3d11convert will automatically convert the video to a format understood
 * by the video sink.
 *
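 * As an illustrative (untested) example, scaling to a fixed output size while
 * keeping the display aspect ratio with black borders could look like:
 * ```
 * gst-launch-1.0 videotestsrc ! video/x-raw,width=640,height=480 ! d3d11upload ! d3d11convert add-borders=true ! "video/x-raw(memory:D3D11Memory),width=1280,height=720" ! d3d11videosink
 * ```
 *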
 * Since: 1.18
 *
 */

enum
{
  PROP_CONVERT_0,
  PROP_CONVERT_ADD_BORDERS,
};

struct _GstD3D11Convert
{
  GstD3D11BaseConvert parent;
};

G_DEFINE_TYPE (GstD3D11Convert, gst_d3d11_convert, GST_TYPE_D3D11_BASE_CONVERT);

static void gst_d3d11_convert_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_d3d11_convert_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static void
gst_d3d11_convert_class_init (GstD3D11ConvertClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gobject_class->set_property = gst_d3d11_convert_set_property;
  gobject_class->get_property = gst_d3d11_convert_get_property;

  /**
   * GstD3D11Convert:add-borders:
   *
   * Add black borders if necessary to keep the display aspect ratio
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_CONVERT_ADD_BORDERS,
      g_param_spec_boolean ("add-borders", "Add Borders",
          "Add black borders if necessary to keep the display aspect ratio",
          DEFAULT_ADD_BORDERS, (GParamFlags) (GST_PARAM_MUTABLE_PLAYING |
              G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_element_class_set_static_metadata (element_class,
      "Direct3D11 colorspace converter and scaler",
      "Filter/Converter/Scaler/Video/Hardware",
      "Resizes video and allows color conversion using Direct3D11",
      "Seungha Yang <seungha.yang@navercorp.com>, "
      "Jeongki Kim <jeongki.kim@jeongki.kim>");
}

static void
gst_d3d11_convert_init (GstD3D11Convert * self)
{
}

static void
gst_d3d11_convert_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstD3D11BaseConvert *base = GST_D3D11_BASE_CONVERT (object);

  switch (prop_id) {
    case PROP_CONVERT_ADD_BORDERS:
      gst_d3d11_base_convert_set_add_border (base, g_value_get_boolean (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_d3d11_convert_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstD3D11BaseConvert *base = GST_D3D11_BASE_CONVERT (object);

  switch (prop_id) {
    case PROP_CONVERT_ADD_BORDERS:
      g_value_set_boolean (value, base->add_borders);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

/**
 * SECTION:element-d3d11colorconvert
 * @title: d3d11colorconvert
 *
 * A Direct3D11 based color conversion element
 *
 * ## Example launch line
 * ```
 * gst-launch-1.0 videotestsrc ! video/x-raw,format=NV12 ! d3d11upload ! d3d11colorconvert ! d3d11download ! video/x-raw,format=RGBA ! fakesink
 * ```
 * This will upload a test video (generated in NV12 format) to Direct3D11
 * memory space and convert it to RGBA format. Then a converted Direct3D11
 * frame will be downloaded to system memory space.
 *
 * Since: 1.20
 *
 */
struct _GstD3D11ColorConvert
{
  GstD3D11BaseConvert parent;
};

static GstCaps *gst_d3d11_color_convert_transform_caps (GstBaseTransform *
    trans, GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static GstCaps *gst_d3d11_color_convert_fixate_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);

G_DEFINE_TYPE (GstD3D11ColorConvert, gst_d3d11_color_convert,
    GST_TYPE_D3D11_BASE_CONVERT);

static void
gst_d3d11_color_convert_class_init (GstD3D11ColorConvertClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);

  gst_element_class_set_static_metadata (element_class,
      "Direct3D11 colorspace converter",
      "Filter/Converter/Video/Hardware",
      "Color conversion using Direct3D11",
      "Seungha Yang <seungha@centricular.com>");

  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_d3d11_color_convert_transform_caps);
  trans_class->fixate_caps =
      GST_DEBUG_FUNCPTR (gst_d3d11_color_convert_fixate_caps);
}

static void
gst_d3d11_color_convert_init (GstD3D11ColorConvert * self)
{
}

static GstCaps *
gst_d3d11_color_convert_transform_caps (GstBaseTransform *
    trans, GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstCaps *tmp, *tmp2;
  GstCaps *result;

  /* Get all possible caps that we can transform to */
  tmp = gst_d3d11_base_convert_caps_remove_format_info (caps);

  if (filter) {
    tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (tmp);
    tmp = tmp2;
  }

  result = tmp;

  GST_DEBUG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " into %"
      GST_PTR_FORMAT, caps, result);

  return result;
}

static GstCaps *
gst_d3d11_color_convert_fixate_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
  GstCaps *format = NULL;

  GST_DEBUG_OBJECT (base,
      "trying to fixate othercaps %" GST_PTR_FORMAT " based on caps %"
      GST_PTR_FORMAT, othercaps, caps);

  format = gst_d3d11_base_convert_get_fixed_format (base, direction, caps,
      othercaps);
  gst_caps_unref (othercaps);

  if (gst_caps_is_empty (format)) {
    GST_ERROR_OBJECT (base, "Could not convert formats");
  } else {
    GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, format);
  }

  return format;
}

/**
 * SECTION:element-d3d11scale
 * @title: d3d11scale
 *
 * A Direct3D11 based video resizing element
 *
 * ## Example launch line
 * ```
 * gst-launch-1.0 videotestsrc ! video/x-raw,width=640,height=480 ! d3d11upload ! d3d11scale ! d3d11download ! video/x-raw,width=1280,height=720 ! fakesink
 * ```
 * This will upload a 640x480 resolution test video to Direct3D11
 * memory space and resize it to 1280x720 resolution. Then a resized Direct3D11
 * frame will be downloaded to system memory space.
 *
 * Since: 1.20
 *
 */

enum
{
  PROP_SCALE_0,
  PROP_SCALE_ADD_BORDERS,
};

struct _GstD3D11Scale
{
  GstD3D11BaseConvert parent;
};

static void gst_d3d11_scale_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_d3d11_scale_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static GstCaps *gst_d3d11_scale_transform_caps (GstBaseTransform *
    trans, GstPadDirection direction, GstCaps * caps, GstCaps * filter);
static GstCaps *gst_d3d11_scale_fixate_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);

G_DEFINE_TYPE (GstD3D11Scale, gst_d3d11_scale, GST_TYPE_D3D11_BASE_CONVERT);

static void
gst_d3d11_scale_class_init (GstD3D11ScaleClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);

  gobject_class->set_property = gst_d3d11_scale_set_property;
  gobject_class->get_property = gst_d3d11_scale_get_property;

  /**
   * GstD3D11Scale:add-borders:
   *
   * Add black borders if necessary to keep the display aspect ratio
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_SCALE_ADD_BORDERS,
      g_param_spec_boolean ("add-borders", "Add Borders",
          "Add black borders if necessary to keep the display aspect ratio",
          DEFAULT_ADD_BORDERS, (GParamFlags) (GST_PARAM_MUTABLE_PLAYING |
              G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_element_class_set_static_metadata (element_class,
      "Direct3D11 scaler",
      "Filter/Converter/Video/Scaler/Hardware",
      "Resizes video using Direct3D11",
      "Seungha Yang <seungha@centricular.com>");

  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_d3d11_scale_transform_caps);
  trans_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_d3d11_scale_fixate_caps);
}

static void
gst_d3d11_scale_init (GstD3D11Scale * self)
{
}

static void
gst_d3d11_scale_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstD3D11BaseConvert *base = GST_D3D11_BASE_CONVERT (object);

  switch (prop_id) {
    case PROP_SCALE_ADD_BORDERS:
      gst_d3d11_base_convert_set_add_border (base, g_value_get_boolean (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_d3d11_scale_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstD3D11BaseConvert *base = GST_D3D11_BASE_CONVERT (object);

  switch (prop_id) {
    case PROP_SCALE_ADD_BORDERS:
      g_value_set_boolean (value, base->add_borders);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static GstCaps *
gst_d3d11_scale_transform_caps (GstBaseTransform *
    trans, GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstCaps *tmp, *tmp2;
  GstCaps *result;

  /* Get all possible caps that we can transform to */
  tmp = gst_d3d11_base_convert_caps_rangify_size_info (caps);

  if (filter) {
    tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (tmp);
    tmp = tmp2;
  }

  result = tmp;

  GST_DEBUG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " into %"
      GST_PTR_FORMAT, caps, result);

  return result;
}

static GstCaps *
gst_d3d11_scale_fixate_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
  GST_DEBUG_OBJECT (base,
      "trying to fixate othercaps %" GST_PTR_FORMAT " based on caps %"
      GST_PTR_FORMAT, othercaps, caps);

  othercaps =
      gst_d3d11_base_convert_fixate_size (base, direction, caps, othercaps);

  GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, othercaps);

  return othercaps;
}