1 /* GStreamer
2 * Copyright (C) 2020 Igalia, S.L.
3 * Author: Víctor Jáquez <vjaquez@igalia.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21 /**
22 * SECTION:element-vapostproc
23 * @title: vapostproc
24 * @short_description: A VA-API base video postprocessing filter
25 *
26 * vapostproc applies different video filters to VA surfaces. These
27 * filters vary depending on the installed and chosen
28 * [VA-API](https://01.org/linuxmedia/vaapi) driver, but usually
29 * resizing and color conversion are available.
30 *
31 * The generated surfaces can be mapped onto main memory as video
32 * frames.
33 *
34 * Use gst-inspect-1.0 to introspect the available capabilities of the
35 * driver's post-processor entry point.
36 *
37 * ## Example launch line
38 * ```
39 * gst-launch-1.0 videotestsrc ! "video/x-raw,format=(string)NV12" ! vapostproc ! autovideosink
40 * ```
41 *
42 * Cropping is supported via buffers' crop meta. It's only done if the
43 * postprocessor is not in passthrough mode or if downstream doesn't
44 * support the crop meta API.
45 *
46 * ### Cropping example
47 * ```
48 * gst-launch-1.0 videotestsrc ! "video/x-raw,format=(string)NV12" ! videocrop bottom=50 left=100 ! vapostproc ! autovideosink
49 * ```
50 *
51 * If the VA driver supports the color balance filter, with controls such
52 * as hue, brightness, contrast, etc., those controls are exposed both
53 * as element properties and through the #GstColorBalance interface.
54 *
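 * ### Color balance example
 *
 * As a sketch (the exact property names and their valid ranges are
 * driver-dependent, so confirm them first with gst-inspect-1.0), a color
 * balance control can be set directly on the element:
 * ```
 * gst-launch-1.0 videotestsrc ! vapostproc contrast=1.5 ! autovideosink
 * ```
 *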
55 * Since: 1.20
56 *
57 */
58
59 /* ToDo:
60 *
61 * + HDR tone mapping
62 */
63
64 #ifdef HAVE_CONFIG_H
65 #include "config.h"
66 #endif
67
68 #include "gstvavpp.h"
69
70 #include <gst/video/video.h>
71
72 #include <va/va_drmcommon.h>
73
74 #include "gstvaallocator.h"
75 #include "gstvabasetransform.h"
76 #include "gstvacaps.h"
77 #include "gstvadisplay_priv.h"
78 #include "gstvafilter.h"
79 #include "gstvapool.h"
80 #include "gstvautils.h"
81
82 GST_DEBUG_CATEGORY_STATIC (gst_va_vpp_debug);
83 #define GST_CAT_DEFAULT gst_va_vpp_debug
84
85 #define GST_VA_VPP(obj) ((GstVaVpp *) obj)
86 #define GST_VA_VPP_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), G_TYPE_FROM_INSTANCE (obj), GstVaVppClass))
87 #define GST_VA_VPP_CLASS(klass) ((GstVaVppClass *) klass)
88
89 #define SWAP(a, b) do { const __typeof__ (a) t = a; a = b; b = t; } while (0)
90
91 typedef struct _GstVaVpp GstVaVpp;
92 typedef struct _GstVaVppClass GstVaVppClass;
93
94 struct _GstVaVppClass
95 {
96 /* GstVideoFilter overlaps functionality */
97 GstVaBaseTransformClass parent_class;
98 };
99
100 struct _GstVaVpp
101 {
102 GstVaBaseTransform parent;
103
104 gboolean rebuild_filters;
105 guint op_flags;
106
107 /* filters */
108 float denoise;
109 float sharpen;
110 float skintone;
111 float brightness;
112 float contrast;
113 float hue;
114 float saturation;
115 gboolean auto_contrast;
116 gboolean auto_brightness;
117 gboolean auto_saturation;
118 GstVideoOrientationMethod direction;
119 GstVideoOrientationMethod prev_direction;
120 GstVideoOrientationMethod tag_direction;
121 gboolean add_borders;
122 gint borders_h;
123 gint borders_w;
124
125 GList *channels;
126 };
127
128 static GstElementClass *parent_class = NULL;
129
130 struct CData
131 {
132 gchar *render_device_path;
133 gchar *description;
134 };
135
136 /* conversions that disable passthrough */
137 enum
138 {
139 VPP_CONVERT_SIZE = 1 << 0,
140 VPP_CONVERT_FORMAT = 1 << 1,
141 VPP_CONVERT_FILTERS = 1 << 2,
142 VPP_CONVERT_DIRECTION = 1 << 3,
143 VPP_CONVERT_FEATURE = 1 << 4,
144 VPP_CONVERT_CROP = 1 << 5,
145 VPP_CONVERT_DUMMY = 1 << 6,
146 };
147
148 /* *INDENT-OFF* */
149 static const gchar *caps_str =
150 GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_VA,
151 "{ NV12, I420, YV12, YUY2, RGBA, BGRA, P010_10LE, ARGB, ABGR }") " ;"
152 GST_VIDEO_CAPS_MAKE ("{ VUYA, GRAY8, NV12, NV21, YUY2, UYVY, YV12, "
153 "I420, P010_10LE, RGBA, BGRA, ARGB, ABGR }");
154 /* *INDENT-ON* */
155
156 #define META_TAG_COLORSPACE meta_tag_colorspace_quark
157 static GQuark meta_tag_colorspace_quark;
158 #define META_TAG_SIZE meta_tag_size_quark
159 static GQuark meta_tag_size_quark;
160 #define META_TAG_ORIENTATION meta_tag_orientation_quark
161 static GQuark meta_tag_orientation_quark;
162 #define META_TAG_VIDEO meta_tag_video_quark
163 static GQuark meta_tag_video_quark;
164
165 static void gst_va_vpp_colorbalance_init (gpointer iface, gpointer data);
166 static void gst_va_vpp_rebuild_filters (GstVaVpp * self);
167
168 static void
169 gst_va_vpp_dispose (GObject * object)
170 {
171 GstVaVpp *self = GST_VA_VPP (object);
172
173 if (self->channels)
174 g_list_free_full (g_steal_pointer (&self->channels), g_object_unref);
175
176 G_OBJECT_CLASS (parent_class)->dispose (object);
177 }
178
179 static void
180 gst_va_vpp_update_passthrough (GstVaVpp * self, gboolean reconf)
181 {
182 GstBaseTransform *trans = GST_BASE_TRANSFORM (self);
183 gboolean old, new;
184
185 old = gst_base_transform_is_passthrough (trans);
186
187 GST_OBJECT_LOCK (self);
188 new = (self->op_flags == 0);
189 GST_OBJECT_UNLOCK (self);
190
191 if (old != new) {
192 GST_INFO_OBJECT (self, "%s passthrough", new ? "enabling" : "disabling");
193 if (reconf)
194 gst_base_transform_reconfigure_src (trans);
195 gst_base_transform_set_passthrough (trans, new);
196 }
197 }
198
199 static void
200 _update_properties_unlocked (GstVaVpp * self)
201 {
202 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
203
204 if (!btrans->filter)
205 return;
206
207 if ((self->direction != GST_VIDEO_ORIENTATION_AUTO
208 && self->direction != self->prev_direction)
209 || (self->direction == GST_VIDEO_ORIENTATION_AUTO
210 && self->tag_direction != self->prev_direction)) {
211
212 GstVideoOrientationMethod direction =
213 (self->direction == GST_VIDEO_ORIENTATION_AUTO) ?
214 self->tag_direction : self->direction;
215
216 if (!gst_va_filter_set_orientation (btrans->filter, direction)) {
217 if (self->direction == GST_VIDEO_ORIENTATION_AUTO)
218 self->tag_direction = self->prev_direction;
219 else
220 self->direction = self->prev_direction;
221
222 self->op_flags &= ~VPP_CONVERT_DIRECTION;
223
224 /* FIXME: unlocked bus warning message */
225 GST_WARNING_OBJECT (self,
226 "Driver cannot set resquested orientation. Setting it back.");
227 } else {
228 self->prev_direction = direction;
229
230 self->op_flags |= VPP_CONVERT_DIRECTION;
231
232 gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (self));
233 }
234 } else {
235 self->op_flags &= ~VPP_CONVERT_DIRECTION;
236 }
237 }
238
239 static void
240 gst_va_vpp_set_property (GObject * object, guint prop_id,
241 const GValue * value, GParamSpec * pspec)
242 {
243 GstVaVpp *self = GST_VA_VPP (object);
244
245 GST_OBJECT_LOCK (object);
246 switch (prop_id) {
247 case GST_VA_FILTER_PROP_DENOISE:
248 self->denoise = g_value_get_float (value);
249 g_atomic_int_set (&self->rebuild_filters, TRUE);
250 break;
251 case GST_VA_FILTER_PROP_SHARPEN:
252 self->sharpen = g_value_get_float (value);
253 g_atomic_int_set (&self->rebuild_filters, TRUE);
254 break;
255 case GST_VA_FILTER_PROP_SKINTONE:
256 if (G_VALUE_TYPE (value) == G_TYPE_BOOLEAN)
257 self->skintone = (float) g_value_get_boolean (value);
258 else
259 self->skintone = g_value_get_float (value);
260 g_atomic_int_set (&self->rebuild_filters, TRUE);
261 break;
262 case GST_VA_FILTER_PROP_VIDEO_DIR:{
263 GstVideoOrientationMethod direction = g_value_get_enum (value);
264 self->prev_direction = (direction == GST_VIDEO_ORIENTATION_AUTO) ?
265 self->tag_direction : self->direction;
266 self->direction = direction;
267 break;
268 }
269 case GST_VA_FILTER_PROP_HUE:
270 self->hue = g_value_get_float (value);
271 g_atomic_int_set (&self->rebuild_filters, TRUE);
272 break;
273 case GST_VA_FILTER_PROP_SATURATION:
274 self->saturation = g_value_get_float (value);
275 g_atomic_int_set (&self->rebuild_filters, TRUE);
276 break;
277 case GST_VA_FILTER_PROP_BRIGHTNESS:
278 self->brightness = g_value_get_float (value);
279 g_atomic_int_set (&self->rebuild_filters, TRUE);
280 break;
281 case GST_VA_FILTER_PROP_CONTRAST:
282 self->contrast = g_value_get_float (value);
283 g_atomic_int_set (&self->rebuild_filters, TRUE);
284 break;
285 case GST_VA_FILTER_PROP_AUTO_SATURATION:
286 self->auto_saturation = g_value_get_boolean (value);
287 g_atomic_int_set (&self->rebuild_filters, TRUE);
288 break;
289 case GST_VA_FILTER_PROP_AUTO_BRIGHTNESS:
290 self->auto_brightness = g_value_get_boolean (value);
291 g_atomic_int_set (&self->rebuild_filters, TRUE);
292 break;
293 case GST_VA_FILTER_PROP_AUTO_CONTRAST:
294 self->auto_contrast = g_value_get_boolean (value);
295 g_atomic_int_set (&self->rebuild_filters, TRUE);
296 break;
297 case GST_VA_FILTER_PROP_DISABLE_PASSTHROUGH:{
298 gboolean disable_passthrough = g_value_get_boolean (value);
299 if (disable_passthrough)
300 self->op_flags |= VPP_CONVERT_DUMMY;
301 else
302 self->op_flags &= ~VPP_CONVERT_DUMMY;
303 break;
304 }
305 case GST_VA_FILTER_PROP_ADD_BORDERS:
306 self->add_borders = g_value_get_boolean (value);
307 break;
308 default:
309 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
310 break;
311 }
312
313 _update_properties_unlocked (self);
314 GST_OBJECT_UNLOCK (object);
315
316 gst_va_vpp_update_passthrough (self, FALSE);
317 }
318
319 static void
320 gst_va_vpp_get_property (GObject * object, guint prop_id, GValue * value,
321 GParamSpec * pspec)
322 {
323 GstVaVpp *self = GST_VA_VPP (object);
324
325 GST_OBJECT_LOCK (object);
326 switch (prop_id) {
327 case GST_VA_FILTER_PROP_DENOISE:
328 g_value_set_float (value, self->denoise);
329 break;
330 case GST_VA_FILTER_PROP_SHARPEN:
331 g_value_set_float (value, self->sharpen);
332 break;
333 case GST_VA_FILTER_PROP_SKINTONE:
334 if (G_VALUE_TYPE (value) == G_TYPE_BOOLEAN)
335 g_value_set_boolean (value, self->skintone > 0);
336 else
337 g_value_set_float (value, self->skintone);
338 break;
339 case GST_VA_FILTER_PROP_VIDEO_DIR:
340 g_value_set_enum (value, self->direction);
341 break;
342 case GST_VA_FILTER_PROP_HUE:
343 g_value_set_float (value, self->hue);
344 break;
345 case GST_VA_FILTER_PROP_SATURATION:
346 g_value_set_float (value, self->saturation);
347 break;
348 case GST_VA_FILTER_PROP_BRIGHTNESS:
349 g_value_set_float (value, self->brightness);
350 break;
351 case GST_VA_FILTER_PROP_CONTRAST:
352 g_value_set_float (value, self->contrast);
353 break;
354 case GST_VA_FILTER_PROP_AUTO_SATURATION:
355 g_value_set_boolean (value, self->auto_saturation);
356 break;
357 case GST_VA_FILTER_PROP_AUTO_BRIGHTNESS:
358 g_value_set_boolean (value, self->auto_brightness);
359 break;
360 case GST_VA_FILTER_PROP_AUTO_CONTRAST:
361 g_value_set_boolean (value, self->auto_contrast);
362 break;
363 case GST_VA_FILTER_PROP_DISABLE_PASSTHROUGH:
364 g_value_set_boolean (value, (self->op_flags & VPP_CONVERT_DUMMY));
365 break;
366 case GST_VA_FILTER_PROP_ADD_BORDERS:
367 g_value_set_boolean (value, self->add_borders);
368 break;
369 default:
370 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
371 break;
372 }
373 GST_OBJECT_UNLOCK (object);
374 }
375
376 static gboolean
377 gst_va_vpp_propose_allocation (GstBaseTransform * trans,
378 GstQuery * decide_query, GstQuery * query)
379 {
380 /* if we are not passthrough, we can handle crop meta */
381 if (decide_query)
382 gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE, NULL);
383
384 return GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
385 decide_query, query);
386 }
387
388 static void
389 gst_va_vpp_update_properties (GstVaBaseTransform * btrans)
390 {
391 GstVaVpp *self = GST_VA_VPP (btrans);
392
393 gst_va_vpp_rebuild_filters (self);
394 _update_properties_unlocked (self);
395 }
396
397 static gboolean
398 gst_va_vpp_set_info (GstVaBaseTransform * btrans, GstCaps * incaps,
399 GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
400 {
401 GstVaVpp *self = GST_VA_VPP (btrans);
402 GstCapsFeatures *infeat, *outfeat;
403 gint from_dar_n, from_dar_d, to_dar_n, to_dar_d;
404
405 if (GST_VIDEO_INFO_INTERLACE_MODE (in_info) !=
406 GST_VIDEO_INFO_INTERLACE_MODE (out_info)) {
407 GST_ERROR_OBJECT (self, "input and output formats do not match");
408 return FALSE;
409 }
410
411 /* calculate possible borders if the display-aspect-ratio changes */
412 {
413 if (!gst_util_fraction_multiply (GST_VIDEO_INFO_WIDTH (in_info),
414 GST_VIDEO_INFO_HEIGHT (in_info), GST_VIDEO_INFO_PAR_N (in_info),
415 GST_VIDEO_INFO_PAR_D (in_info), &from_dar_n, &from_dar_d)) {
416 from_dar_n = from_dar_d = -1;
417 }
418
419 if (!gst_util_fraction_multiply (GST_VIDEO_INFO_WIDTH (out_info),
420 GST_VIDEO_INFO_HEIGHT (out_info), GST_VIDEO_INFO_PAR_N (out_info),
421 GST_VIDEO_INFO_PAR_D (out_info), &to_dar_n, &to_dar_d)) {
422 to_dar_n = to_dar_d = -1;
423 }
424
425 /* if video-orientation changes consider it for borders */
426 switch (gst_va_filter_get_orientation (btrans->filter)) {
427 case GST_VIDEO_ORIENTATION_90R:
428 case GST_VIDEO_ORIENTATION_90L:
429 case GST_VIDEO_ORIENTATION_UL_LR:
430 case GST_VIDEO_ORIENTATION_UR_LL:
431 SWAP (from_dar_n, from_dar_d);
432 break;
433 default:
434 break;
435 }
436
437 self->borders_h = self->borders_w = 0;
438 if (to_dar_n != from_dar_n || to_dar_d != from_dar_d) {
439 if (self->add_borders) {
440 gint n, d, to_h, to_w;
441
442 if (from_dar_n != -1 && from_dar_d != -1
443 && gst_util_fraction_multiply (from_dar_n, from_dar_d,
444 out_info->par_d, out_info->par_n, &n, &d)) {
445 to_h = gst_util_uint64_scale_int (out_info->width, d, n);
446 if (to_h <= out_info->height) {
447 self->borders_h = out_info->height - to_h;
448 self->borders_w = 0;
449 } else {
450 to_w = gst_util_uint64_scale_int (out_info->height, n, d);
451 g_assert (to_w <= out_info->width);
452 self->borders_h = 0;
453 self->borders_w = out_info->width - to_w;
454 }
455 } else {
456 GST_WARNING_OBJECT (self, "Can't calculate borders");
457 }
458 } else {
459 GST_WARNING_OBJECT (self, "Can't keep DAR!");
460 }
461 }
462 }
463
464 if (!gst_video_info_is_equal (in_info, out_info)) {
465 if (GST_VIDEO_INFO_FORMAT (in_info) != GST_VIDEO_INFO_FORMAT (out_info))
466 self->op_flags |= VPP_CONVERT_FORMAT;
467 else
468 self->op_flags &= ~VPP_CONVERT_FORMAT;
469
470 if (GST_VIDEO_INFO_WIDTH (in_info) != GST_VIDEO_INFO_WIDTH (out_info)
471 || GST_VIDEO_INFO_HEIGHT (in_info) != GST_VIDEO_INFO_HEIGHT (out_info)
472 || self->borders_h > 0 || self->borders_w > 0)
473 self->op_flags |= VPP_CONVERT_SIZE;
474 else
475 self->op_flags &= ~VPP_CONVERT_SIZE;
476 } else {
477 self->op_flags &= ~VPP_CONVERT_FORMAT & ~VPP_CONVERT_SIZE;
478 }
479
480 infeat = gst_caps_get_features (incaps, 0);
481 outfeat = gst_caps_get_features (outcaps, 0);
482 if (!gst_caps_features_is_equal (infeat, outfeat))
483 self->op_flags |= VPP_CONVERT_FEATURE;
484 else
485 self->op_flags &= ~VPP_CONVERT_FEATURE;
486
487 if (gst_va_filter_set_video_info (btrans->filter, in_info, out_info)) {
488 gst_va_vpp_update_passthrough (self, FALSE);
489 return TRUE;
490 }
491
492 return FALSE;
493 }
494
495 static inline gboolean
496 _get_filter_value (GstVaVpp * self, VAProcFilterType type, gfloat * value)
497 {
498 gboolean ret = TRUE;
499
500 GST_OBJECT_LOCK (self);
501 switch (type) {
502 case VAProcFilterNoiseReduction:
503 *value = self->denoise;
504 break;
505 case VAProcFilterSharpening:
506 *value = self->sharpen;
507 break;
508 case VAProcFilterSkinToneEnhancement:
509 *value = self->skintone;
510 break;
511 default:
512 ret = FALSE;
513 break;
514 }
515 GST_OBJECT_UNLOCK (self);
516
517 return ret;
518 }
519
520 static inline gboolean
521 _add_filter_buffer (GstVaVpp * self, VAProcFilterType type,
522 const VAProcFilterCap * cap)
523 {
524 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
525 VAProcFilterParameterBuffer param;
526 gfloat value = 0;
527
528 if (!_get_filter_value (self, type, &value))
529 return FALSE;
530 if (value == cap->range.default_value)
531 return FALSE;
532
533 /* *INDENT-OFF* */
534 param = (VAProcFilterParameterBuffer) {
535 .type = type,
536 .value = value,
537 };
538 /* *INDENT-ON* */
539
540 return gst_va_filter_add_filter_buffer (btrans->filter, &param,
541 sizeof (param), 1);
542 }
543
544 static inline gboolean
545 _get_filter_cb_value (GstVaVpp * self, VAProcColorBalanceType type,
546 gfloat * value)
547 {
548 gboolean ret = TRUE;
549
550 GST_OBJECT_LOCK (self);
551 switch (type) {
552 case VAProcColorBalanceHue:
553 *value = self->hue;
554 break;
555 case VAProcColorBalanceSaturation:
556 *value = self->saturation;
557 break;
558 case VAProcColorBalanceBrightness:
559 *value = self->brightness;
560 break;
561 case VAProcColorBalanceContrast:
562 *value = self->contrast;
563 break;
564 case VAProcColorBalanceAutoSaturation:
565 *value = self->auto_saturation;
566 break;
567 case VAProcColorBalanceAutoBrightness:
568 *value = self->auto_brightness;
569 break;
570 case VAProcColorBalanceAutoContrast:
571 *value = self->auto_contrast;
572 break;
573 default:
574 ret = FALSE;
575 break;
576 }
577 GST_OBJECT_UNLOCK (self);
578
579 return ret;
580 }
581
582 static inline gboolean
583 _add_filter_cb_buffer (GstVaVpp * self,
584 const VAProcFilterCapColorBalance * caps, guint num_caps)
585 {
586 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
587 VAProcFilterParameterBufferColorBalance param[VAProcColorBalanceCount] =
588 { 0, };
589 gfloat value;
590 guint i, c = 0;
591
592 value = 0;
593 for (i = 0; i < num_caps && i < VAProcColorBalanceCount; i++) {
594 if (!_get_filter_cb_value (self, caps[i].type, &value))
595 continue;
596 if (value == caps[i].range.default_value)
597 continue;
598
599 /* *INDENT-OFF* */
600 param[c++] = (VAProcFilterParameterBufferColorBalance) {
601 .type = VAProcFilterColorBalance,
602 .attrib = caps[i].type,
603 .value = value,
604 };
605 /* *INDENT-ON* */
606 }
607
608 if (c > 0) {
609 return gst_va_filter_add_filter_buffer (btrans->filter, param,
610 sizeof (*param), c);
611 }
612 return FALSE;
613 }
614
615 static void
616 _build_filters (GstVaVpp * self)
617 {
618 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
619 static const VAProcFilterType filter_types[] = { VAProcFilterNoiseReduction,
620 VAProcFilterSharpening, VAProcFilterSkinToneEnhancement,
621 VAProcFilterColorBalance,
622 };
623 guint i, num_caps;
624 gboolean apply = FALSE;
625
626 for (i = 0; i < G_N_ELEMENTS (filter_types); i++) {
627 const gpointer caps = gst_va_filter_get_filter_caps (btrans->filter,
628 filter_types[i], &num_caps);
629 if (!caps)
630 continue;
631
632 switch (filter_types[i]) {
633 case VAProcFilterNoiseReduction:
634 apply |= _add_filter_buffer (self, filter_types[i], caps);
635 break;
636 case VAProcFilterSharpening:
637 apply |= _add_filter_buffer (self, filter_types[i], caps);
638 break;
639 case VAProcFilterSkinToneEnhancement:
640 apply |= _add_filter_buffer (self, filter_types[i], caps);
641 break;
642 case VAProcFilterColorBalance:
643 apply |= _add_filter_cb_buffer (self, caps, num_caps);
644 break;
645 default:
646 break;
647 }
648 }
649
650 GST_OBJECT_LOCK (self);
651 if (apply)
652 self->op_flags |= VPP_CONVERT_FILTERS;
653 else
654 self->op_flags &= ~VPP_CONVERT_FILTERS;
655 GST_OBJECT_UNLOCK (self);
656 }
657
658 static void
659 gst_va_vpp_rebuild_filters (GstVaVpp * self)
660 {
661 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
662
663 if (!g_atomic_int_get (&self->rebuild_filters))
664 return;
665
666 gst_va_filter_drop_filter_buffers (btrans->filter);
667 _build_filters (self);
668 g_atomic_int_set (&self->rebuild_filters, FALSE);
669 }
670
671 static void
672 gst_va_vpp_before_transform (GstBaseTransform * trans, GstBuffer * inbuf)
673 {
674 GstVaVpp *self = GST_VA_VPP (trans);
675 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
676 GstClockTime ts, stream_time;
677 gboolean is_passthrough;
678
679 ts = GST_BUFFER_TIMESTAMP (inbuf);
680 stream_time =
681 gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, ts);
682
683 GST_TRACE_OBJECT (self, "sync to %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
684
685 if (GST_CLOCK_TIME_IS_VALID (stream_time))
686 gst_object_sync_values (GST_OBJECT (self), stream_time);
687
688 gst_va_vpp_rebuild_filters (self);
689 gst_va_vpp_update_passthrough (self, TRUE);
690
691 /* cropping is only enabled if vapostproc is not in passthrough */
692 is_passthrough = gst_base_transform_is_passthrough (trans);
693 GST_OBJECT_LOCK (self);
694 if (!is_passthrough && gst_buffer_get_video_crop_meta (inbuf)) {
695 self->op_flags |= VPP_CONVERT_CROP;
696 } else {
697 self->op_flags &= ~VPP_CONVERT_CROP;
698 }
699 gst_va_filter_enable_cropping (btrans->filter,
700 (self->op_flags & VPP_CONVERT_CROP));
701 GST_OBJECT_UNLOCK (self);
702 }
703
704 static GstFlowReturn
705 gst_va_vpp_transform (GstBaseTransform * trans, GstBuffer * inbuf,
706 GstBuffer * outbuf)
707 {
708 GstVaVpp *self = GST_VA_VPP (trans);
709 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (trans);
710 GstBuffer *buf = NULL;
711 GstFlowReturn res = GST_FLOW_OK;
712 GstVaSample src, dst;
713
714 if (G_UNLIKELY (!btrans->negotiated))
715 goto unknown_format;
716
717 res = gst_va_base_transform_import_buffer (btrans, inbuf, &buf);
718 if (res != GST_FLOW_OK)
719 return res;
720
721 /* *INDENT-OFF* */
722 src = (GstVaSample) {
723 .buffer = buf,
724 .flags = gst_va_buffer_get_surface_flags (buf, &btrans->in_info),
725 };
726
727 dst = (GstVaSample) {
728 .buffer = outbuf,
729 .borders_h = self->borders_h,
730 .borders_w = self->borders_w,
731 .flags = gst_va_buffer_get_surface_flags (outbuf, &btrans->out_info),
732 };
733 /* *INDENT-ON* */
734
735 if (!gst_va_filter_process (btrans->filter, &src, &dst)) {
736 gst_buffer_set_flags (outbuf, GST_BUFFER_FLAG_CORRUPTED);
737 }
738
739 gst_buffer_unref (buf);
740
741 return res;
742
743 /* ERRORS */
744 unknown_format:
745 {
746 GST_ELEMENT_ERROR (self, CORE, NOT_IMPLEMENTED, (NULL), ("unknown format"));
747 return GST_FLOW_NOT_NEGOTIATED;
748 }
749 }
750
751 static gboolean
752 gst_va_vpp_transform_meta (GstBaseTransform * trans, GstBuffer * inbuf,
753 GstMeta * meta, GstBuffer * outbuf)
754 {
755 GstVaVpp *self = GST_VA_VPP (trans);
756 const GstMetaInfo *info = meta->info;
757 const gchar *const *tags;
758
759 tags = gst_meta_api_type_get_tags (info->api);
760
761 if (!tags)
762 return TRUE;
763
764 /* don't copy colorspace/size/orientation specific metadata */
765 if ((self->op_flags & VPP_CONVERT_FORMAT)
766 && gst_meta_api_type_has_tag (info->api, META_TAG_COLORSPACE))
767 return FALSE;
768 else if ((self->op_flags & (VPP_CONVERT_SIZE | VPP_CONVERT_CROP))
769 && gst_meta_api_type_has_tag (info->api, META_TAG_SIZE))
770 return FALSE;
771 else if ((self->op_flags & VPP_CONVERT_DIRECTION)
772 && gst_meta_api_type_has_tag (info->api, META_TAG_ORIENTATION))
773 return FALSE;
774 else if (gst_meta_api_type_has_tag (info->api, META_TAG_VIDEO))
775 return TRUE;
776
777 return FALSE;
778 }
779
780 /* In structures with supported caps features:
781 * + The resolution is rangified.
782 * + "pixel-aspect-ratio" is rangified, if present.
783 * + "format", "colorimetry" and "chroma-site" are removed.
784 *
785 * Structures with unsupported caps features are copied as-is.
786 */
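/* For illustration only (the actual formats and features depend on the
 * driver), a fixed VA caps structure such as
 *   video/x-raw(memory:VAMemory), format=NV12, width=1920, height=1080,
 *       pixel-aspect-ratio=1/1, colorimetry=bt709
 * would be relaxed to
 *   video/x-raw(memory:VAMemory), width=[1,MAX], height=[1,MAX],
 *       pixel-aspect-ratio=[1/MAX,MAX/1]
 */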
787 static GstCaps *
788 gst_va_vpp_caps_remove_fields (GstCaps * caps)
789 {
790 GstCaps *ret;
791 GstStructure *structure;
792 GstCapsFeatures *features;
793 gint i, j, n, m;
794
795 ret = gst_caps_new_empty ();
796
797 n = gst_caps_get_size (caps);
798 for (i = 0; i < n; i++) {
799 structure = gst_caps_get_structure (caps, i);
800 features = gst_caps_get_features (caps, i);
801
802 /* If this is already expressed by the existing caps
803 * skip this structure */
804 if (i > 0 && gst_caps_is_subset_structure_full (ret, structure, features))
805 continue;
806
807 structure = gst_structure_copy (structure);
808
809 m = gst_caps_features_get_size (features);
810 for (j = 0; j < m; j++) {
811 const gchar *feature = gst_caps_features_get_nth (features, j);
812
813 if (g_strcmp0 (feature, GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY) == 0
814 || g_strcmp0 (feature, GST_CAPS_FEATURE_MEMORY_DMABUF) == 0
815 || g_strcmp0 (feature, GST_CAPS_FEATURE_MEMORY_VA) == 0) {
816
817 /* rangify frame size */
818 gst_structure_set (structure, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
819 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
820
821 /* if pixel aspect ratio, make a range of it */
822 if (gst_structure_has_field (structure, "pixel-aspect-ratio")) {
823 gst_structure_set (structure, "pixel-aspect-ratio",
824 GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1, NULL);
825 }
826
827 /* remove format-related fields */
828 gst_structure_remove_fields (structure, "format", "colorimetry",
829 "chroma-site", NULL);
830
831 break;
832 }
833 }
834
835 gst_caps_append_structure_full (ret, structure,
836 gst_caps_features_copy (features));
837 }
838
839 return ret;
840 }
841
842 /* Returns a copy of all structures in @caps that do not have @feature_name,
843 * but now with @feature_name as their caps feature */
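/* For example (illustrative only), completing caps with
 * GST_CAPS_FEATURE_MEMORY_VA would turn a system-memory structure such as
 * video/x-raw, format=NV12 into video/x-raw(memory:VAMemory), format=NV12. */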
844 static GstCaps *
845 gst_va_vpp_complete_caps_features (const GstCaps * caps,
846 const gchar * feature_name)
847 {
848 guint i, j, m, n;
849 GstCaps *tmp;
850
851 tmp = gst_caps_new_empty ();
852
853 n = gst_caps_get_size (caps);
854 for (i = 0; i < n; i++) {
855 GstCapsFeatures *features, *orig_features;
856 GstStructure *s = gst_caps_get_structure (caps, i);
857 gboolean contained = FALSE;
858
859 orig_features = gst_caps_get_features (caps, i);
860 features = gst_caps_features_new (feature_name, NULL);
861
862 m = gst_caps_features_get_size (orig_features);
863 for (j = 0; j < m; j++) {
864 const gchar *feature = gst_caps_features_get_nth (orig_features, j);
865
866 /* if we already have the features */
867 if (gst_caps_features_contains (features, feature)) {
868 contained = TRUE;
869 break;
870 }
871 }
872
873 if (!contained && !gst_caps_is_subset_structure_full (tmp, s, features))
874 gst_caps_append_structure_full (tmp, gst_structure_copy (s), features);
875 else
876 gst_caps_features_free (features);
877 }
878
879 return tmp;
880 }
881
882 static GstCaps *
883 gst_va_vpp_transform_caps (GstBaseTransform * trans, GstPadDirection direction,
884 GstCaps * caps, GstCaps * filter)
885 {
886 GstVaVpp *self = GST_VA_VPP (trans);
887 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (trans);
888 GstCaps *ret, *tmp, *filter_caps;
889
890 GST_DEBUG_OBJECT (self,
891 "Transforming caps %" GST_PTR_FORMAT " in direction %s", caps,
892 (direction == GST_PAD_SINK) ? "sink" : "src");
893
894 filter_caps = gst_va_base_transform_get_filter_caps (btrans);
895 if (filter_caps && !gst_caps_can_intersect (caps, filter_caps)) {
896 ret = gst_caps_ref (caps);
897 goto bail;
898 }
899
900 ret = gst_va_vpp_caps_remove_fields (caps);
901
902 tmp = gst_va_vpp_complete_caps_features (ret, GST_CAPS_FEATURE_MEMORY_VA);
903 if (!gst_caps_is_subset (tmp, ret)) {
904 gst_caps_append (ret, tmp);
905 } else {
906 gst_caps_unref (tmp);
907 }
908
909 tmp = gst_va_vpp_complete_caps_features (ret, GST_CAPS_FEATURE_MEMORY_DMABUF);
910 if (!gst_caps_is_subset (tmp, ret)) {
911 gst_caps_append (ret, tmp);
912 } else {
913 gst_caps_unref (tmp);
914 }
915
916 tmp = gst_va_vpp_complete_caps_features (ret,
917 GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY);
918 if (!gst_caps_is_subset (tmp, ret)) {
919 gst_caps_append (ret, tmp);
920 } else {
921 gst_caps_unref (tmp);
922 }
923
924 bail:
925 if (filter) {
926 GstCaps *intersection;
927
928 intersection =
929 gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
930 gst_caps_unref (ret);
931 ret = intersection;
932 }
933
934 GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);
935
936 return ret;
937 }
938
939 /*
940 * This is an incomplete matrix of input formats and a score for the preferred
941 * output format.
942 *
943 * out: RGB24 RGB16 ARGB AYUV YUV444 YUV422 YUV420 YUV411 YUV410 PAL GRAY
944 * in
945 * RGB24 0 2 1 2 2 3 4 5 6 7 8
946 * RGB16 1 0 1 2 2 3 4 5 6 7 8
947 * ARGB 2 3 0 1 4 5 6 7 8 9 10
948 * AYUV 3 4 1 0 2 5 6 7 8 9 10
949 * YUV444 2 4 3 1 0 5 6 7 8 9 10
950 * YUV422 3 5 4 2 1 0 6 7 8 9 10
951 * YUV420 4 6 5 3 2 1 0 7 8 9 10
952 * YUV411 4 6 5 3 2 1 7 0 8 9 10
953 * YUV410 6 8 7 5 4 3 2 1 0 9 10
954 * PAL 1 3 2 6 4 6 7 8 9 0 10
955 * GRAY 1 4 3 2 1 5 6 7 8 9 0
956 *
957 * PAL or GRAY are never preferred; if we could, we would convert to PAL
958 * instead of GRAY, though.
959 * Less subsampling is preferred and, if any, preferably horizontal.
960 * We would like to keep the alpha, even if we would need to do colorspace
961 * conversion or lose depth.
962 */
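/* For instance (illustrative only), fixating an I420 input against an output
 * list of { NV12, GRAY8 }: NV12 is also 8-bit 4:2:0 YUV, so it only costs
 * SCORE_FORMAT_CHANGE, while GRAY8 additionally pays SCORE_COLORSPACE_LOSS
 * and SCORE_COLOR_LOSS, hence NV12 wins. */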
963 #define SCORE_FORMAT_CHANGE 1
964 #define SCORE_DEPTH_CHANGE 1
965 #define SCORE_ALPHA_CHANGE 1
966 #define SCORE_CHROMA_W_CHANGE 1
967 #define SCORE_CHROMA_H_CHANGE 1
968 #define SCORE_PALETTE_CHANGE 1
969
970 #define SCORE_COLORSPACE_LOSS 2 /* RGB <-> YUV */
971 #define SCORE_DEPTH_LOSS 4 /* change bit depth */
972 #define SCORE_ALPHA_LOSS 8 /* lose the alpha channel */
973 #define SCORE_CHROMA_W_LOSS 16 /* horizontal subsample */
974 #define SCORE_CHROMA_H_LOSS 32 /* vertical subsample */
975 #define SCORE_PALETTE_LOSS 64 /* convert to palette format */
976 #define SCORE_COLOR_LOSS 128 /* convert to GRAY */
977
978 #define COLORSPACE_MASK (GST_VIDEO_FORMAT_FLAG_YUV | \
979 GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_GRAY)
980 #define ALPHA_MASK (GST_VIDEO_FORMAT_FLAG_ALPHA)
981 #define PALETTE_MASK (GST_VIDEO_FORMAT_FLAG_PALETTE)
982
983 /* calculate how much loss a conversion would be */
984 static gboolean
985 score_value (GstVaVpp * self, const GstVideoFormatInfo * in_info,
986 GstVideoFormat format, gint * min_loss,
987 const GstVideoFormatInfo ** out_info)
988 {
989 const GstVideoFormatInfo *t_info;
990 GstVideoFormatFlags in_flags, t_flags;
991 gint loss;
992
993 t_info = gst_video_format_get_info (format);
994 if (!t_info || t_info->format == GST_VIDEO_FORMAT_UNKNOWN)
995 return FALSE;
996
997 /* accept input format immediately without loss */
998 if (in_info == t_info) {
999 *min_loss = 0;
1000 *out_info = t_info;
1001 return TRUE;
1002 }
1003
1004 loss = SCORE_FORMAT_CHANGE;
1005
1006 in_flags = GST_VIDEO_FORMAT_INFO_FLAGS (in_info);
1007 in_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
1008 in_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
1009 in_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
1010
1011 t_flags = GST_VIDEO_FORMAT_INFO_FLAGS (t_info);
1012 t_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
1013 t_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
1014 t_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
1015
1016 if ((t_flags & PALETTE_MASK) != (in_flags & PALETTE_MASK)) {
1017 loss += SCORE_PALETTE_CHANGE;
1018 if (t_flags & PALETTE_MASK)
1019 loss += SCORE_PALETTE_LOSS;
1020 }
1021
1022 if ((t_flags & COLORSPACE_MASK) != (in_flags & COLORSPACE_MASK)) {
1023 loss += SCORE_COLORSPACE_LOSS;
1024 if (t_flags & GST_VIDEO_FORMAT_FLAG_GRAY)
1025 loss += SCORE_COLOR_LOSS;
1026 }
1027
1028 if ((t_flags & ALPHA_MASK) != (in_flags & ALPHA_MASK)) {
1029 loss += SCORE_ALPHA_CHANGE;
1030 if (in_flags & ALPHA_MASK)
1031 loss += SCORE_ALPHA_LOSS;
1032 }
1033
1034 if ((in_info->h_sub[1]) != (t_info->h_sub[1])) {
1035 loss += SCORE_CHROMA_H_CHANGE;
1036 if ((in_info->h_sub[1]) < (t_info->h_sub[1]))
1037 loss += SCORE_CHROMA_H_LOSS;
1038 }
1039 if ((in_info->w_sub[1]) != (t_info->w_sub[1])) {
1040 loss += SCORE_CHROMA_W_CHANGE;
1041 if ((in_info->w_sub[1]) < (t_info->w_sub[1]))
1042 loss += SCORE_CHROMA_W_LOSS;
1043 }
1044
1045 if ((in_info->bits) != (t_info->bits)) {
1046 loss += SCORE_DEPTH_CHANGE;
1047 if ((in_info->bits) > (t_info->bits))
1048 loss += SCORE_DEPTH_LOSS;
1049 }
1050
1051 GST_DEBUG_OBJECT (self, "score %s -> %s = %d",
1052 GST_VIDEO_FORMAT_INFO_NAME (in_info),
1053 GST_VIDEO_FORMAT_INFO_NAME (t_info), loss);
1054
1055 if (loss < *min_loss) {
1056 GST_DEBUG_OBJECT (self, "found new best %d", loss);
1057 *out_info = t_info;
1058 *min_loss = loss;
1059 return TRUE;
1060 }
1061
1062 return FALSE;
1063 }
1064
1065 static GstCaps *
1066 gst_va_vpp_fixate_format (GstVaVpp * self, GstCaps * caps, GstCaps * result)
1067 {
1068 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
1069 GstStructure *ins;
1070 const gchar *in_format;
1071 const GstVideoFormatInfo *in_info, *out_info = NULL;
1072 GstCapsFeatures *features;
1073 GstVideoFormat fmt;
1074 gint min_loss = G_MAXINT;
1075 guint i, best_i, capslen;
1076
1077 ins = gst_caps_get_structure (caps, 0);
1078 in_format = gst_structure_get_string (ins, "format");
1079 if (!in_format)
1080 return NULL;
1081
1082 GST_DEBUG_OBJECT (self, "source format %s", in_format);
1083
1084 in_info =
1085 gst_video_format_get_info (gst_video_format_from_string (in_format));
1086 if (!in_info)
1087 return NULL;
1088
1089 best_i = 0;
1090 capslen = gst_caps_get_size (result);
1091 GST_DEBUG_OBJECT (self, "iterate %d structures", capslen);
1092 for (i = 0; i < capslen; i++) {
1093 GstStructure *tests;
1094 const GValue *format;
1095
1096 tests = gst_caps_get_structure (result, i);
1097 format = gst_structure_get_value (tests, "format");
1098 /* should not happen */
1099 if (format == NULL)
1100 continue;
1101
1102 features = gst_caps_get_features (result, i);
1103
1104 if (GST_VALUE_HOLDS_LIST (format)) {
1105 gint j, len;
1106
1107 len = gst_value_list_get_size (format);
1108 GST_DEBUG_OBJECT (self, "have %d formats", len);
1109 for (j = 0; j < len; j++) {
1110 const GValue *val;
1111
1112 val = gst_value_list_get_value (format, j);
1113 if (G_VALUE_HOLDS_STRING (val)) {
1114 fmt = gst_video_format_from_string (g_value_get_string (val));
1115 if (!gst_va_filter_has_video_format (btrans->filter, fmt, features))
1116 continue;
1117 if (score_value (self, in_info, fmt, &min_loss, &out_info))
1118 best_i = i;
1119 if (min_loss == 0)
1120 break;
1121 }
1122 }
1123 } else if (G_VALUE_HOLDS_STRING (format)) {
1124 fmt = gst_video_format_from_string (g_value_get_string (format));
1125 if (!gst_va_filter_has_video_format (btrans->filter, fmt, features))
1126 continue;
1127 if (score_value (self, in_info, fmt, &min_loss, &out_info))
1128 best_i = i;
1129 }
1130
1131 if (min_loss == 0)
1132 break;
1133 }
1134
1135 if (out_info) {
1136 GstCaps *ret;
1137 GstStructure *out;
1138
1139 features = gst_caps_features_copy (gst_caps_get_features (result, best_i));
1140 out = gst_structure_copy (gst_caps_get_structure (result, best_i));
1141 gst_structure_set (out, "format", G_TYPE_STRING,
1142 GST_VIDEO_FORMAT_INFO_NAME (out_info), NULL);
1143 ret = gst_caps_new_full (out, NULL);
1144 gst_caps_set_features_simple (ret, features);
1145 return ret;
1146 }
1147
1148 return NULL;
1149 }
1150
1151 static void
1152 gst_va_vpp_fixate_size (GstVaVpp * self, GstPadDirection direction,
1153 GstCaps * caps, GstCaps * othercaps)
1154 {
1155 GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
1156 GstStructure *ins, *outs;
1157 const GValue *from_par, *to_par;
1158 GValue fpar = { 0, };
1159 GValue tpar = { 0, };
1160
1161 ins = gst_caps_get_structure (caps, 0);
1162 outs = gst_caps_get_structure (othercaps, 0);
1163
1164 from_par = gst_structure_get_value (ins, "pixel-aspect-ratio");
1165 to_par = gst_structure_get_value (outs, "pixel-aspect-ratio");
1166
1167 /* If we're fixating from the sinkpad we always set the PAR and
1168 * assume that missing PAR on the sinkpad means 1/1 and
1169 * missing PAR on the srcpad means undefined
1170 */
1171 if (direction == GST_PAD_SINK) {
1172 if (!from_par) {
1173 g_value_init (&fpar, GST_TYPE_FRACTION);
1174 gst_value_set_fraction (&fpar, 1, 1);
1175 from_par = &fpar;
1176 }
1177 if (!to_par) {
1178 g_value_init (&tpar, GST_TYPE_FRACTION_RANGE);
1179 gst_value_set_fraction_range_full (&tpar, 1, G_MAXINT, G_MAXINT, 1);
1180 to_par = &tpar;
1181 }
1182 } else {
1183 if (!to_par) {
1184 g_value_init (&tpar, GST_TYPE_FRACTION);
1185 gst_value_set_fraction (&tpar, 1, 1);
1186 to_par = &tpar;
1187
1188 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
1189 NULL);
1190 }
1191 if (!from_par) {
1192 g_value_init (&fpar, GST_TYPE_FRACTION);
1193 gst_value_set_fraction (&fpar, 1, 1);
1194 from_par = &fpar;
1195 }
1196 }
1197
1198 /* we have both PAR but they might not be fixated */
1199 {
1200 gint from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d;
1201 gint w = 0, h = 0;
1202 gint from_dar_n, from_dar_d;
1203 gint num, den;
1204
1205 /* from_par should be fixed */
1206 g_return_if_fail (gst_value_is_fixed (from_par));
1207
1208 from_par_n = gst_value_get_fraction_numerator (from_par);
1209 from_par_d = gst_value_get_fraction_denominator (from_par);
1210
1211 gst_structure_get_int (ins, "width", &from_w);
1212 gst_structure_get_int (ins, "height", &from_h);
1213
1214 gst_structure_get_int (outs, "width", &w);
1215 gst_structure_get_int (outs, "height", &h);
1216
1217 /* if video-orientation changes */
1218 switch (gst_va_filter_get_orientation (btrans->filter)) {
1219 case GST_VIDEO_ORIENTATION_90R:
1220 case GST_VIDEO_ORIENTATION_90L:
1221 case GST_VIDEO_ORIENTATION_UL_LR:
1222 case GST_VIDEO_ORIENTATION_UR_LL:
1223 SWAP (from_w, from_h);
1224 SWAP (from_par_n, from_par_d);
1225 break;
1226 default:
1227 break;
1228 }
1229
1230 /* if both width and height are already fixed, we can't do anything
1231 * about it anymore */
1232 if (w && h) {
1233 guint n, d;
1234
1235 GST_DEBUG_OBJECT (self, "dimensions already set to %dx%d, not fixating",
1236 w, h);
1237 if (!gst_value_is_fixed (to_par)) {
1238 if (gst_video_calculate_display_ratio (&n, &d, from_w, from_h,
1239 from_par_n, from_par_d, w, h)) {
1240 GST_DEBUG_OBJECT (self, "fixating to_par to %dx%d", n, d);
1241 if (gst_structure_has_field (outs, "pixel-aspect-ratio"))
1242 gst_structure_fixate_field_nearest_fraction (outs,
1243 "pixel-aspect-ratio", n, d);
1244 else if (n != d)
1245 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1246 n, d, NULL);
1247 }
1248 }
1249 goto done;
1250 }
1251
1252 /* Calculate input DAR */
1253 if (!gst_util_fraction_multiply (from_w, from_h, from_par_n, from_par_d,
1254 &from_dar_n, &from_dar_d)) {
1255 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1256 ("Error calculating the output scaled size - integer overflow"));
1257 goto done;
1258 }
1259
1260 GST_DEBUG_OBJECT (self, "Input DAR is %d/%d", from_dar_n, from_dar_d);
1261
1262 /* If either width or height is fixed, there's not much we
1263 * can do either, except choosing a height or width and a PAR
1264 * that matches the DAR as well as possible
1265 */
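/* A worked example with hypothetical numbers: for a 1280x720 input with
 * 1/1 PAR (DAR 16/9), if the output height is fixed to 360 and the output
 * PAR is fixed to 1/1, the width is fixated to 360 * 16 / 9 = 640, which
 * keeps the DAR. */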
1266 if (h) {
1267 GstStructure *tmp;
1268 gint set_w, set_par_n, set_par_d;
1269
1270 GST_DEBUG_OBJECT (self, "height is fixed (%d)", h);
1271
1272 /* If the PAR is fixed too, there's not much to do
1273 * except choosing the width that is nearest to the
1274 * width with the same DAR */
1275 if (gst_value_is_fixed (to_par)) {
1276 to_par_n = gst_value_get_fraction_numerator (to_par);
1277 to_par_d = gst_value_get_fraction_denominator (to_par);
1278
1279 GST_DEBUG_OBJECT (self, "PAR is fixed %d/%d", to_par_n, to_par_d);
1280
1281 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
1282 to_par_n, &num, &den)) {
1283 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1284 ("Error calculating the output scaled size - integer overflow"));
1285 goto done;
1286 }
1287
1288 w = (guint) gst_util_uint64_scale_int_round (h, num, den);
1289 gst_structure_fixate_field_nearest_int (outs, "width", w);
1290
1291 goto done;
1292 }
1293
1294 /* The PAR is not fixed and it's quite likely that we can set
1295 * an arbitrary PAR. */
1296
1297 /* Check if we can keep the input width */
1298 tmp = gst_structure_copy (outs);
1299 gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
1300 gst_structure_get_int (tmp, "width", &set_w);
1301
1302 /* Might have failed but try to keep the DAR nonetheless by
1303 * adjusting the PAR */
1304 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, h, set_w,
1305 &to_par_n, &to_par_d)) {
1306 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1307 ("Error calculating the output scaled size - integer overflow"));
1308 gst_structure_free (tmp);
1309 goto done;
1310 }
1311
1312 if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
1313 gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
1314 gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
1315 to_par_n, to_par_d);
1316 gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
1317 &set_par_d);
1318 gst_structure_free (tmp);
1319
1320 /* Check if the adjusted PAR is accepted */
1321 if (set_par_n == to_par_n && set_par_d == to_par_d) {
1322 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1323 set_par_n != set_par_d)
1324 gst_structure_set (outs, "width", G_TYPE_INT, set_w,
1325 "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
1326 NULL);
1327 goto done;
1328 }
1329
1330 /* Otherwise scale the width to the new PAR and check if the
1331 * adjusted width is accepted. If all that fails we can't keep
1332 * the DAR */
1333 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
1334 set_par_n, &num, &den)) {
1335 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1336 ("Error calculating the output scaled size - integer overflow"));
1337 goto done;
1338 }
1339
1340 w = (guint) gst_util_uint64_scale_int_round (h, num, den);
1341 gst_structure_fixate_field_nearest_int (outs, "width", w);
1342 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1343 set_par_n != set_par_d)
1344 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1345 set_par_n, set_par_d, NULL);
1346
1347 goto done;
1348 } else if (w) {
1349 GstStructure *tmp;
1350 gint set_h, set_par_n, set_par_d;
1351
1352 GST_DEBUG_OBJECT (self, "width is fixed (%d)", w);
1353
1354 /* If the PAR is fixed too, there's not much to do
1355 * except choosing the height that is nearest to the
1356 * height with the same DAR */
1357 if (gst_value_is_fixed (to_par)) {
1358 to_par_n = gst_value_get_fraction_numerator (to_par);
1359 to_par_d = gst_value_get_fraction_denominator (to_par);
1360
1361 GST_DEBUG_OBJECT (self, "PAR is fixed %d/%d", to_par_n, to_par_d);
1362
1363 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
1364 to_par_n, &num, &den)) {
1365 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1366 ("Error calculating the output scaled size - integer overflow"));
1367 goto done;
1368 }
1369
1370 h = (guint) gst_util_uint64_scale_int_round (w, den, num);
1371 gst_structure_fixate_field_nearest_int (outs, "height", h);
1372
1373 goto done;
1374 }
1375
1376 /* The PAR is not fixed and it's quite likely that we can set
1377 * an arbitrary PAR. */
1378
1379 /* Check if we can keep the input height */
1380 tmp = gst_structure_copy (outs);
1381 gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
1382 gst_structure_get_int (tmp, "height", &set_h);
1383
1384 /* Might have failed but try to keep the DAR nonetheless by
1385 * adjusting the PAR */
1386 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, w,
1387 &to_par_n, &to_par_d)) {
1388 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1389 ("Error calculating the output scaled size - integer overflow"));
1390 gst_structure_free (tmp);
1391 goto done;
1392 }
1393 if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
1394 gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
1395 gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
1396 to_par_n, to_par_d);
1397 gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
1398 &set_par_d);
1399 gst_structure_free (tmp);
1400
1401 /* Check if the adjusted PAR is accepted */
1402 if (set_par_n == to_par_n && set_par_d == to_par_d) {
1403 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1404 set_par_n != set_par_d)
1405 gst_structure_set (outs, "height", G_TYPE_INT, set_h,
1406 "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
1407 NULL);
1408 goto done;
1409 }
1410
1411 /* Otherwise scale the height to the new PAR and check if the
1412 * adjusted height is accepted. If all that fails we can't keep
1413 * the DAR */
1414 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
1415 set_par_n, &num, &den)) {
1416 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1417 ("Error calculating the output scale sized - integer overflow"));
1418 goto done;
1419 }
1420
1421 h = (guint) gst_util_uint64_scale_int_round (w, den, num);
1422 gst_structure_fixate_field_nearest_int (outs, "height", h);
1423 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1424 set_par_n != set_par_d)
1425 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1426 set_par_n, set_par_d, NULL);
1427
1428 goto done;
1429 } else if (gst_value_is_fixed (to_par)) {
1430 GstStructure *tmp;
1431 gint set_h, set_w, f_h, f_w;
1432
1433 to_par_n = gst_value_get_fraction_numerator (to_par);
1434 to_par_d = gst_value_get_fraction_denominator (to_par);
1435
1436 /* Calculate scale factor for the PAR change */
1437 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_n,
1438 to_par_d, &num, &den)) {
1439 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1440 ("Error calculating the output scaled size - integer overflow"));
1441 goto done;
1442 }
1443
1444 /* Try to keep the input height (because of interlacing) */
1445 tmp = gst_structure_copy (outs);
1446 gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
1447 gst_structure_get_int (tmp, "height", &set_h);
1448
1449 /* This might have failed but try to scale the width
1450 * to keep the DAR nonetheless */
1451 w = (guint) gst_util_uint64_scale_int_round (set_h, num, den);
1452 gst_structure_fixate_field_nearest_int (tmp, "width", w);
1453 gst_structure_get_int (tmp, "width", &set_w);
1454 gst_structure_free (tmp);
1455
1456 /* We kept the DAR and the height is nearest to the original height */
1457 if (set_w == w) {
1458 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1459 G_TYPE_INT, set_h, NULL);
1460 goto done;
1461 }
1462
1463 f_h = set_h;
1464 f_w = set_w;
1465
1466 /* If the former failed, try to keep the input width at least */
1467 tmp = gst_structure_copy (outs);
1468 gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
1469 gst_structure_get_int (tmp, "width", &set_w);
1470
1471 /* This might have failed but try to scale the width
1472 * to keep the DAR nonetheless */
1473 h = (guint) gst_util_uint64_scale_int_round (set_w, den, num);
1474 gst_structure_fixate_field_nearest_int (tmp, "height", h);
1475 gst_structure_get_int (tmp, "height", &set_h);
1476 gst_structure_free (tmp);
1477
1478 /* We kept the DAR and the width is nearest to the original width */
1479 if (set_h == h) {
1480 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1481 G_TYPE_INT, set_h, NULL);
1482 goto done;
1483 }
1484
1485 /* If all this failed, keep the dimensions with the DAR that was closest
1486 * to the correct DAR. This changes the DAR but there's not much else to
1487 * do here.
1488 */
1489 if (set_w * ABS (set_h - h) < ABS (f_w - w) * f_h) {
1490 f_h = set_h;
1491 f_w = set_w;
1492 }
1493 gst_structure_set (outs, "width", G_TYPE_INT, f_w, "height", G_TYPE_INT,
1494 f_h, NULL);
1495 goto done;
1496 } else {
1497 GstStructure *tmp;
1498 gint set_h, set_w, set_par_n, set_par_d, tmp2;
1499
1500 /* width, height and PAR are not fixed but passthrough is not possible */
1501
1502 /* First try to keep the height and width as good as possible
1503 * and scale PAR */
1504 tmp = gst_structure_copy (outs);
1505 gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
1506 gst_structure_get_int (tmp, "height", &set_h);
1507 gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
1508 gst_structure_get_int (tmp, "width", &set_w);
1509
1510 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, set_w,
1511 &to_par_n, &to_par_d)) {
1512 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1513 ("Error calculating the output scaled size - integer overflow"));
1514 gst_structure_free (tmp);
1515 goto done;
1516 }
1517
1518 if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
1519 gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
1520 gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
1521 to_par_n, to_par_d);
1522 gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
1523 &set_par_d);
1524 gst_structure_free (tmp);
1525
1526 if (set_par_n == to_par_n && set_par_d == to_par_d) {
1527 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1528 G_TYPE_INT, set_h, NULL);
1529
1530 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1531 set_par_n != set_par_d)
1532 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1533 set_par_n, set_par_d, NULL);
1534 goto done;
1535 }
1536
1537 /* Otherwise try to scale width to keep the DAR with the set
1538 * PAR and height */
1539 if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
1540 set_par_n, &num, &den)) {
1541 GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, (NULL),
1542 ("Error calculating the output scaled size - integer overflow"));
1543 goto done;
1544 }
1545
1546 w = (guint) gst_util_uint64_scale_int_round (set_h, num, den);
1547 tmp = gst_structure_copy (outs);
1548 gst_structure_fixate_field_nearest_int (tmp, "width", w);
1549 gst_structure_get_int (tmp, "width", &tmp2);
1550 gst_structure_free (tmp);
1551
1552 if (tmp2 == w) {
1553 gst_structure_set (outs, "width", G_TYPE_INT, tmp2, "height",
1554 G_TYPE_INT, set_h, NULL);
1555 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1556 set_par_n != set_par_d)
1557 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1558 set_par_n, set_par_d, NULL);
1559 goto done;
1560 }
1561
1562 /* ... or try the same with the height */
1563 h = (guint) gst_util_uint64_scale_int_round (set_w, den, num);
1564 tmp = gst_structure_copy (outs);
1565 gst_structure_fixate_field_nearest_int (tmp, "height", h);
1566 gst_structure_get_int (tmp, "height", &tmp2);
1567 gst_structure_free (tmp);
1568
1569 if (tmp2 == h) {
1570 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1571 G_TYPE_INT, tmp2, NULL);
1572 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1573 set_par_n != set_par_d)
1574 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1575 set_par_n, set_par_d, NULL);
1576 goto done;
1577 }
1578
1579 /* If all fails we can't keep the DAR and take the nearest values
1580 * for everything from the first try */
1581 gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
1582 G_TYPE_INT, set_h, NULL);
1583 if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
1584 set_par_n != set_par_d)
1585 gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1586 set_par_n, set_par_d, NULL);
1587 }
1588 }
1589
1590 done:
1591 if (from_par == &fpar)
1592 g_value_unset (&fpar);
1593 if (to_par == &tpar)
1594 g_value_unset (&tpar);
1595 }
1596
1597 static gboolean
1598 subsampling_unchanged (GstVideoInfo * in_info, GstVideoInfo * out_info)
1599 {
1600 gint i;
1601 const GstVideoFormatInfo *in_format, *out_format;
1602
1603 if (GST_VIDEO_INFO_N_COMPONENTS (in_info) !=
1604 GST_VIDEO_INFO_N_COMPONENTS (out_info))
1605 return FALSE;
1606
1607 in_format = in_info->finfo;
1608 out_format = out_info->finfo;
1609
1610 for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (in_info); i++) {
1611 if (GST_VIDEO_FORMAT_INFO_W_SUB (in_format,
1612 i) != GST_VIDEO_FORMAT_INFO_W_SUB (out_format, i))
1613 return FALSE;
1614 if (GST_VIDEO_FORMAT_INFO_H_SUB (in_format,
1615 i) != GST_VIDEO_FORMAT_INFO_H_SUB (out_format, i))
1616 return FALSE;
1617 }
1618
1619 return TRUE;
1620 }
1621
1622 static void
1623 transfer_colorimetry_from_input (GstVaVpp * self, GstCaps * in_caps,
1624 GstCaps * out_caps)
1625 {
1626 GstStructure *out_caps_s = gst_caps_get_structure (out_caps, 0);
1627 GstStructure *in_caps_s = gst_caps_get_structure (in_caps, 0);
1628 gboolean have_colorimetry =
1629 gst_structure_has_field (out_caps_s, "colorimetry");
1630 gboolean have_chroma_site =
1631 gst_structure_has_field (out_caps_s, "chroma-site");
1632
1633 /* If the output already has colorimetry and chroma-site, stop,
1634 * otherwise try and transfer what we can from the input caps */
1635 if (have_colorimetry && have_chroma_site)
1636 return;
1637
1638 {
1639 GstVideoInfo in_info, out_info;
1640 const GValue *in_colorimetry =
1641 gst_structure_get_value (in_caps_s, "colorimetry");
1642
1643 if (!gst_video_info_from_caps (&in_info, in_caps)) {
1644 GST_WARNING_OBJECT (self,
1645 "Failed to convert sink pad caps to video info");
1646 return;
1647 }
1648 if (!gst_video_info_from_caps (&out_info, out_caps)) {
1649 GST_WARNING_OBJECT (self, "Failed to convert src pad caps to video info");
1650 return;
1651 }
1652
1653 if (!have_colorimetry && in_colorimetry != NULL) {
1654 if ((GST_VIDEO_INFO_IS_YUV (&out_info)
1655 && GST_VIDEO_INFO_IS_YUV (&in_info))
1656 || (GST_VIDEO_INFO_IS_RGB (&out_info)
1657 && GST_VIDEO_INFO_IS_RGB (&in_info))
1658 || (GST_VIDEO_INFO_IS_GRAY (&out_info)
1659 && GST_VIDEO_INFO_IS_GRAY (&in_info))) {
1660 /* Can transfer the colorimetry intact from the input if it has it */
1661 gst_structure_set_value (out_caps_s, "colorimetry", in_colorimetry);
1662 } else {
1663 gchar *colorimetry_str;
1664
1665 /* Changing between YUV/RGB - forward the primaries and the transfer
1666 * function, but use the default range and matrix.
1667 * The primaries are used for conversion between RGB and XYZ (CIE 1931
1668 * coordinates). The transfer function could be another reference (e.g., HDR).
1669 */
1670 out_info.colorimetry.primaries = in_info.colorimetry.primaries;
1671 out_info.colorimetry.transfer = in_info.colorimetry.transfer;
1672
1673 colorimetry_str =
1674 gst_video_colorimetry_to_string (&out_info.colorimetry);
1675 gst_caps_set_simple (out_caps, "colorimetry", G_TYPE_STRING,
1676 colorimetry_str, NULL);
1677 g_free (colorimetry_str);
1678 }
1679 }
1680
1681 /* Only YUV output needs chroma-site. If the input was also YUV and had the same chroma
1682 * subsampling, transfer the siting. If the sub-sampling is changing, then the planes get
1683 * scaled anyway so there's no real reason to prefer the input siting. */
1684 if (!have_chroma_site && GST_VIDEO_INFO_IS_YUV (&out_info)) {
1685 if (GST_VIDEO_INFO_IS_YUV (&in_info)) {
1686 const GValue *in_chroma_site =
1687 gst_structure_get_value (in_caps_s, "chroma-site");
1688 if (in_chroma_site != NULL
1689 && subsampling_unchanged (&in_info, &out_info))
1690 gst_structure_set_value (out_caps_s, "chroma-site", in_chroma_site);
1691 }
1692 }
1693 }
1694 }
1695
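/* Forwards fields that the postprocessor doesn't modify (interlacing,
 * multiview layout and framerate) from the input caps to the output caps,
 * unless the output caps already have them fixed. */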
static void
copy_misc_fields_from_input (GstCaps * in_caps, GstCaps * out_caps)
{
  const gchar *fields[] = { "interlace-mode", "field-order", "multiview-mode",
    "multiview-flags", "framerate"
  };
  GstStructure *out_caps_s = gst_caps_get_structure (out_caps, 0);
  GstStructure *in_caps_s = gst_caps_get_structure (in_caps, 0);
  int i;

  for (i = 0; i < G_N_ELEMENTS (fields); i++) {
    const GValue *in_field = gst_structure_get_value (in_caps_s, fields[i]);
    const GValue *out_field = gst_structure_get_value (out_caps_s, fields[i]);

    if (out_field && gst_value_is_fixed (out_field))
      continue;

    if (in_field)
      gst_structure_set_value (out_caps_s, fields[i], in_field);
  }
}

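/* Caps fixation order: first choose the output format closest to the input
 * one (gst_va_vpp_fixate_format), then fixate the size, copy the fields the
 * postprocessor doesn't change, fixate whatever remains and, when fixating
 * the source pad, try to preserve the input colorimetry and chroma siting. */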
static GstCaps *
gst_va_vpp_fixate_caps (GstBaseTransform * trans, GstPadDirection direction,
    GstCaps * caps, GstCaps * othercaps)
{
  GstVaVpp *self = GST_VA_VPP (trans);
  GstCaps *result;

  GST_DEBUG_OBJECT (self,
      "trying to fixate othercaps %" GST_PTR_FORMAT " based on caps %"
      GST_PTR_FORMAT, othercaps, caps);

  /* will iterate over all structures to find the one with the "best color" */
  result = gst_va_vpp_fixate_format (self, caps, othercaps);
  if (!result)
    return othercaps;

  gst_clear_caps (&othercaps);

  gst_va_vpp_fixate_size (self, direction, caps, result);

  /* some fields might be lost during the feature caps conversion */
  copy_misc_fields_from_input (caps, result);

  /* fixate remaining fields */
  result = gst_caps_fixate (result);

  if (direction == GST_PAD_SINK) {
    if (gst_caps_is_subset (caps, result)) {
      gst_caps_replace (&result, caps);
    } else {
      /* Try and preserve input colorimetry / chroma information */
      transfer_colorimetry_from_input (self, caps, result);
    }
  }

  GST_DEBUG_OBJECT (self, "fixated othercaps to %" GST_PTR_FORMAT, result);

  return result;
}

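/* Computes the width and height scale factors between input and output,
 * swapping the input dimensions when the current video direction implies a
 * 90° rotation or a transposition. */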
static void
_get_scale_factor (GstVaVpp * self, gdouble * w_factor, gdouble * h_factor)
{
  GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (self);
  gdouble w = GST_VIDEO_INFO_WIDTH (&btrans->in_info);
  gdouble h = GST_VIDEO_INFO_HEIGHT (&btrans->in_info);

  switch (self->direction) {
    case GST_VIDEO_ORIENTATION_90R:
    case GST_VIDEO_ORIENTATION_90L:
    case GST_VIDEO_ORIENTATION_UR_LL:
    case GST_VIDEO_ORIENTATION_UL_LR:{
      gdouble tmp = h;
      h = w;
      w = tmp;
      break;
    }
    default:
      break;
  }

  *w_factor = GST_VIDEO_INFO_WIDTH (&btrans->out_info);
  *w_factor /= w;

  *h_factor = GST_VIDEO_INFO_HEIGHT (&btrans->out_info);
  *h_factor /= h;
}

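/* Navigation events carry pointer coordinates that have to be compensated
 * for the applied video-direction and for the scaling between input and
 * output resolutions before the event continues upstream. Crop
 * compensation is left to the element that produced the crop (e.g.
 * videocrop). */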
static gboolean
gst_va_vpp_src_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVaVpp *self = GST_VA_VPP (trans);
  GstVaBaseTransform *btrans = GST_VA_BASE_TRANSFORM (trans);
  GstStructure *structure;
  const GstVideoInfo *in_info = &btrans->in_info, *out_info = &btrans->out_info;
  gdouble new_x = 0, new_y = 0, x = 0, y = 0, w_factor = 1, h_factor = 1;
  gboolean ret;

  GST_TRACE_OBJECT (self, "handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NAVIGATION:
      if (GST_VIDEO_INFO_WIDTH (in_info) != GST_VIDEO_INFO_WIDTH (out_info)
          || GST_VIDEO_INFO_HEIGHT (in_info) != GST_VIDEO_INFO_HEIGHT (out_info)
          || gst_va_filter_get_orientation (btrans->filter) !=
          GST_VIDEO_ORIENTATION_IDENTITY) {

        event =
            GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));

        structure = (GstStructure *) gst_event_get_structure (event);
        if (!gst_structure_get_double (structure, "pointer_x", &x)
            || !gst_structure_get_double (structure, "pointer_y", &y))
          break;

        /* video-direction compensation */
        switch (self->direction) {
          case GST_VIDEO_ORIENTATION_90R:
            new_x = y;
            new_y = GST_VIDEO_INFO_WIDTH (in_info) - 1 - x;
            break;
          case GST_VIDEO_ORIENTATION_90L:
            new_x = GST_VIDEO_INFO_HEIGHT (in_info) - 1 - y;
            new_y = x;
            break;
          case GST_VIDEO_ORIENTATION_UR_LL:
            new_x = GST_VIDEO_INFO_HEIGHT (in_info) - 1 - y;
            new_y = GST_VIDEO_INFO_WIDTH (in_info) - 1 - x;
            break;
          case GST_VIDEO_ORIENTATION_UL_LR:
            new_x = y;
            new_y = x;
            break;
          case GST_VIDEO_ORIENTATION_180:
            /* FIXME: is this correct? */
            new_x = GST_VIDEO_INFO_WIDTH (in_info) - 1 - x;
            new_y = GST_VIDEO_INFO_HEIGHT (in_info) - 1 - y;
            break;
          case GST_VIDEO_ORIENTATION_HORIZ:
            new_x = GST_VIDEO_INFO_WIDTH (in_info) - 1 - x;
            new_y = y;
            break;
          case GST_VIDEO_ORIENTATION_VERT:
            new_x = x;
            new_y = GST_VIDEO_INFO_HEIGHT (in_info) - 1 - y;
            break;
          default:
            new_x = x;
            new_y = y;
            break;
        }

        /* scale compensation */
        _get_scale_factor (self, &w_factor, &h_factor);
        new_x *= w_factor;
        new_y *= h_factor;

        /* crop compensation is done by videocrop */

        GST_TRACE_OBJECT (self, "from %fx%f to %fx%f", x, y, new_x, new_y);
        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, new_x,
            "pointer_y", G_TYPE_DOUBLE, new_y, NULL);
      }
      break;
    default:
      break;
  }

  ret = GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);

  return ret;
}

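/* When video-direction=auto, the rotation/flip to apply is derived from the
 * image-orientation tag found in upstream TAG events; with any other
 * direction setting the tag is ignored. */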
static gboolean
gst_va_vpp_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstVaVpp *self = GST_VA_VPP (trans);
  GstTagList *taglist;
  gchar *orientation;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_TAG:
      gst_event_parse_tag (event, &taglist);

      if (!gst_tag_list_get_string (taglist, "image-orientation", &orientation))
        break;

      if (self->direction != GST_VIDEO_ORIENTATION_AUTO)
        break;

      GST_DEBUG_OBJECT (self, "tag orientation %s", orientation);

      GST_OBJECT_LOCK (self);
      if (!g_strcmp0 ("rotate-0", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_IDENTITY;
      else if (!g_strcmp0 ("rotate-90", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_90R;
      else if (!g_strcmp0 ("rotate-180", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_180;
      else if (!g_strcmp0 ("rotate-270", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_90L;
      else if (!g_strcmp0 ("flip-rotate-0", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_HORIZ;
      else if (!g_strcmp0 ("flip-rotate-90", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_UL_LR;
      else if (!g_strcmp0 ("flip-rotate-180", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_VERT;
      else if (!g_strcmp0 ("flip-rotate-270", orientation))
        self->tag_direction = GST_VIDEO_ORIENTATION_UR_LL;

      _update_properties_unlocked (self);
      GST_OBJECT_UNLOCK (self);

      gst_va_vpp_update_passthrough (self, FALSE);

      break;
    default:
      break;
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
}

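/* The element class is registered per render device: when the driver's
 * postprocessor entry point can be opened, the pad template caps are
 * queried from it, otherwise the static caps_str fallback is used. */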
static void
gst_va_vpp_class_init (gpointer g_class, gpointer class_data)
{
  GstCaps *doc_caps, *caps = NULL;
  GstPadTemplate *sink_pad_templ, *src_pad_templ;
  GObjectClass *object_class = G_OBJECT_CLASS (g_class);
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (g_class);
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstVaBaseTransformClass *btrans_class = GST_VA_BASE_TRANSFORM_CLASS (g_class);
  GstVaDisplay *display;
  GstVaFilter *filter;
  struct CData *cdata = class_data;
  gchar *long_name;

  parent_class = g_type_class_peek_parent (g_class);

  btrans_class->render_device_path = g_strdup (cdata->render_device_path);

  if (cdata->description) {
    long_name = g_strdup_printf ("VA-API Video Postprocessor in %s",
        cdata->description);
  } else {
    long_name = g_strdup ("VA-API Video Postprocessor");
  }

  gst_element_class_set_metadata (element_class, long_name,
      "Filter/Converter/Video/Scaler/Hardware",
      "VA-API based video postprocessor",
      "Víctor Jáquez <vjaquez@igalia.com>");

  display = gst_va_display_drm_new_from_path (btrans_class->render_device_path);
  filter = gst_va_filter_new (display);

  if (gst_va_filter_open (filter)) {
    caps = gst_va_filter_get_caps (filter);

    /* add an ANY caps feature to enable passthrough */
    {
      GstCaps *any_caps = gst_caps_new_empty_simple ("video/x-raw");
      gst_caps_set_features_simple (any_caps, gst_caps_features_new_any ());
      caps = gst_caps_merge (caps, any_caps);
    }
  } else {
    caps = gst_caps_from_string (caps_str);
  }

  doc_caps = gst_caps_from_string (caps_str);

  sink_pad_templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
      caps);
  gst_element_class_add_pad_template (element_class, sink_pad_templ);
  gst_pad_template_set_documentation_caps (sink_pad_templ,
      gst_caps_ref (doc_caps));

  src_pad_templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
      caps);
  gst_element_class_add_pad_template (element_class, src_pad_templ);
  gst_pad_template_set_documentation_caps (src_pad_templ,
      gst_caps_ref (doc_caps));
  gst_caps_unref (doc_caps);

  gst_caps_unref (caps);

  object_class->dispose = gst_va_vpp_dispose;
  object_class->set_property = gst_va_vpp_set_property;
  object_class->get_property = gst_va_vpp_get_property;

  trans_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_va_vpp_propose_allocation);
  trans_class->transform_caps = GST_DEBUG_FUNCPTR (gst_va_vpp_transform_caps);
  trans_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_va_vpp_fixate_caps);
  trans_class->before_transform =
      GST_DEBUG_FUNCPTR (gst_va_vpp_before_transform);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_va_vpp_transform);
  trans_class->transform_meta = GST_DEBUG_FUNCPTR (gst_va_vpp_transform_meta);
  trans_class->src_event = GST_DEBUG_FUNCPTR (gst_va_vpp_src_event);
  trans_class->sink_event = GST_DEBUG_FUNCPTR (gst_va_vpp_sink_event);

  trans_class->transform_ip_on_passthrough = FALSE;

  btrans_class->set_info = GST_DEBUG_FUNCPTR (gst_va_vpp_set_info);
  btrans_class->update_properties =
      GST_DEBUG_FUNCPTR (gst_va_vpp_update_properties);

  gst_va_filter_install_properties (filter, object_class);

  g_free (long_name);
  g_free (cdata->description);
  g_free (cdata->render_device_path);
  g_free (cdata);
  gst_object_unref (filter);
  gst_object_unref (display);
}

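/* Each GstColorBalance channel is exposed with a fixed [-1000, 1000] range;
 * _set_cb_val() and _get_cb_val() translate those integer values to the
 * float range advertised by the driver for the matching property. */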
static inline void
_create_colorbalance_channel (GstVaVpp * self, const gchar * label)
{
  GstColorBalanceChannel *channel;

  channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
  channel->label = g_strdup_printf ("VA-%s", label);
  channel->min_value = -1000;
  channel->max_value = 1000;

  self->channels = g_list_append (self->channels, channel);
}

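/* Instance initialization: the defaults are read back from the properties
 * that gst_va_filter_install_properties() installed in class_init, so only
 * the filters actually supported by the driver get an initial value and a
 * color balance channel. */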
static void
gst_va_vpp_init (GTypeInstance * instance, gpointer g_class)
{
  GstVaVpp *self = GST_VA_VPP (instance);
  GParamSpec *pspec;

  self->direction = GST_VIDEO_ORIENTATION_IDENTITY;
  self->prev_direction = self->direction;
  self->tag_direction = GST_VIDEO_ORIENTATION_AUTO;

  pspec = g_object_class_find_property (g_class, "denoise");
  if (pspec)
    self->denoise = g_value_get_float (g_param_spec_get_default_value (pspec));

  pspec = g_object_class_find_property (g_class, "sharpen");
  if (pspec)
    self->sharpen = g_value_get_float (g_param_spec_get_default_value (pspec));

  pspec = g_object_class_find_property (g_class, "skin-tone");
  if (pspec) {
    const GValue *value = g_param_spec_get_default_value (pspec);
    if (G_VALUE_TYPE (value) == G_TYPE_BOOLEAN)
      self->skintone = g_value_get_boolean (value);
    else
      self->skintone = g_value_get_float (value);
  }

  /* color balance */
  pspec = g_object_class_find_property (g_class, "brightness");
  if (pspec) {
    self->brightness =
        g_value_get_float (g_param_spec_get_default_value (pspec));
    _create_colorbalance_channel (self, "BRIGHTNESS");
  }
  pspec = g_object_class_find_property (g_class, "contrast");
  if (pspec) {
    self->contrast = g_value_get_float (g_param_spec_get_default_value (pspec));
    _create_colorbalance_channel (self, "CONTRAST");
  }
  pspec = g_object_class_find_property (g_class, "hue");
  if (pspec) {
    self->hue = g_value_get_float (g_param_spec_get_default_value (pspec));
    _create_colorbalance_channel (self, "HUE");
  }
  pspec = g_object_class_find_property (g_class, "saturation");
  if (pspec) {
    self->saturation =
        g_value_get_float (g_param_spec_get_default_value (pspec));
    _create_colorbalance_channel (self, "SATURATION");
  }

  /* enable QoS */
  gst_base_transform_set_qos_enabled (GST_BASE_TRANSFORM (instance), TRUE);
}

static gpointer
_register_debug_category (gpointer data)
{
  GST_DEBUG_CATEGORY_INIT (gst_va_vpp_debug, "vapostproc", 0,
      "VA Video Postprocessor");

#define D(type) \
  G_PASTE (META_TAG_, type) = \
    g_quark_from_static_string (G_PASTE (G_PASTE (GST_META_TAG_VIDEO_, type), _STR))
  D (COLORSPACE);
  D (SIZE);
  D (ORIENTATION);
#undef D
  META_TAG_VIDEO = g_quark_from_static_string (GST_META_TAG_VIDEO_STR);

  return NULL;
}

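/* Registers the postprocessor element for a given VA device. The first
 * device keeps the plain "vapostproc" name; further devices get the render
 * node basename inserted into their type and feature names. For example
 * (illustrative device path), a second device at /dev/dri/renderD129 would
 * be registered as "varenderD129postproc" with a slightly lower rank. */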
gboolean
gst_va_vpp_register (GstPlugin * plugin, GstVaDevice * device,
    gboolean has_colorbalance, guint rank)
{
  static GOnce debug_once = G_ONCE_INIT;
  GType type;
  GTypeInfo type_info = {
    .class_size = sizeof (GstVaVppClass),
    .class_init = gst_va_vpp_class_init,
    .instance_size = sizeof (GstVaVpp),
    .instance_init = gst_va_vpp_init,
  };
  struct CData *cdata;
  gboolean ret;
  gchar *type_name, *feature_name;

  g_return_val_if_fail (GST_IS_PLUGIN (plugin), FALSE);
  g_return_val_if_fail (GST_IS_VA_DEVICE (device), FALSE);

  cdata = g_new (struct CData, 1);
  cdata->description = NULL;
  cdata->render_device_path = g_strdup (device->render_device_path);

  type_info.class_data = cdata;

  type_name = g_strdup ("GstVaPostProc");
  feature_name = g_strdup ("vapostproc");

  /* The first postprocessor to be registered should use a constant
   * name, like vapostproc. For any additional postprocessors, we
   * create unique names by inserting the render device name. */
  if (g_type_from_name (type_name)) {
    gchar *basename = g_path_get_basename (device->render_device_path);
    g_free (type_name);
    g_free (feature_name);
    type_name = g_strdup_printf ("GstVa%sPostProc", basename);
    feature_name = g_strdup_printf ("va%spostproc", basename);
    cdata->description = basename;

    /* lower rank for non-first device */
    if (rank > 0)
      rank--;
  }

  g_once (&debug_once, _register_debug_category, NULL);

  type = g_type_register_static (GST_TYPE_VA_BASE_TRANSFORM, type_name,
      &type_info, 0);

  if (has_colorbalance) {
    const GInterfaceInfo info = { gst_va_vpp_colorbalance_init, NULL, NULL };
    g_type_add_interface_static (type, GST_TYPE_COLOR_BALANCE, &info);
  }

  ret = gst_element_register (plugin, feature_name, rank, type);

  g_free (type_name);
  g_free (feature_name);

  return ret;
}

/* Color Balance interface */
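/* A minimal sketch of how an application could drive this interface,
 * assuming "vapostproc" was created with gst_element_factory_make() and
 * the driver exposes at least one color balance filter:
 *
 *   GstColorBalance *cb = GST_COLOR_BALANCE (vapostproc);
 *   const GList *channels = gst_color_balance_list_channels (cb);
 *   GstColorBalanceChannel *channel = channels->data;
 *
 *   gst_color_balance_set_value (cb, channel, 500);
 *   g_print ("%s = %d\n", channel->label,
 *       gst_color_balance_get_value (cb, channel));
 */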
static const GList *
gst_va_vpp_colorbalance_list_channels (GstColorBalance * balance)
{
  GstVaVpp *self = GST_VA_VPP (balance);

  return self->channels;
}

/* This assumes, as happens with Intel drivers, that the maximum value is
 * bigger than the symmetrical counterpart of the minimum value */
static float
make_max_simmetrical (GParamSpecFloat * fpspec)
{
  gfloat max;

  if (fpspec->default_value == 0)
    max = -fpspec->minimum;
  else
    max = fpspec->default_value + ABS (fpspec->minimum - fpspec->default_value);

  return MIN (max, fpspec->maximum);
}

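/* The channel value in [cmin, cmax] = [-1000, 1000] maps linearly onto the
 * driver float range [fmin, max] and back:
 *
 *   cb    = (value - cmin) * (max - fmin) / (cmax - cmin) + fmin
 *   value = (cb - fmin) * (cmax - cmin) / (max - fmin) + cmin
 *
 * For instance, with a hypothetical driver range of [-100, 100] and default
 * 0, setting the channel to 500 yields a filter value of 50. */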
static gboolean
_set_cb_val (GstVaVpp * self, const gchar * name,
    GstColorBalanceChannel * channel, gint value, gfloat * cb)
{
  GObjectClass *klass = G_OBJECT_CLASS (GST_VA_VPP_GET_CLASS (self));
  GParamSpec *pspec;
  GParamSpecFloat *fpspec;
  gfloat new_value, max;
  gboolean changed;

  pspec = g_object_class_find_property (klass, name);
  if (!pspec)
    return FALSE;

  fpspec = G_PARAM_SPEC_FLOAT (pspec);
  max = make_max_simmetrical (fpspec);

  new_value = (value - channel->min_value) * (max - fpspec->minimum)
      / (channel->max_value - channel->min_value) + fpspec->minimum;

  GST_OBJECT_LOCK (self);
  changed = new_value != *cb;
  *cb = new_value;
  /* report back the stored value, inverting the mapping above */
  value = (*cb - fpspec->minimum) * (channel->max_value - channel->min_value)
      / (max - fpspec->minimum) + channel->min_value;
  GST_OBJECT_UNLOCK (self);

  if (changed) {
    GST_INFO_OBJECT (self, "%s: %d / %f", channel->label, value, new_value);
    gst_color_balance_value_changed (GST_COLOR_BALANCE (self), channel, value);
    g_atomic_int_set (&self->rebuild_filters, TRUE);
  }

  return TRUE;
}

static void
gst_va_vpp_colorbalance_set_value (GstColorBalance * balance,
    GstColorBalanceChannel * channel, gint value)
{
  GstVaVpp *self = GST_VA_VPP (balance);

  if (g_str_has_suffix (channel->label, "HUE"))
    _set_cb_val (self, "hue", channel, value, &self->hue);
  else if (g_str_has_suffix (channel->label, "BRIGHTNESS"))
    _set_cb_val (self, "brightness", channel, value, &self->brightness);
  else if (g_str_has_suffix (channel->label, "CONTRAST"))
    _set_cb_val (self, "contrast", channel, value, &self->contrast);
  else if (g_str_has_suffix (channel->label, "SATURATION"))
    _set_cb_val (self, "saturation", channel, value, &self->saturation);
}

static gboolean
_get_cb_val (GstVaVpp * self, const gchar * name,
    GstColorBalanceChannel * channel, gfloat * cb, gint * val)
{
  GObjectClass *klass = G_OBJECT_CLASS (GST_VA_VPP_GET_CLASS (self));
  GParamSpec *pspec;
  GParamSpecFloat *fpspec;
  gfloat max;

  pspec = g_object_class_find_property (klass, name);
  if (!pspec)
    return FALSE;

  fpspec = G_PARAM_SPEC_FLOAT (pspec);
  max = make_max_simmetrical (fpspec);

  GST_OBJECT_LOCK (self);
  /* inverse of the mapping used in _set_cb_val() */
  *val = (*cb - fpspec->minimum) * (channel->max_value - channel->min_value)
      / (max - fpspec->minimum) + channel->min_value;
  GST_OBJECT_UNLOCK (self);

  return TRUE;
}

static gint
gst_va_vpp_colorbalance_get_value (GstColorBalance * balance,
    GstColorBalanceChannel * channel)
{
  GstVaVpp *self = GST_VA_VPP (balance);
  gint value = 0;

  if (g_str_has_suffix (channel->label, "HUE"))
    _get_cb_val (self, "hue", channel, &self->hue, &value);
  else if (g_str_has_suffix (channel->label, "BRIGHTNESS"))
    _get_cb_val (self, "brightness", channel, &self->brightness, &value);
  else if (g_str_has_suffix (channel->label, "CONTRAST"))
    _get_cb_val (self, "contrast", channel, &self->contrast, &value);
  else if (g_str_has_suffix (channel->label, "SATURATION"))
    _get_cb_val (self, "saturation", channel, &self->saturation, &value);

  return value;
}

static GstColorBalanceType
gst_va_vpp_colorbalance_get_balance_type (GstColorBalance * balance)
{
  return GST_COLOR_BALANCE_HARDWARE;
}

static void
gst_va_vpp_colorbalance_init (gpointer iface, gpointer data)
{
  GstColorBalanceInterface *cbiface = iface;

  cbiface->list_channels = gst_va_vpp_colorbalance_list_channels;
  cbiface->set_value = gst_va_vpp_colorbalance_set_value;
  cbiface->get_value = gst_va_vpp_colorbalance_get_value;
  cbiface->get_balance_type = gst_va_vpp_colorbalance_get_balance_type;
}