1 /* GStreamer
2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * This file:
4 * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
5 * Copyright (C) 2010 David Schleef <ds@schleef.org>
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
16 *
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
21 */
22
23 /**
24 * SECTION:element-videoconvert
25 * @title: videoconvert
26 *
27 * Convert video frames between a great variety of video formats.
28 *
29 * ## Example launch line
30 * |[
31 * gst-launch-1.0 -v videotestsrc ! video/x-raw,format=YUY2 ! videoconvert ! autovideosink
32 * ]|
33 * This will output a test video (generated in YUY2 format) in a video
34 * window. If the video sink selected does not support YUY2 videoconvert will
35 * automatically convert the video to a format understood by the video sink.
36 *
37 */
38
39 #ifdef HAVE_CONFIG_H
40 # include "config.h"
41 #endif
42
43 #include "gstvideoconvert.h"
44
45 #include <gst/video/video.h>
46 #include <gst/video/gstvideometa.h>
47 #include <gst/video/gstvideopool.h>
48
49 #include <string.h>
50
GST_DEBUG_CATEGORY (videoconvert_debug);
#define GST_CAT_DEFAULT videoconvert_debug
/* separate category for the per-frame conversion trace; resolved from the
 * shared "GST_PERFORMANCE" category in plugin_init() */
GST_DEBUG_CATEGORY_STATIC (CAT_PERFORMANCE);

/* quark of the "colorspace" meta tag, initialised in plugin_init(); metas
 * tagged with it are not copied across the conversion (see transform_meta) */
static GQuark _colorspace_quark;

#define gst_video_convert_parent_class parent_class
G_DEFINE_TYPE (GstVideoConvert, gst_video_convert, GST_TYPE_VIDEO_FILTER);
GST_ELEMENT_REGISTER_DEFINE (videoconvert, "videoconvert",
    GST_RANK_NONE, GST_TYPE_VIDEO_CONVERT);

/* defaults for the GObject properties installed in class_init() */
#define DEFAULT_PROP_DITHER GST_VIDEO_DITHER_BAYER
#define DEFAULT_PROP_DITHER_QUANTIZATION 1
#define DEFAULT_PROP_CHROMA_RESAMPLER GST_VIDEO_RESAMPLER_METHOD_LINEAR
#define DEFAULT_PROP_ALPHA_MODE GST_VIDEO_ALPHA_MODE_COPY
#define DEFAULT_PROP_ALPHA_VALUE 1.0
#define DEFAULT_PROP_CHROMA_MODE GST_VIDEO_CHROMA_MODE_FULL
#define DEFAULT_PROP_MATRIX_MODE GST_VIDEO_MATRIX_MODE_FULL
#define DEFAULT_PROP_GAMMA_MODE GST_VIDEO_GAMMA_MODE_NONE
#define DEFAULT_PROP_PRIMARIES_MODE GST_VIDEO_PRIMARIES_MODE_NONE
#define DEFAULT_PROP_N_THREADS 1

/* property ids */
enum
{
  PROP_0,
  PROP_DITHER,
  PROP_DITHER_QUANTIZATION,
  PROP_CHROMA_RESAMPLER,
  PROP_ALPHA_MODE,
  PROP_ALPHA_VALUE,
  PROP_CHROMA_MODE,
  PROP_MATRIX_MODE,
  PROP_GAMMA_MODE,
  PROP_PRIMARIES_MODE,
  PROP_N_THREADS
};

/* both pads accept all raw formats in system memory, or any caps features
 * (format fields are only removed for convertible features, see
 * gst_video_convert_caps_remove_format_info()) */
#define CSP_VIDEO_CAPS GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL) ";" \
    GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)

static GstStaticPadTemplate gst_video_convert_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CSP_VIDEO_CAPS)
    );

static GstStaticPadTemplate gst_video_convert_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CSP_VIDEO_CAPS)
    );

static void gst_video_convert_set_property (GObject * object,
    guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_video_convert_get_property (GObject * object,
    guint property_id, GValue * value, GParamSpec * pspec);

static gboolean gst_video_convert_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
    GstVideoInfo * out_info);
static GstFlowReturn gst_video_convert_transform_frame (GstVideoFilter * filter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame);

/* caps features that gst_video_convert_caps_remove_format_info() treats as
 * convertible; created once in plugin_init() */
static GstCapsFeatures *features_format_interlaced,
    *features_format_interlaced_sysmem;
118
119 /* copies the given caps */
120 static GstCaps *
gst_video_convert_caps_remove_format_info(GstCaps * caps)121 gst_video_convert_caps_remove_format_info (GstCaps * caps)
122 {
123 GstStructure *st;
124 GstCapsFeatures *f;
125 gint i, n;
126 GstCaps *res;
127
128 res = gst_caps_new_empty ();
129
130 n = gst_caps_get_size (caps);
131 for (i = 0; i < n; i++) {
132 st = gst_caps_get_structure (caps, i);
133 f = gst_caps_get_features (caps, i);
134
135 /* If this is already expressed by the existing caps
136 * skip this structure */
137 if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
138 continue;
139
140 st = gst_structure_copy (st);
141 /* Only remove format info for the cases when we can actually convert */
142 if (!gst_caps_features_is_any (f)
143 && (gst_caps_features_is_equal (f,
144 GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY)
145 || gst_caps_features_is_equal (f, features_format_interlaced)
146 || gst_caps_features_is_equal (f,
147 features_format_interlaced_sysmem))) {
148 gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
149 NULL);
150 }
151
152 gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
153 }
154
155 return res;
156 }
157
/*
 * This is an incomplete matrix of in formats and a score for the preferred output
 * format.
 *
 * out: RGB24 RGB16 ARGB AYUV YUV444 YUV422 YUV420 YUV411 YUV410 PAL GRAY
 * in
 * RGB24 0 2 1 2 2 3 4 5 6 7 8
 * RGB16 1 0 1 2 2 3 4 5 6 7 8
 * ARGB 2 3 0 1 4 5 6 7 8 9 10
 * AYUV 3 4 1 0 2 5 6 7 8 9 10
 * YUV444 2 4 3 1 0 5 6 7 8 9 10
 * YUV422 3 5 4 2 1 0 6 7 8 9 10
 * YUV420 4 6 5 3 2 1 0 7 8 9 10
 * YUV411 4 6 5 3 2 1 7 0 8 9 10
 * YUV410 6 8 7 5 4 3 2 1 0 9 10
 * PAL 1 3 2 6 4 6 7 8 9 0 10
 * GRAY 1 4 3 2 1 5 6 7 8 9 0
 *
 * PAL or GRAY are never preferred, if we can we would convert to PAL instead
 * of GRAY, though
 * less subsampling is preferred and if any, preferably horizontal
 * We would like to keep the alpha, even if we would need to do a colorspace
 * conversion or lose depth.
 */
/* small penalties for any change at all in the given aspect */
#define SCORE_FORMAT_CHANGE 1
#define SCORE_DEPTH_CHANGE 1
#define SCORE_ALPHA_CHANGE 1
#define SCORE_CHROMA_W_CHANGE 1
#define SCORE_CHROMA_H_CHANGE 1
#define SCORE_PALETTE_CHANGE 1

/* larger penalties when information would actually be lost */
#define SCORE_COLORSPACE_LOSS 2 /* RGB <-> YUV */
#define SCORE_DEPTH_LOSS 4 /* change bit depth */
#define SCORE_ALPHA_LOSS 8 /* lose the alpha channel */
#define SCORE_CHROMA_W_LOSS 16 /* horizontal subsample (w_sub: width subsampling) */
#define SCORE_CHROMA_H_LOSS 32 /* vertical subsample (h_sub: height subsampling) */
#define SCORE_PALETTE_LOSS 64 /* convert to palette format */
#define SCORE_COLOR_LOSS 128 /* convert to GRAY */

#define COLORSPACE_MASK (GST_VIDEO_FORMAT_FLAG_YUV | \
    GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_GRAY)
#define ALPHA_MASK (GST_VIDEO_FORMAT_FLAG_ALPHA)
#define PALETTE_MASK (GST_VIDEO_FORMAT_FLAG_PALETTE)
201
202 /* calculate how much loss a conversion would be */
203 static void
score_value(GstBaseTransform * base,const GstVideoFormatInfo * in_info,const GValue * val,gint * min_loss,const GstVideoFormatInfo ** out_info)204 score_value (GstBaseTransform * base, const GstVideoFormatInfo * in_info,
205 const GValue * val, gint * min_loss, const GstVideoFormatInfo ** out_info)
206 {
207 const gchar *fname;
208 const GstVideoFormatInfo *t_info;
209 GstVideoFormatFlags in_flags, t_flags;
210 gint loss;
211
212 fname = g_value_get_string (val);
213 t_info = gst_video_format_get_info (gst_video_format_from_string (fname));
214 if (!t_info)
215 return;
216
217 /* accept input format immediately without loss */
218 if (in_info == t_info) {
219 *min_loss = 0;
220 *out_info = t_info;
221 return;
222 }
223
224 loss = SCORE_FORMAT_CHANGE;
225
226 in_flags = GST_VIDEO_FORMAT_INFO_FLAGS (in_info);
227 in_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
228 in_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
229 in_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
230
231 t_flags = GST_VIDEO_FORMAT_INFO_FLAGS (t_info);
232 t_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
233 t_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
234 t_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
235
236 if ((t_flags & PALETTE_MASK) != (in_flags & PALETTE_MASK)) {
237 loss += SCORE_PALETTE_CHANGE;
238 if (t_flags & PALETTE_MASK)
239 loss += SCORE_PALETTE_LOSS;
240 }
241
242 if ((t_flags & COLORSPACE_MASK) != (in_flags & COLORSPACE_MASK)) {
243 loss += SCORE_COLORSPACE_LOSS;
244 if (t_flags & GST_VIDEO_FORMAT_FLAG_GRAY)
245 loss += SCORE_COLOR_LOSS;
246 }
247
248 if ((t_flags & ALPHA_MASK) != (in_flags & ALPHA_MASK)) {
249 loss += SCORE_ALPHA_CHANGE;
250 if (in_flags & ALPHA_MASK)
251 loss += SCORE_ALPHA_LOSS;
252 }
253
254 if ((in_info->h_sub[1]) != (t_info->h_sub[1])) {
255 loss += SCORE_CHROMA_H_CHANGE;
256 if ((in_info->h_sub[1]) < (t_info->h_sub[1]))
257 loss += SCORE_CHROMA_H_LOSS;
258 }
259 if ((in_info->w_sub[1]) != (t_info->w_sub[1])) {
260 loss += SCORE_CHROMA_W_CHANGE;
261 if ((in_info->w_sub[1]) < (t_info->w_sub[1]))
262 loss += SCORE_CHROMA_W_LOSS;
263 }
264
265 if ((in_info->bits) != (t_info->bits)) {
266 loss += SCORE_DEPTH_CHANGE;
267 if ((in_info->bits) > (t_info->bits))
268 loss += SCORE_DEPTH_LOSS;
269 }
270
271 GST_DEBUG_OBJECT (base, "score %s -> %s = %d",
272 GST_VIDEO_FORMAT_INFO_NAME (in_info),
273 GST_VIDEO_FORMAT_INFO_NAME (t_info), loss);
274
275 if (loss < *min_loss) {
276 GST_DEBUG_OBJECT (base, "found new best %d", loss);
277 *out_info = t_info;
278 *min_loss = loss;
279 }
280 }
281
/* Fixate the "format" field on the first structure of @result by scoring
 * every candidate format in @result against the input format found in
 * @caps, keeping the one with the lowest conversion loss. */
static void
gst_video_convert_fixate_format (GstBaseTransform * base, GstCaps * caps,
    GstCaps * result)
{
  GstStructure *ins, *outs;
  const gchar *in_format;
  const GstVideoFormatInfo *in_info, *out_info = NULL;
  gint min_loss = G_MAXINT;     /* lower is better; start from the worst */
  guint i, capslen;

  ins = gst_caps_get_structure (caps, 0);
  in_format = gst_structure_get_string (ins, "format");
  if (!in_format)
    return;

  GST_DEBUG_OBJECT (base, "source format %s", in_format);

  in_info =
      gst_video_format_get_info (gst_video_format_from_string (in_format));
  if (!in_info)
    return;

  /* only the first structure of @result is fixated */
  outs = gst_caps_get_structure (result, 0);

  capslen = gst_caps_get_size (result);
  GST_DEBUG_OBJECT (base, "iterate %d structures", capslen);
  for (i = 0; i < capslen; i++) {
    GstStructure *tests;
    const GValue *format;

    tests = gst_caps_get_structure (result, i);
    format = gst_structure_get_value (tests, "format");
    /* should not happen */
    if (format == NULL)
      continue;

    if (GST_VALUE_HOLDS_LIST (format)) {
      gint j, len;

      len = gst_value_list_get_size (format);
      GST_DEBUG_OBJECT (base, "have %d formats", len);
      for (j = 0; j < len; j++) {
        const GValue *val;

        val = gst_value_list_get_value (format, j);
        if (G_VALUE_HOLDS_STRING (val)) {
          score_value (base, in_info, val, &min_loss, &out_info);
          /* a zero-loss match (same format) cannot be beaten, stop
           * scanning this list */
          if (min_loss == 0)
            break;
        }
      }
    } else if (G_VALUE_HOLDS_STRING (format)) {
      score_value (base, in_info, format, &min_loss, &out_info);
    }
  }
  if (out_info)
    gst_structure_set (outs, "format", G_TYPE_STRING,
        GST_VIDEO_FORMAT_INFO_NAME (out_info), NULL);
}
341
342 static gboolean
subsampling_unchanged(GstVideoInfo * in_info,GstVideoInfo * out_info)343 subsampling_unchanged (GstVideoInfo * in_info, GstVideoInfo * out_info)
344 {
345 gint i;
346 const GstVideoFormatInfo *in_format, *out_format;
347
348 if (GST_VIDEO_INFO_N_COMPONENTS (in_info) !=
349 GST_VIDEO_INFO_N_COMPONENTS (out_info))
350 return FALSE;
351
352 in_format = in_info->finfo;
353 out_format = out_info->finfo;
354
355 for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (in_info); i++) {
356 if (GST_VIDEO_FORMAT_INFO_W_SUB (in_format,
357 i) != GST_VIDEO_FORMAT_INFO_W_SUB (out_format, i))
358 return FALSE;
359 if (GST_VIDEO_FORMAT_INFO_H_SUB (in_format,
360 i) != GST_VIDEO_FORMAT_INFO_H_SUB (out_format, i))
361 return FALSE;
362 }
363
364 return TRUE;
365 }
366
/* Carry colorimetry (and, for YUV outputs, chroma-site) over from @in_caps
 * onto the first structure of @out_caps when the output caps do not already
 * specify them. Mutates @out_caps in place. */
static void
transfer_colorimetry_from_input (GstBaseTransform * trans, GstCaps * in_caps,
    GstCaps * out_caps)
{
  GstStructure *out_caps_s = gst_caps_get_structure (out_caps, 0);
  GstStructure *in_caps_s = gst_caps_get_structure (in_caps, 0);
  gboolean have_colorimetry =
      gst_structure_has_field (out_caps_s, "colorimetry");
  gboolean have_chroma_site =
      gst_structure_has_field (out_caps_s, "chroma-site");

  /* If the output already has colorimetry and chroma-site, stop,
   * otherwise try and transfer what we can from the input caps */
  if (have_colorimetry && have_chroma_site)
    return;

  {
    GstVideoInfo in_info, out_info;
    const GValue *in_colorimetry =
        gst_structure_get_value (in_caps_s, "colorimetry");

    if (!gst_video_info_from_caps (&in_info, in_caps)) {
      GST_WARNING_OBJECT (trans,
          "Failed to convert sink pad caps to video info");
      return;
    }
    if (!gst_video_info_from_caps (&out_info, out_caps)) {
      GST_WARNING_OBJECT (trans,
          "Failed to convert src pad caps to video info");
      return;
    }

    if (!have_colorimetry && in_colorimetry != NULL) {
      /* same colorspace family (YUV->YUV, RGB->RGB, GRAY->GRAY)? */
      if ((GST_VIDEO_INFO_IS_YUV (&out_info)
              && GST_VIDEO_INFO_IS_YUV (&in_info))
          || (GST_VIDEO_INFO_IS_RGB (&out_info)
              && GST_VIDEO_INFO_IS_RGB (&in_info))
          || (GST_VIDEO_INFO_IS_GRAY (&out_info)
              && GST_VIDEO_INFO_IS_GRAY (&in_info))) {
        /* Can transfer the colorimetry intact from the input if it has it */
        gst_structure_set_value (out_caps_s, "colorimetry", in_colorimetry);
      } else {
        gchar *colorimetry_str;

        /* Changing between YUV/RGB - forward primaries and transfer function, but use
         * default range and matrix.
         * the primaries is used for conversion between RGB and XYZ (CIE 1931 coordinate).
         * the transfer function could be another reference (e.g., HDR)
         */
        out_info.colorimetry.primaries = in_info.colorimetry.primaries;
        out_info.colorimetry.transfer = in_info.colorimetry.transfer;

        colorimetry_str =
            gst_video_colorimetry_to_string (&out_info.colorimetry);
        gst_caps_set_simple (out_caps, "colorimetry", G_TYPE_STRING,
            colorimetry_str, NULL);
        g_free (colorimetry_str);
      }
    }

    /* Only YUV output needs chroma-site. If the input was also YUV and had the same chroma
     * subsampling, transfer the siting. If the sub-sampling is changing, then the planes get
     * scaled anyway so there's no real reason to prefer the input siting. */
    if (!have_chroma_site && GST_VIDEO_INFO_IS_YUV (&out_info)) {
      if (GST_VIDEO_INFO_IS_YUV (&in_info)) {
        const GValue *in_chroma_site =
            gst_structure_get_value (in_caps_s, "chroma-site");
        if (in_chroma_site != NULL
            && subsampling_unchanged (&in_info, &out_info))
          gst_structure_set_value (out_caps_s, "chroma-site", in_chroma_site);
      }
    }
  }
}
441
/* GstBaseTransform::fixate_caps implementation.
 * Consumes the reference to @othercaps and returns fully fixated caps:
 * prefers the intersection with @caps (closest to passthrough), picks the
 * least lossy format, fixates remaining fields, and on the sink direction
 * tries to preserve the input colorimetry / chroma siting. */
static GstCaps *
gst_video_convert_fixate_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
  GstCaps *result;

  GST_DEBUG_OBJECT (trans, "trying to fixate othercaps %" GST_PTR_FORMAT
      " based on caps %" GST_PTR_FORMAT, othercaps, caps);

  /* start from caps both sides agree on; if there are none, keep
   * othercaps (exactly one of the two refs is released either way) */
  result = gst_caps_intersect (othercaps, caps);
  if (gst_caps_is_empty (result)) {
    gst_caps_unref (result);
    result = othercaps;
  } else {
    gst_caps_unref (othercaps);
  }

  GST_DEBUG_OBJECT (trans, "now fixating %" GST_PTR_FORMAT, result);

  result = gst_caps_make_writable (result);
  gst_video_convert_fixate_format (trans, caps, result);

  /* fixate remaining fields */
  result = gst_caps_fixate (result);

  if (direction == GST_PAD_SINK) {
    if (gst_caps_is_subset (caps, result)) {
      /* input caps are acceptable as-is: use them verbatim */
      gst_caps_replace (&result, caps);
    } else {
      /* Try and preserve input colorimetry / chroma information */
      transfer_colorimetry_from_input (trans, caps, result);
    }
  }

  return result;
}
478
479 static gboolean
gst_video_convert_filter_meta(GstBaseTransform * trans,GstQuery * query,GType api,const GstStructure * params)480 gst_video_convert_filter_meta (GstBaseTransform * trans, GstQuery * query,
481 GType api, const GstStructure * params)
482 {
483 /* This element cannot passthrough the crop meta, because it would convert the
484 * wrong sub-region of the image, and worst, our output image may not be large
485 * enough for the crop to be applied later */
486 if (api == GST_VIDEO_CROP_META_API_TYPE)
487 return FALSE;
488
489 /* propose all other metadata upstream */
490 return TRUE;
491 }
492
493 /* The caps can be transformed into any other caps with format info removed.
494 * However, we should prefer passthrough, so if passthrough is possible,
495 * put it first in the list. */
496 static GstCaps *
gst_video_convert_transform_caps(GstBaseTransform * btrans,GstPadDirection direction,GstCaps * caps,GstCaps * filter)497 gst_video_convert_transform_caps (GstBaseTransform * btrans,
498 GstPadDirection direction, GstCaps * caps, GstCaps * filter)
499 {
500 GstCaps *tmp, *tmp2;
501 GstCaps *result;
502
503 /* Get all possible caps that we can transform to */
504 tmp = gst_video_convert_caps_remove_format_info (caps);
505
506 if (filter) {
507 tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
508 gst_caps_unref (tmp);
509 tmp = tmp2;
510 }
511
512 result = tmp;
513
514 GST_DEBUG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " into %"
515 GST_PTR_FORMAT, caps, result);
516
517 return result;
518 }
519
520 static gboolean
gst_video_convert_transform_meta(GstBaseTransform * trans,GstBuffer * outbuf,GstMeta * meta,GstBuffer * inbuf)521 gst_video_convert_transform_meta (GstBaseTransform * trans, GstBuffer * outbuf,
522 GstMeta * meta, GstBuffer * inbuf)
523 {
524 const GstMetaInfo *info = meta->info;
525 gboolean ret;
526
527 if (gst_meta_api_type_has_tag (info->api, _colorspace_quark)) {
528 /* don't copy colorspace specific metadata, FIXME, we need a MetaTransform
529 * for the colorspace metadata. */
530 ret = FALSE;
531 } else {
532 /* copy other metadata */
533 ret = TRUE;
534 }
535 return ret;
536 }
537
/* GstVideoFilter::set_info implementation: validate that only convertible
 * fields differ between input and output, enable passthrough when the
 * conversion would be a no-op, otherwise (re)create the video converter
 * configured from the element's current property values. */
static gboolean
gst_video_convert_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
    GstVideoInfo * out_info)
{
  GstVideoConvert *space;
  GstBaseTransformClass *gstbasetransform_class =
      GST_BASE_TRANSFORM_GET_CLASS (filter);
  GstVideoInfo tmp_info;

  space = GST_VIDEO_CONVERT_CAST (filter);

  /* drop any converter left over from a previous negotiation */
  if (space->convert) {
    gst_video_converter_free (space->convert);
    space->convert = NULL;
  }

  /* these must match */
  if (in_info->width != out_info->width || in_info->height != out_info->height
      || in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)
    goto format_mismatch;

  /* if present, these must match too */
  if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)
    goto format_mismatch;

  /* if present, these must match too */
  if (in_info->interlace_mode != out_info->interlace_mode)
    goto format_mismatch;

  /* if the only thing different in the caps is the transfer function, and
   * we're converting between equivalent transfer functions, do passthrough */
  tmp_info = *in_info;
  tmp_info.colorimetry.transfer = out_info->colorimetry.transfer;
  if (gst_video_info_is_equal (&tmp_info, out_info)) {
    if (gst_video_transfer_function_is_equivalent (in_info->
            colorimetry.transfer, in_info->finfo->bits,
            out_info->colorimetry.transfer, out_info->finfo->bits)) {
      gstbasetransform_class->passthrough_on_same_caps = FALSE;
      gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
      return TRUE;
    }
  }
  /* a real conversion is needed from here on */
  gstbasetransform_class->passthrough_on_same_caps = TRUE;
  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), FALSE);

  /* build the converter using the element's current property values */
  space->convert = gst_video_converter_new (in_info, out_info,
      gst_structure_new ("GstVideoConvertConfig",
          GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD,
          space->dither,
          GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, G_TYPE_UINT,
          space->dither_quantization,
          GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD,
          GST_TYPE_VIDEO_RESAMPLER_METHOD, space->chroma_resampler,
          GST_VIDEO_CONVERTER_OPT_ALPHA_MODE,
          GST_TYPE_VIDEO_ALPHA_MODE, space->alpha_mode,
          GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE,
          G_TYPE_DOUBLE, space->alpha_value,
          GST_VIDEO_CONVERTER_OPT_CHROMA_MODE,
          GST_TYPE_VIDEO_CHROMA_MODE, space->chroma_mode,
          GST_VIDEO_CONVERTER_OPT_MATRIX_MODE,
          GST_TYPE_VIDEO_MATRIX_MODE, space->matrix_mode,
          GST_VIDEO_CONVERTER_OPT_GAMMA_MODE,
          GST_TYPE_VIDEO_GAMMA_MODE, space->gamma_mode,
          GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE,
          GST_TYPE_VIDEO_PRIMARIES_MODE, space->primaries_mode,
          GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT,
          space->n_threads, NULL));
  if (space->convert == NULL)
    goto no_convert;

  GST_DEBUG_OBJECT (filter, "converting format %s -> %s",
      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (in_info)),
      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (out_info)));

  return TRUE;

  /* ERRORS */
format_mismatch:
  {
    GST_ERROR_OBJECT (space, "input and output formats do not match");
    return FALSE;
  }
no_convert:
  {
    GST_ERROR_OBJECT (space, "could not create converter");
    return FALSE;
  }
}
627
628 static void
gst_video_convert_finalize(GObject * obj)629 gst_video_convert_finalize (GObject * obj)
630 {
631 GstVideoConvert *space = GST_VIDEO_CONVERT (obj);
632
633 if (space->convert) {
634 gst_video_converter_free (space->convert);
635 }
636
637 G_OBJECT_CLASS (parent_class)->finalize (obj);
638 }
639
/* Class initialisation: wire up vfuncs, pad templates, element metadata and
 * the conversion-tuning GObject properties. */
static void
gst_video_convert_class_init (GstVideoConvertClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;
  GstBaseTransformClass *gstbasetransform_class =
      (GstBaseTransformClass *) klass;
  GstVideoFilterClass *gstvideofilter_class = (GstVideoFilterClass *) klass;

  gobject_class->set_property = gst_video_convert_set_property;
  gobject_class->get_property = gst_video_convert_get_property;
  gobject_class->finalize = gst_video_convert_finalize;

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_video_convert_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_video_convert_sink_template);

  gst_element_class_set_static_metadata (gstelement_class,
      "Colorspace converter", "Filter/Converter/Video",
      "Converts video from one colorspace to another",
      "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");

  gstbasetransform_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_video_convert_transform_caps);
  gstbasetransform_class->fixate_caps =
      GST_DEBUG_FUNCPTR (gst_video_convert_fixate_caps);
  gstbasetransform_class->filter_meta =
      GST_DEBUG_FUNCPTR (gst_video_convert_filter_meta);
  gstbasetransform_class->transform_meta =
      GST_DEBUG_FUNCPTR (gst_video_convert_transform_meta);

  /* default; set_info() may flip this per-negotiation */
  gstbasetransform_class->passthrough_on_same_caps = TRUE;

  gstvideofilter_class->set_info =
      GST_DEBUG_FUNCPTR (gst_video_convert_set_info);
  gstvideofilter_class->transform_frame =
      GST_DEBUG_FUNCPTR (gst_video_convert_transform_frame);

  /* conversion-tuning properties; all are forwarded to the
   * GstVideoConverter configuration in set_info() */
  g_object_class_install_property (gobject_class, PROP_DITHER,
      g_param_spec_enum ("dither", "Dither", "Apply dithering while converting",
          gst_video_dither_method_get_type (), DEFAULT_PROP_DITHER,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DITHER_QUANTIZATION,
      g_param_spec_uint ("dither-quantization", "Dither Quantize",
          "Quantizer to use", 0, G_MAXUINT, DEFAULT_PROP_DITHER_QUANTIZATION,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CHROMA_RESAMPLER,
      g_param_spec_enum ("chroma-resampler", "Chroma resampler",
          "Chroma resampler method", gst_video_resampler_method_get_type (),
          DEFAULT_PROP_CHROMA_RESAMPLER,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_ALPHA_MODE,
      g_param_spec_enum ("alpha-mode", "Alpha Mode",
          "Alpha Mode to use", gst_video_alpha_mode_get_type (),
          DEFAULT_PROP_ALPHA_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_ALPHA_VALUE,
      g_param_spec_double ("alpha-value", "Alpha Value",
          "Alpha Value to use", 0.0, 1.0,
          DEFAULT_PROP_ALPHA_VALUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CHROMA_MODE,
      g_param_spec_enum ("chroma-mode", "Chroma Mode", "Chroma Resampling Mode",
          gst_video_chroma_mode_get_type (), DEFAULT_PROP_CHROMA_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_MATRIX_MODE,
      g_param_spec_enum ("matrix-mode", "Matrix Mode", "Matrix Conversion Mode",
          gst_video_matrix_mode_get_type (), DEFAULT_PROP_MATRIX_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_GAMMA_MODE,
      g_param_spec_enum ("gamma-mode", "Gamma Mode", "Gamma Conversion Mode",
          gst_video_gamma_mode_get_type (), DEFAULT_PROP_GAMMA_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PRIMARIES_MODE,
      g_param_spec_enum ("primaries-mode", "Primaries Mode",
          "Primaries Conversion Mode", gst_video_primaries_mode_get_type (),
          DEFAULT_PROP_PRIMARIES_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_N_THREADS,
      g_param_spec_uint ("n-threads", "Threads",
          "Maximum number of threads to use", 0, G_MAXUINT,
          DEFAULT_PROP_N_THREADS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
723
/* Instance initialisation: start every conversion option at its property
 * default; space->convert stays NULL until set_info() creates it. */
static void
gst_video_convert_init (GstVideoConvert * space)
{
  space->dither = DEFAULT_PROP_DITHER;
  space->dither_quantization = DEFAULT_PROP_DITHER_QUANTIZATION;
  space->chroma_resampler = DEFAULT_PROP_CHROMA_RESAMPLER;
  space->alpha_mode = DEFAULT_PROP_ALPHA_MODE;
  space->alpha_value = DEFAULT_PROP_ALPHA_VALUE;
  space->chroma_mode = DEFAULT_PROP_CHROMA_MODE;
  space->matrix_mode = DEFAULT_PROP_MATRIX_MODE;
  space->gamma_mode = DEFAULT_PROP_GAMMA_MODE;
  space->primaries_mode = DEFAULT_PROP_PRIMARIES_MODE;
  space->n_threads = DEFAULT_PROP_N_THREADS;
}
738
739 void
gst_video_convert_set_property(GObject * object,guint property_id,const GValue * value,GParamSpec * pspec)740 gst_video_convert_set_property (GObject * object, guint property_id,
741 const GValue * value, GParamSpec * pspec)
742 {
743 GstVideoConvert *csp;
744
745 csp = GST_VIDEO_CONVERT (object);
746
747 switch (property_id) {
748 case PROP_DITHER:
749 csp->dither = g_value_get_enum (value);
750 break;
751 case PROP_CHROMA_RESAMPLER:
752 csp->chroma_resampler = g_value_get_enum (value);
753 break;
754 case PROP_ALPHA_MODE:
755 csp->alpha_mode = g_value_get_enum (value);
756 break;
757 case PROP_ALPHA_VALUE:
758 csp->alpha_value = g_value_get_double (value);
759 break;
760 case PROP_CHROMA_MODE:
761 csp->chroma_mode = g_value_get_enum (value);
762 break;
763 case PROP_MATRIX_MODE:
764 csp->matrix_mode = g_value_get_enum (value);
765 break;
766 case PROP_GAMMA_MODE:
767 csp->gamma_mode = g_value_get_enum (value);
768 break;
769 case PROP_PRIMARIES_MODE:
770 csp->primaries_mode = g_value_get_enum (value);
771 break;
772 case PROP_DITHER_QUANTIZATION:
773 csp->dither_quantization = g_value_get_uint (value);
774 break;
775 case PROP_N_THREADS:
776 csp->n_threads = g_value_get_uint (value);
777 break;
778 default:
779 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
780 break;
781 }
782 }
783
784 void
gst_video_convert_get_property(GObject * object,guint property_id,GValue * value,GParamSpec * pspec)785 gst_video_convert_get_property (GObject * object, guint property_id,
786 GValue * value, GParamSpec * pspec)
787 {
788 GstVideoConvert *csp;
789
790 csp = GST_VIDEO_CONVERT (object);
791
792 switch (property_id) {
793 case PROP_DITHER:
794 g_value_set_enum (value, csp->dither);
795 break;
796 case PROP_CHROMA_RESAMPLER:
797 g_value_set_enum (value, csp->chroma_resampler);
798 break;
799 case PROP_ALPHA_MODE:
800 g_value_set_enum (value, csp->alpha_mode);
801 break;
802 case PROP_ALPHA_VALUE:
803 g_value_set_double (value, csp->alpha_value);
804 break;
805 case PROP_CHROMA_MODE:
806 g_value_set_enum (value, csp->chroma_mode);
807 break;
808 case PROP_MATRIX_MODE:
809 g_value_set_enum (value, csp->matrix_mode);
810 break;
811 case PROP_GAMMA_MODE:
812 g_value_set_enum (value, csp->gamma_mode);
813 break;
814 case PROP_PRIMARIES_MODE:
815 g_value_set_enum (value, csp->primaries_mode);
816 break;
817 case PROP_DITHER_QUANTIZATION:
818 g_value_set_uint (value, csp->dither_quantization);
819 break;
820 case PROP_N_THREADS:
821 g_value_set_uint (value, csp->n_threads);
822 break;
823 default:
824 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
825 break;
826 }
827 }
828
829 static GstFlowReturn
gst_video_convert_transform_frame(GstVideoFilter * filter,GstVideoFrame * in_frame,GstVideoFrame * out_frame)830 gst_video_convert_transform_frame (GstVideoFilter * filter,
831 GstVideoFrame * in_frame, GstVideoFrame * out_frame)
832 {
833 GstVideoConvert *space;
834
835 space = GST_VIDEO_CONVERT_CAST (filter);
836
837 GST_CAT_DEBUG_OBJECT (CAT_PERFORMANCE, filter,
838 "doing colorspace conversion from %s -> to %s",
839 GST_VIDEO_INFO_NAME (&filter->in_info),
840 GST_VIDEO_INFO_NAME (&filter->out_info));
841
842 gst_video_converter_frame (space->convert, in_frame, out_frame);
843
844 return GST_FLOW_OK;
845 }
846
/* Plugin entry: set up debug categories, the colorspace meta-tag quark and
 * the shared caps-feature globals, then register the element. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (videoconvert_debug, "videoconvert", 0,
      "Colorspace Converter");

  /* shared category used by transform_frame() for per-frame tracing */
  GST_DEBUG_CATEGORY_GET (CAT_PERFORMANCE, "GST_PERFORMANCE");

  /* meta tag used by transform_meta() to drop colorspace-dependent metas */
  _colorspace_quark = g_quark_from_static_string ("colorspace");

  /* caps features accepted for conversion besides plain system memory;
   * compared against in gst_video_convert_caps_remove_format_info() */
  features_format_interlaced =
      gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
  features_format_interlaced_sysmem =
      gst_caps_features_copy (features_format_interlaced);
  gst_caps_features_add (features_format_interlaced_sysmem,
      GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY);

  return GST_ELEMENT_REGISTER (videoconvert, plugin);
}
866
/* Register the "videoconvert" plugin with GStreamer's plugin loader. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    videoconvert, "Colorspace conversion", plugin_init, VERSION, GST_LICENSE,
    GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)