• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Generic video aggregator plugin
2  * Copyright (C) 2004, 2008 Wim Taymans <wim@fluendo.com>
3  * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
4  *
5  * This library is free software; you can redistribute it and/or
6  * modify it under the terms of the GNU Library General Public
7  * License as published by the Free Software Foundation; either
8  * version 2 of the License, or (at your option) any later version.
9  *
10  * This library is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13  * Library General Public License for more details.
14  *
15  * You should have received a copy of the GNU Library General Public
16  * License along with this library; if not, write to the
17  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18  * Boston, MA 02110-1301, USA.
19  */
20 
21 /**
22  * SECTION:gstvideoaggregator
23  * @title: GstVideoAggregator
24  * @short_description: Base class for video aggregators
25  *
26  * VideoAggregator can accept AYUV, ARGB and BGRA video streams. For each of the requested
27  * sink pads it will compare the incoming geometry and framerate to define the
28  * output parameters. Indeed output video frames will have the geometry of the
29  * biggest incoming video stream and the framerate of the fastest incoming one.
30  *
31  * VideoAggregator will do colorspace conversion.
32  *
33  * Zorder for each input stream can be configured on the
34  * #GstVideoAggregatorPad.
35  *
36  */
37 
38 #ifdef HAVE_CONFIG_H
39 #include "config.h"
40 #endif
41 
42 #include <string.h>
43 
44 #include "gstvideoaggregator.h"
45 
46 GST_DEBUG_CATEGORY_STATIC (gst_video_aggregator_debug);
47 #define GST_CAT_DEFAULT gst_video_aggregator_debug
48 
49 /* Needed prototypes */
50 static void gst_video_aggregator_reset_qos (GstVideoAggregator * vagg);
51 
/* Instance-private state of GstVideoAggregator.
 * NOTE(review): several fields are only read/written by code outside this
 * view (the aggregate loop); comments on those are marked as assumptions. */
struct _GstVideoAggregatorPrivate
{
  /* Lock to prevent the state to change while aggregating */
  GMutex lock;

  /* Current downstream segment */
  GstClockTime ts_offset;       /* presumably an offset applied to output
                                 * timestamps — used by code not in view */
  guint64 nframes;              /* presumably a count of output frames —
                                 * used by code not in view */

  /* QoS stuff */
  gdouble proportion;
  GstClockTime earliest_time;
  guint64 qos_processed, qos_dropped;

  /* current caps */
  GstCaps *current_caps;

  gboolean live;                /* presumably TRUE in live pipelines — set
                                 * by code not in view */

  /* The (ordered) list of #GstVideoFormatInfo supported by the aggregation
     method (from the srcpad template caps). */
  GPtrArray *supported_formats;

  /* Task pool handed to gst_video_converter_new_with_pool() by the convert
   * pads so conversions can run on shared worker threads */
  GstTaskPool *task_pool;
};
77 
78 /****************************************
79  * GstVideoAggregatorPad implementation *
80  ****************************************/
81 
82 #define DEFAULT_PAD_ZORDER 0
83 #define DEFAULT_PAD_REPEAT_AFTER_EOS FALSE
84 #define DEFAULT_PAD_MAX_LAST_BUFFER_REPEAT GST_CLOCK_TIME_NONE
85 enum
86 {
87   PROP_PAD_0,
88   PROP_PAD_ZORDER,
89   PROP_PAD_REPEAT_AFTER_EOS,
90   PROP_PAD_MAX_LAST_BUFFER_REPEAT,
91 };
92 
93 
/* Instance-private state of GstVideoAggregatorPad. */
struct _GstVideoAggregatorPadPrivate
{
  GstBuffer *buffer;            /* buffer queued for the current output frame;
                                 * NULL-ness is what has_current_buffer() reports */
  GstCaps *caps;                /* caps associated with @buffer (used when
                                 * building samples in peek_next_sample()) */
  GstVideoFrame prepared_frame; /* frame mapped (and possibly converted) by
                                 * prepare_frame; zeroed when not mapped */

  /* properties */
  guint zorder;
  gboolean repeat_after_eos;
  GstClockTime max_last_buffer_repeat;

  /* Subclasses can force an alpha channel in the (input thus output)
   * colorspace format */
  gboolean needs_alpha;

  /* Running-time span of @buffer; reset to -1 (GST_CLOCK_TIME_NONE) on flush */
  GstClockTime start_time;
  GstClockTime end_time;

  /* NOTE(review): pending_vinfo/pending_caps are not touched in this view —
   * presumably caps staged until the next buffer; confirm against caps code */
  GstVideoInfo pending_vinfo;
  GstCaps *pending_caps;
};
115 
116 
117 G_DEFINE_TYPE_WITH_PRIVATE (GstVideoAggregatorPad, gst_video_aggregator_pad,
118     GST_TYPE_AGGREGATOR_PAD);
119 
120 static void
gst_video_aggregator_pad_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)121 gst_video_aggregator_pad_get_property (GObject * object, guint prop_id,
122     GValue * value, GParamSpec * pspec)
123 {
124   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);
125 
126   switch (prop_id) {
127     case PROP_PAD_ZORDER:
128       g_value_set_uint (value, pad->priv->zorder);
129       break;
130     case PROP_PAD_REPEAT_AFTER_EOS:
131       g_value_set_boolean (value, pad->priv->repeat_after_eos);
132       break;
133     case PROP_PAD_MAX_LAST_BUFFER_REPEAT:
134       g_value_set_uint64 (value, pad->priv->max_last_buffer_repeat);
135       break;
136     default:
137       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
138       break;
139   }
140 }
141 
142 static int
pad_zorder_compare(const GstVideoAggregatorPad * pad1,const GstVideoAggregatorPad * pad2)143 pad_zorder_compare (const GstVideoAggregatorPad * pad1,
144     const GstVideoAggregatorPad * pad2)
145 {
146   return pad1->priv->zorder - pad2->priv->zorder;
147 }
148 
/* GObject::set_property implementation for GstVideoAggregatorPad. */
static void
gst_video_aggregator_pad_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);

  switch (prop_id) {
    case PROP_PAD_ZORDER:{
      GstVideoAggregator *vagg =
          GST_VIDEO_AGGREGATOR (gst_pad_get_parent (GST_PAD (pad)));
      if (vagg) {
        /* Update the zorder and re-sort the element's sinkpad list in one
         * critical section, so concurrent readers of the list never observe
         * a new zorder with a stale ordering. */
        GST_OBJECT_LOCK (vagg);
        pad->priv->zorder = g_value_get_uint (value);
        GST_ELEMENT (vagg)->sinkpads =
            g_list_sort (GST_ELEMENT (vagg)->sinkpads,
            (GCompareFunc) pad_zorder_compare);
        GST_OBJECT_UNLOCK (vagg);
        gst_object_unref (vagg);
      } else {
        /* Pad has no parent aggregator yet: there is no pad list to
         * re-sort, just record the value. */
        pad->priv->zorder = g_value_get_uint (value);
      }
      break;
    }
    case PROP_PAD_REPEAT_AFTER_EOS:
      pad->priv->repeat_after_eos = g_value_get_boolean (value);
      break;
    case PROP_PAD_MAX_LAST_BUFFER_REPEAT:
      pad->priv->max_last_buffer_repeat = g_value_get_uint64 (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
183 
184 static GstFlowReturn
_flush_pad(GstAggregatorPad * aggpad,GstAggregator * aggregator)185 _flush_pad (GstAggregatorPad * aggpad, GstAggregator * aggregator)
186 {
187   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (aggregator);
188   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (aggpad);
189 
190   gst_video_aggregator_reset_qos (vagg);
191   gst_buffer_replace (&pad->priv->buffer, NULL);
192   gst_caps_replace (&pad->priv->caps, NULL);
193   pad->priv->start_time = -1;
194   pad->priv->end_time = -1;
195 
196   return GST_FLOW_OK;
197 }
198 
/* GstAggregatorPad::skip_buffer vfunc.
 *
 * Returns TRUE when @buffer ends (in running time) before the current output
 * position, i.e. it can never contribute to the next output frame and may be
 * dropped without being queued.  Only applies once the source pad's segment
 * position is known and the buffer has a valid duration. */
static gboolean
gst_video_aggregator_pad_skip_buffer (GstAggregatorPad * aggpad,
    GstAggregator * agg, GstBuffer * buffer)
{
  gboolean ret = FALSE;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  if (agg_segment->position != GST_CLOCK_TIME_NONE
      && GST_BUFFER_DURATION (buffer) != GST_CLOCK_TIME_NONE) {
    /* Convert the buffer's PTS and the output position to running time so
     * they are comparable across segments.
     * NOTE(review): gst_segment_to_running_time() can return
     * GST_CLOCK_TIME_NONE for out-of-segment timestamps, in which case
     * start_time + duration wraps — presumably callers only see in-segment
     * PTS here; confirm against the aggregator's clipping behaviour. */
    GstClockTime start_time =
        gst_segment_to_running_time (&aggpad->segment, GST_FORMAT_TIME,
        GST_BUFFER_PTS (buffer));
    GstClockTime end_time = start_time + GST_BUFFER_DURATION (buffer);
    GstClockTime output_start_running_time =
        gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
        agg_segment->position);

    ret = end_time < output_start_running_time;
  }

  return ret;
}
221 
222 static gboolean
gst_video_aggregator_pad_prepare_frame(GstVideoAggregatorPad * pad,GstVideoAggregator * vagg,GstBuffer * buffer,GstVideoFrame * prepared_frame)223 gst_video_aggregator_pad_prepare_frame (GstVideoAggregatorPad * pad,
224     GstVideoAggregator * vagg, GstBuffer * buffer,
225     GstVideoFrame * prepared_frame)
226 {
227   if (!gst_video_frame_map (prepared_frame, &pad->info, buffer, GST_MAP_READ)) {
228     GST_WARNING_OBJECT (vagg, "Could not map input buffer");
229     return FALSE;
230   }
231 
232   return TRUE;
233 }
234 
235 static void
gst_video_aggregator_pad_clean_frame(GstVideoAggregatorPad * pad,GstVideoAggregator * vagg,GstVideoFrame * prepared_frame)236 gst_video_aggregator_pad_clean_frame (GstVideoAggregatorPad * pad,
237     GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
238 {
239   if (prepared_frame->buffer) {
240     gst_video_frame_unmap (prepared_frame);
241     memset (prepared_frame, 0, sizeof (GstVideoFrame));
242   }
243 }
244 
245 static GstSample *
gst_video_aggregator_peek_next_sample(GstAggregator * agg,GstAggregatorPad * aggpad)246 gst_video_aggregator_peek_next_sample (GstAggregator * agg,
247     GstAggregatorPad * aggpad)
248 {
249   GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (aggpad);
250   GstSample *res = NULL;
251 
252   if (vaggpad->priv->buffer) {
253     res = gst_sample_new (vaggpad->priv->buffer, vaggpad->priv->caps,
254         &aggpad->segment, NULL);
255   }
256 
257   return res;
258 }
259 
/* Class initializer for GstVideoAggregatorPad: installs the pad properties
 * and wires up the default aggregator-pad / video-aggregator-pad vfuncs. */
static void
gst_video_aggregator_pad_class_init (GstVideoAggregatorPadClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstAggregatorPadClass *aggpadclass = (GstAggregatorPadClass *) klass;

  gobject_class->set_property = gst_video_aggregator_pad_set_property;
  gobject_class->get_property = gst_video_aggregator_pad_get_property;

  g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
      g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
          0, G_MAXUINT, DEFAULT_PAD_ZORDER,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PAD_REPEAT_AFTER_EOS,
      g_param_spec_boolean ("repeat-after-eos", "Repeat After EOS",
          "Repeat the " "last frame after EOS until all pads are EOS",
          DEFAULT_PAD_REPEAT_AFTER_EOS,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));

  /**
   * GstVideoAggregatorPad::max-last-buffer-repeat:
   *
   * Repeat last buffer for time (in ns, -1 = until EOS).
   * The default behaviour is for the last buffer received on a pad to be
   * aggregated until a new buffer is received.
   *
   * Setting this property causes the last buffer to be discarded once the
   * running time of the output buffer is `max-last-buffer-repeat` nanoseconds
   * past its end running time. When the buffer didn't have a duration, the
   * comparison is made against its running start time.
   *
   * This is useful in live scenarios: when a stream encounters a temporary
   * networking problem, a #GstVideoAggregator subclass can then fall back to
   * displaying a lower z-order stream, or the background.
   *
   * Setting this property doesn't affect the behaviour on EOS.
   *
   * Since: 1.18
   */
  g_object_class_install_property (gobject_class,
      PROP_PAD_MAX_LAST_BUFFER_REPEAT,
      g_param_spec_uint64 ("max-last-buffer-repeat", "Max Last Buffer Repeat",
          "Repeat last buffer for time (in ns, -1=until EOS), "
          "behaviour on EOS is not affected", 0, G_MAXUINT64,
          DEFAULT_PAD_MAX_LAST_BUFFER_REPEAT,
          G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
          G_PARAM_STATIC_STRINGS));

  /* Aggregator-pad hooks: flush queued media, and drop buffers that can no
   * longer contribute to the next output frame. */
  aggpadclass->flush = GST_DEBUG_FUNCPTR (_flush_pad);
  aggpadclass->skip_buffer =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_skip_buffer);
  /* Default frame handling: plain read-only map / unmap. Subclasses such as
   * the convert pad override these. */
  klass->prepare_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_prepare_frame);
  klass->clean_frame = GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_clean_frame);
}
315 
316 static void
gst_video_aggregator_pad_init(GstVideoAggregatorPad * vaggpad)317 gst_video_aggregator_pad_init (GstVideoAggregatorPad * vaggpad)
318 {
319   vaggpad->priv = gst_video_aggregator_pad_get_instance_private (vaggpad);
320 
321   vaggpad->priv->zorder = DEFAULT_PAD_ZORDER;
322   vaggpad->priv->repeat_after_eos = DEFAULT_PAD_REPEAT_AFTER_EOS;
323   vaggpad->priv->max_last_buffer_repeat = DEFAULT_PAD_MAX_LAST_BUFFER_REPEAT;
324   memset (&vaggpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
325 }
326 
327 /**
328  * gst_video_aggregator_pad_has_current_buffer:
329  * @pad: a #GstVideoAggregatorPad
330  *
331  * Checks if the pad currently has a buffer queued that is going to be used
332  * for the current output frame.
333  *
334  * This must only be called from the #GstVideoAggregatorClass::aggregate_frames virtual method,
335  * or from the #GstVideoAggregatorPadClass::prepare_frame virtual method of the aggregator pads.
336  *
337  * Returns: %TRUE if the pad has currently a buffer queued
338  */
339 gboolean
gst_video_aggregator_pad_has_current_buffer(GstVideoAggregatorPad * pad)340 gst_video_aggregator_pad_has_current_buffer (GstVideoAggregatorPad * pad)
341 {
342   g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), FALSE);
343 
344   return pad->priv->buffer != NULL;
345 }
346 
347 /**
348  * gst_video_aggregator_pad_get_current_buffer:
349  * @pad: a #GstVideoAggregatorPad
350  *
351  * Returns the currently queued buffer that is going to be used
352  * for the current output frame.
353  *
354  * This must only be called from the #GstVideoAggregatorClass::aggregate_frames virtual method,
355  * or from the #GstVideoAggregatorPadClass::prepare_frame virtual method of the aggregator pads.
356  *
357  * The return value is only valid until #GstVideoAggregatorClass::aggregate_frames or #GstVideoAggregatorPadClass::prepare_frame
358  * returns.
359  *
360  * Returns: (transfer none): The currently queued buffer
361  */
362 GstBuffer *
gst_video_aggregator_pad_get_current_buffer(GstVideoAggregatorPad * pad)363 gst_video_aggregator_pad_get_current_buffer (GstVideoAggregatorPad * pad)
364 {
365   g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
366 
367   return pad->priv->buffer;
368 }
369 
370 /**
371  * gst_video_aggregator_pad_get_prepared_frame:
372  * @pad: a #GstVideoAggregatorPad
373  *
374  * Returns the currently prepared video frame that has to be aggregated into
375  * the current output frame.
376  *
377  * This must only be called from the #GstVideoAggregatorClass::aggregate_frames virtual method,
378  * or from the #GstVideoAggregatorPadClass::prepare_frame virtual method of the aggregator pads.
379  *
380  * The return value is only valid until #GstVideoAggregatorClass::aggregate_frames or #GstVideoAggregatorPadClass::prepare_frame
381  * returns.
382  *
383  * Returns: (transfer none): The currently prepared video frame
384  */
385 GstVideoFrame *
gst_video_aggregator_pad_get_prepared_frame(GstVideoAggregatorPad * pad)386 gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorPad * pad)
387 {
388   g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
389 
390   return pad->priv->prepared_frame.buffer ? &pad->priv->prepared_frame : NULL;
391 }
392 
393 /**
394  * gst_video_aggregator_pad_set_needs_alpha:
395  * @pad: a #GstVideoAggregatorPad
396  * @needs_alpha: %TRUE if this pad requires alpha output
397  *
398  * Allows selecting that this pad requires an output format with alpha
399  *
400  */
401 void
gst_video_aggregator_pad_set_needs_alpha(GstVideoAggregatorPad * pad,gboolean needs_alpha)402 gst_video_aggregator_pad_set_needs_alpha (GstVideoAggregatorPad * pad,
403     gboolean needs_alpha)
404 {
405   g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad));
406 
407   if (needs_alpha != pad->priv->needs_alpha) {
408     GstAggregator *agg =
409         GST_AGGREGATOR (gst_object_get_parent (GST_OBJECT (pad)));
410     pad->priv->needs_alpha = needs_alpha;
411     if (agg) {
412       gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (agg));
413       gst_object_unref (agg);
414     }
415   }
416 }
417 
418 /****************************************
419  * GstVideoAggregatorConvertPad implementation *
420  ****************************************/
421 
422 enum
423 {
424   PROP_CONVERT_PAD_0,
425   PROP_CONVERT_PAD_CONVERTER_CONFIG,
426 };
427 
/* Instance-private state of GstVideoAggregatorConvertPad. */
struct _GstVideoAggregatorConvertPadPrivate
{
  /* The following fields are only used from the aggregate thread and when
   * initializing / finalizing */

  /* Converter, if NULL no conversion is done */
  GstVideoConverter *convert;

  /* caps used for conversion if needed */
  GstVideoInfo conversion_info;
  GstBuffer *converted_buffer;  /* holds the converted output until
                                 * clean_frame unrefs it */

  /* The following fields are accessed from the property setters / getters,
   * and as such are protected with the object lock */
  GstStructure *converter_config;
  gboolean converter_config_changed;    /* when TRUE, prepare_frame rebuilds
                                         * the converter lazily */
};
445 
446 G_DEFINE_TYPE_WITH_PRIVATE (GstVideoAggregatorConvertPad,
447     gst_video_aggregator_convert_pad, GST_TYPE_VIDEO_AGGREGATOR_PAD);
448 
449 static void
gst_video_aggregator_convert_pad_finalize(GObject * o)450 gst_video_aggregator_convert_pad_finalize (GObject * o)
451 {
452   GstVideoAggregatorConvertPad *vaggpad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (o);
453 
454   if (vaggpad->priv->convert)
455     gst_video_converter_free (vaggpad->priv->convert);
456   vaggpad->priv->convert = NULL;
457 
458   if (vaggpad->priv->converter_config)
459     gst_structure_free (vaggpad->priv->converter_config);
460   vaggpad->priv->converter_config = NULL;
461 
462   G_OBJECT_CLASS (gst_video_aggregator_pad_parent_class)->finalize (o);
463 }
464 
465 static void
gst_video_aggregator_convert_pad_update_conversion_info_internal(GstVideoAggregatorPad * vpad)466     gst_video_aggregator_convert_pad_update_conversion_info_internal
467     (GstVideoAggregatorPad * vpad)
468 {
469   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
470 
471   GST_OBJECT_LOCK (pad);
472   pad->priv->converter_config_changed = TRUE;
473   GST_OBJECT_UNLOCK (pad);
474 }
475 
/* GstVideoAggregatorPadClass::prepare_frame for the convert pad.
 *
 * Lazily (re)builds the video converter when the configuration changed,
 * maps @buffer, and either hands the mapped frame through unchanged or
 * converts it into a freshly allocated buffer.  On success *prepared_frame
 * is a mapped frame; any converted buffer is kept in priv->converted_buffer
 * until clean_frame releases it.  Returns FALSE on mapping/conversion
 * failure. */
static gboolean
gst_video_aggregator_convert_pad_prepare_frame (GstVideoAggregatorPad * vpad,
    GstVideoAggregator * vagg, GstBuffer * buffer,
    GstVideoFrame * prepared_frame)
{
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
  GstVideoFrame frame;

  /* Update/create converter as needed */
  GST_OBJECT_LOCK (pad);
  if (pad->priv->converter_config_changed) {
    GstVideoAggregatorConvertPadClass *klass =
        GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS (pad);
    GstVideoInfo conversion_info;

    /* Ask the subclass what the conversion target should look like; a NULL
     * finfo means it could not decide (e.g. formats not negotiated yet) */
    gst_video_info_init (&conversion_info);
    klass->create_conversion_info (pad, vagg, &conversion_info);
    if (conversion_info.finfo == NULL) {
      GST_OBJECT_UNLOCK (pad);
      return FALSE;
    }
    pad->priv->converter_config_changed = FALSE;

    pad->priv->conversion_info = conversion_info;

    /* Drop the old converter before deciding whether a new one is needed */
    if (pad->priv->convert)
      gst_video_converter_free (pad->priv->convert);
    pad->priv->convert = NULL;

    /* Only build a converter when input and target infos actually differ */
    if (!gst_video_info_is_equal (&vpad->info, &pad->priv->conversion_info)) {
      pad->priv->convert =
          gst_video_converter_new_with_pool (&vpad->info,
          &pad->priv->conversion_info,
          pad->priv->converter_config ? gst_structure_copy (pad->
              priv->converter_config) : NULL, vagg->priv->task_pool);
      if (!pad->priv->convert) {
        GST_WARNING_OBJECT (pad, "No path found for conversion");
        GST_OBJECT_UNLOCK (pad);
        return FALSE;
      }

      GST_DEBUG_OBJECT (pad, "This pad will be converted from %s to %s",
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&vpad->info)),
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&pad->priv->
                  conversion_info)));
    } else {
      GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
    }
  }
  GST_OBJECT_UNLOCK (pad);

  if (!gst_video_frame_map (&frame, &vpad->info, buffer, GST_MAP_READ)) {
    GST_WARNING_OBJECT (vagg, "Could not map input buffer");
    return FALSE;
  }

  if (pad->priv->convert) {
    GstVideoFrame converted_frame;
    GstBuffer *converted_buf = NULL;
    /* 16-byte alignment (align = 15) for the converted buffer */
    static GstAllocationParams params = { 0, 15, 0, 0, };
    gint converted_size;
    guint outsize;

    /* We wait until here to set the conversion infos, in case vagg->info changed */
    converted_size = pad->priv->conversion_info.size;
    outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
    converted_size = converted_size > outsize ? converted_size : outsize;
    converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);

    if (!gst_video_frame_map (&converted_frame, &(pad->priv->conversion_info),
            converted_buf, GST_MAP_READWRITE)) {
      GST_WARNING_OBJECT (vagg, "Could not map converted frame");

      gst_video_frame_unmap (&frame);
      return FALSE;
    }

    /* Convert, then drop the source mapping; the converted frame (still
     * mapped) becomes the prepared frame and its buffer is kept until
     * clean_frame */
    gst_video_converter_frame (pad->priv->convert, &frame, &converted_frame);
    pad->priv->converted_buffer = converted_buf;
    gst_video_frame_unmap (&frame);
    *prepared_frame = converted_frame;
  } else {
    /* No conversion needed: hand the mapped input frame through directly */
    *prepared_frame = frame;
  }

  return TRUE;
}
563 
564 static void
gst_video_aggregator_convert_pad_clean_frame(GstVideoAggregatorPad * vpad,GstVideoAggregator * vagg,GstVideoFrame * prepared_frame)565 gst_video_aggregator_convert_pad_clean_frame (GstVideoAggregatorPad * vpad,
566     GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
567 {
568   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
569 
570   if (prepared_frame->buffer) {
571     gst_video_frame_unmap (prepared_frame);
572     memset (prepared_frame, 0, sizeof (GstVideoFrame));
573   }
574 
575   if (pad->priv->converted_buffer) {
576     gst_buffer_unref (pad->priv->converted_buffer);
577     pad->priv->converted_buffer = NULL;
578   }
579 }
580 
/* Default GstVideoAggregatorConvertPadClass::create_conversion_info.
 *
 * Decides what this pad's frames must be converted to: the aggregator's
 * output format/colorimetry/chroma-site, but at the pad's own resolution
 * (no rescaling here).  Leaves *convert_info untouched (finfo == NULL) when
 * either side has no negotiated format yet, which prepare_frame treats as
 * failure. */
static void
    gst_video_aggregator_convert_pad_create_conversion_info
    (GstVideoAggregatorConvertPad * pad, GstVideoAggregator * agg,
    GstVideoInfo * convert_info)
{
  GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD (pad);
  gchar *colorimetry, *best_colorimetry;
  gchar *chroma, *best_chroma;

  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
  g_return_if_fail (convert_info != NULL);

  /* Bail out (leaving convert_info->finfo NULL) while formats are unknown */
  if (!vpad->info.finfo
      || GST_VIDEO_INFO_FORMAT (&vpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    return;
  }

  if (!agg->info.finfo
      || GST_VIDEO_INFO_FORMAT (&agg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    return;
  }

  /* Compare colorimetry/chroma-site via their string forms; both sides may
   * be NULL strings, which g_strcmp0 handles */
  colorimetry = gst_video_colorimetry_to_string (&vpad->info.colorimetry);
  chroma = gst_video_chroma_site_to_string (vpad->info.chroma_site);

  best_colorimetry = gst_video_colorimetry_to_string (&agg->info.colorimetry);
  best_chroma = gst_video_chroma_site_to_string (agg->info.chroma_site);

  if (GST_VIDEO_INFO_FORMAT (&agg->info) != GST_VIDEO_INFO_FORMAT (&vpad->info)
      || g_strcmp0 (colorimetry, best_colorimetry)
      || g_strcmp0 (chroma, best_chroma)) {
    GstVideoInfo tmp_info;

    /* Initialize with the wanted video format and our original width and
     * height as we don't want to rescale. Then copy over the wanted
     * colorimetry, and chroma-site and our current pixel-aspect-ratio
     * and other relevant fields.
     */
    gst_video_info_set_format (&tmp_info, GST_VIDEO_INFO_FORMAT (&agg->info),
        vpad->info.width, vpad->info.height);
    tmp_info.chroma_site = agg->info.chroma_site;
    tmp_info.colorimetry = agg->info.colorimetry;
    tmp_info.par_n = vpad->info.par_n;
    tmp_info.par_d = vpad->info.par_d;
    tmp_info.fps_n = vpad->info.fps_n;
    tmp_info.fps_d = vpad->info.fps_d;
    tmp_info.flags = vpad->info.flags;
    tmp_info.interlace_mode = vpad->info.interlace_mode;

    *convert_info = tmp_info;
  } else {
    /* Formats already match: no conversion target change needed */
    *convert_info = vpad->info;
  }

  g_free (colorimetry);
  g_free (best_colorimetry);
  g_free (chroma);
  g_free (best_chroma);
}
640 
641 static void
gst_video_aggregator_convert_pad_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)642 gst_video_aggregator_convert_pad_get_property (GObject * object, guint prop_id,
643     GValue * value, GParamSpec * pspec)
644 {
645   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);
646 
647   switch (prop_id) {
648     case PROP_CONVERT_PAD_CONVERTER_CONFIG:
649       GST_OBJECT_LOCK (pad);
650       if (pad->priv->converter_config)
651         g_value_set_boxed (value, pad->priv->converter_config);
652       GST_OBJECT_UNLOCK (pad);
653       break;
654     default:
655       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
656       break;
657   }
658 }
659 
660 static void
gst_video_aggregator_convert_pad_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)661 gst_video_aggregator_convert_pad_set_property (GObject * object, guint prop_id,
662     const GValue * value, GParamSpec * pspec)
663 {
664   GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);
665 
666   switch (prop_id) {
667     case PROP_CONVERT_PAD_CONVERTER_CONFIG:
668       GST_OBJECT_LOCK (pad);
669       if (pad->priv->converter_config)
670         gst_structure_free (pad->priv->converter_config);
671       pad->priv->converter_config = g_value_dup_boxed (value);
672       pad->priv->converter_config_changed = TRUE;
673       GST_OBJECT_UNLOCK (pad);
674       break;
675     default:
676       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
677       break;
678   }
679 }
680 
/* Class initializer for GstVideoAggregatorConvertPad: installs the
 * converter-config property and overrides the frame-preparation vfuncs with
 * converting variants. */
static void
gst_video_aggregator_convert_pad_class_init (GstVideoAggregatorConvertPadClass *
    klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstVideoAggregatorPadClass *vaggpadclass =
      (GstVideoAggregatorPadClass *) klass;

  gobject_class->finalize = gst_video_aggregator_convert_pad_finalize;
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_get_property);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_set_property);

  g_object_class_install_property (gobject_class,
      PROP_CONVERT_PAD_CONVERTER_CONFIG, g_param_spec_boxed ("converter-config",
          "Converter configuration",
          "A GstStructure describing the configuration that should be used "
          "when scaling and converting this pad's video frames",
          GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* Replace the plain map/unmap defaults with the converting versions */
  vaggpadclass->update_conversion_info =
      GST_DEBUG_FUNCPTR
      (gst_video_aggregator_convert_pad_update_conversion_info_internal);
  vaggpadclass->prepare_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_prepare_frame);
  vaggpadclass->clean_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_clean_frame);

  /* Subclasses may override how the conversion target info is derived */
  klass->create_conversion_info =
      gst_video_aggregator_convert_pad_create_conversion_info;
}
713 
714 static void
gst_video_aggregator_convert_pad_init(GstVideoAggregatorConvertPad * vaggpad)715 gst_video_aggregator_convert_pad_init (GstVideoAggregatorConvertPad * vaggpad)
716 {
717   vaggpad->priv =
718       gst_video_aggregator_convert_pad_get_instance_private (vaggpad);
719 
720   vaggpad->priv->converted_buffer = NULL;
721   vaggpad->priv->convert = NULL;
722   vaggpad->priv->converter_config = NULL;
723   vaggpad->priv->converter_config_changed = FALSE;
724 }
725 
726 /**
727  * gst_video_aggregator_convert_pad_update_conversion_info:
728  * @pad: a #GstVideoAggregatorPad
729  *
730  * Requests the pad to check and update the converter before the next usage to
731  * update for any changes that have happened.
732  *
733  */
void gst_video_aggregator_convert_pad_update_conversion_info
    (GstVideoAggregatorConvertPad * pad)
{
  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));

  /* Mark the cached converter stale; it is rebuilt lazily the next time
   * prepare_frame runs.  The flag is protected by the object lock. */
  GST_OBJECT_LOCK (pad);
  pad->priv->converter_config_changed = TRUE;
  GST_OBJECT_UNLOCK (pad);
}
743 
/* Instance-private state of GstVideoAggregatorParallelConvertPad. */
struct _GstVideoAggregatorParallelConvertPadPrivate
{
  /* Mapped source frame kept alive while a conversion is in progress;
   * zeroed when unused (see prepare_frame_start) */
  GstVideoFrame src_frame;
  /* Cleared at the start of prepare_frame_start.
   * NOTE(review): presumably set TRUE once an async conversion is kicked
   * off — the code doing so is outside this view; confirm. */
  gboolean is_converting;
};

typedef struct _GstVideoAggregatorParallelConvertPadPrivate
    GstVideoAggregatorParallelConvertPadPrivate;
752 
753 G_DEFINE_TYPE_WITH_PRIVATE (GstVideoAggregatorParallelConvertPad,
754     gst_video_aggregator_parallel_convert_pad,
755     GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD);
756 #define PARALLEL_CONVERT_PAD_GET_PRIVATE(o) \
757     gst_video_aggregator_parallel_convert_pad_get_instance_private (o)
758 
759 static void
gst_video_aggregator_parallel_convert_pad_prepare_frame_start
    (GstVideoAggregatorPad * vpad, GstVideoAggregator * vagg,
    GstBuffer * buffer, GstVideoFrame * prepared_frame)
{
  /* prepare_frame_start vfunc: map the input @buffer and, when the pad's
   * format differs from the output format, kick off an asynchronous
   * conversion on the aggregator's task pool.  The conversion is completed
   * later in prepare_frame_finish(). */
  GstVideoAggregatorParallelConvertPad *ppad =
      GST_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD (vpad);
  GstVideoAggregatorParallelConvertPadPrivate *pcp_priv =
      PARALLEL_CONVERT_PAD_GET_PRIVATE (ppad);
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);

  memset (&pcp_priv->src_frame, 0, sizeof (pcp_priv->src_frame));

  pcp_priv->is_converting = FALSE;

  /* Update/create converter as needed */
  GST_OBJECT_LOCK (pad);
  if (pad->priv->converter_config_changed) {
    GstVideoAggregatorConvertPadClass *klass =
        GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS (pad);
    GstVideoInfo conversion_info;

    gst_video_info_init (&conversion_info);
    klass->create_conversion_info (pad, vagg, &conversion_info);
    if (conversion_info.finfo == NULL) {
      GST_OBJECT_UNLOCK (pad);
      return;
    }
    pad->priv->converter_config_changed = FALSE;

    pad->priv->conversion_info = conversion_info;

    if (pad->priv->convert)
      gst_video_converter_free (pad->priv->convert);
    pad->priv->convert = NULL;

    if (!gst_video_info_is_equal (&vpad->info, &pad->priv->conversion_info)) {
      GstStructure *conv_config;

      if (pad->priv->converter_config) {
        conv_config = gst_structure_copy (pad->priv->converter_config);
      } else {
        conv_config = gst_structure_new_empty ("GstVideoConverterConfig");
      }
      /* Force async operation so the conversion runs on the task pool */
      gst_structure_set (conv_config, GST_VIDEO_CONVERTER_OPT_ASYNC_TASKS,
          G_TYPE_BOOLEAN, TRUE, NULL);

      pad->priv->convert =
          gst_video_converter_new_with_pool (&vpad->info,
          &pad->priv->conversion_info, conv_config, vagg->priv->task_pool);
      if (!pad->priv->convert) {
        GST_WARNING_OBJECT (pad, "No path found for conversion");
        GST_OBJECT_UNLOCK (pad);
        return;
      }

      GST_DEBUG_OBJECT (pad, "This pad will be converted from %s to %s",
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&vpad->info)),
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&pad->priv->
                  conversion_info)));
    } else {
      GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
    }
  }
  GST_OBJECT_UNLOCK (pad);

  if (!gst_video_frame_map (&pcp_priv->src_frame, &vpad->info, buffer,
          GST_MAP_READ)) {
    GST_WARNING_OBJECT (vagg, "Could not map input buffer");
    return;
  }

  if (pad->priv->convert) {
    GstBuffer *converted_buf = NULL;
    static GstAllocationParams params = { 0, 15, 0, 0, };
    gsize converted_size;
    gsize outsize;

    /* We wait until here to set the conversion infos, in case vagg->info changed */
    converted_size = pad->priv->conversion_info.size;
    outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
    /* use gsize for both so the comparison is not mixed-signedness */
    converted_size = MAX (converted_size, outsize);
    converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);
    if (converted_buf == NULL) {
      /* FIX: gst_buffer_new_allocate() can return NULL on allocation
       * failure; previously the result was used unchecked and the source
       * frame stayed mapped. */
      GST_WARNING_OBJECT (vagg, "Could not allocate converted frame buffer");
      gst_video_frame_unmap (&pcp_priv->src_frame);
      memset (&pcp_priv->src_frame, 0, sizeof (pcp_priv->src_frame));
      return;
    }

    if (!gst_video_frame_map (prepared_frame, &(pad->priv->conversion_info),
            converted_buf, GST_MAP_READWRITE)) {
      GST_WARNING_OBJECT (vagg, "Could not map converted frame");

      gst_clear_buffer (&converted_buf);
      gst_video_frame_unmap (&pcp_priv->src_frame);
      memset (&pcp_priv->src_frame, 0, sizeof (pcp_priv->src_frame));
      return;
    }

    /* Start the async conversion; prepare_frame_finish() calls
     * gst_video_converter_frame_finish() to complete it. */
    gst_video_converter_frame (pad->priv->convert, &pcp_priv->src_frame,
        prepared_frame);
    pad->priv->converted_buffer = converted_buf;
    pcp_priv->is_converting = TRUE;
  } else {
    /* No conversion needed: hand the mapped input frame straight through
     * and drop our own reference to the mapping. */
    *prepared_frame = pcp_priv->src_frame;
    memset (&pcp_priv->src_frame, 0, sizeof (pcp_priv->src_frame));
  }
}
862 
863 static void
gst_video_aggregator_parallel_convert_pad_prepare_frame_finish(GstVideoAggregatorPad * vpad,GstVideoAggregator * vagg,GstVideoFrame * prepared_frame)864     gst_video_aggregator_parallel_convert_pad_prepare_frame_finish
865     (GstVideoAggregatorPad * vpad, GstVideoAggregator * vagg,
866     GstVideoFrame * prepared_frame)
867 {
868   GstVideoAggregatorParallelConvertPad *ppad =
869       GST_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD (vpad);
870   GstVideoAggregatorParallelConvertPadPrivate *pcp_priv =
871       PARALLEL_CONVERT_PAD_GET_PRIVATE (ppad);
872   GstVideoAggregatorConvertPad *cpad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
873 
874   if (cpad->priv->convert && pcp_priv->is_converting) {
875     pcp_priv->is_converting = FALSE;
876     gst_video_converter_frame_finish (cpad->priv->convert);
877     if (pcp_priv->src_frame.buffer) {
878       gst_video_frame_unmap (&pcp_priv->src_frame);
879       memset (&pcp_priv->src_frame, 0, sizeof (pcp_priv->src_frame));
880     }
881   }
882 }
883 
884 static void
gst_video_aggregator_parallel_convert_pad_finalize(GObject * object)885 gst_video_aggregator_parallel_convert_pad_finalize (GObject * object)
886 {
887   GstVideoAggregatorParallelConvertPad *ppad =
888       GST_VIDEO_AGGREGATOR_PARALLEL_CONVERT_PAD (object);
889   GstVideoAggregatorParallelConvertPadPrivate *pcp_priv =
890       PARALLEL_CONVERT_PAD_GET_PRIVATE (ppad);
891   GstVideoAggregatorConvertPad *cpad =
892       GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);
893 
894   if (cpad->priv->convert && pcp_priv->is_converting) {
895     pcp_priv->is_converting = FALSE;
896     gst_video_converter_frame_finish (cpad->priv->convert);
897     if (pcp_priv->src_frame.buffer) {
898       gst_video_frame_unmap (&pcp_priv->src_frame);
899       memset (&pcp_priv->src_frame, 0, sizeof (pcp_priv->src_frame));
900     }
901   }
902 
903   G_OBJECT_CLASS
904       (gst_video_aggregator_parallel_convert_pad_parent_class)->finalize
905       (object);
906 }
907 
908 static void
gst_video_aggregator_parallel_convert_pad_class_init(GstVideoAggregatorParallelConvertPadClass * klass)909     gst_video_aggregator_parallel_convert_pad_class_init
910     (GstVideoAggregatorParallelConvertPadClass * klass)
911 {
912   GObjectClass *gobject_class = (GObjectClass *) klass;
913   GstVideoAggregatorPadClass *vaggpadclass =
914       (GstVideoAggregatorPadClass *) klass;
915 
916   gobject_class->finalize =
917       GST_DEBUG_FUNCPTR (gst_video_aggregator_parallel_convert_pad_finalize);
918 
919   vaggpadclass->prepare_frame = NULL;
920   vaggpadclass->prepare_frame_start =
921       GST_DEBUG_FUNCPTR
922       (gst_video_aggregator_parallel_convert_pad_prepare_frame_start);
923   vaggpadclass->prepare_frame_finish =
924       GST_DEBUG_FUNCPTR
925       (gst_video_aggregator_parallel_convert_pad_prepare_frame_finish);
926 }
927 
928 static void
gst_video_aggregator_parallel_convert_pad_init(GstVideoAggregatorParallelConvertPad * vaggpad)929     gst_video_aggregator_parallel_convert_pad_init
930     (GstVideoAggregatorParallelConvertPad * vaggpad)
931 {
932 }
933 
934 /**************************************
935  * GstVideoAggregator implementation  *
936  **************************************/
937 
#define GST_VIDEO_AGGREGATOR_GET_LOCK(vagg) (&GST_VIDEO_AGGREGATOR(vagg)->priv->lock)

/* Big "EVENT" lock serializing caps/format (re)negotiation against
 * aggregation; always taken via these macros so lock traffic is logged. */
#define GST_VIDEO_AGGREGATOR_LOCK(vagg)   G_STMT_START {       \
  GST_LOG_OBJECT (vagg, "Taking EVENT lock from thread %p",    \
        g_thread_self());                                      \
  g_mutex_lock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg));           \
  GST_LOG_OBJECT (vagg, "Took EVENT lock from thread %p",      \
        g_thread_self());                                      \
  } G_STMT_END

#define GST_VIDEO_AGGREGATOR_UNLOCK(vagg)   G_STMT_START {     \
  GST_LOG_OBJECT (vagg, "Releasing EVENT lock from thread %p", \
        g_thread_self());                                      \
  g_mutex_unlock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg));         \
  GST_LOG_OBJECT (vagg, "Released EVENT lock from thread %p",  \
        g_thread_self());                                      \
  } G_STMT_END
955 
956 
957 
958 /* Can't use the G_DEFINE_TYPE macros because we need the
959  * videoaggregator class in the _init to be able to set
960  * the sink pad non-alpha caps. Using the G_DEFINE_TYPE there
961  * seems to be no way of getting the real class being initialized */
962 static void gst_video_aggregator_init (GstVideoAggregator * self,
963     GstVideoAggregatorClass * klass);
964 static void gst_video_aggregator_class_init (GstVideoAggregatorClass * klass);
965 static gpointer gst_video_aggregator_parent_class = NULL;
966 static gint video_aggregator_private_offset = 0;
967 
968 GType
gst_video_aggregator_get_type(void)969 gst_video_aggregator_get_type (void)
970 {
971   static gsize static_g_define_type_id = 0;
972 
973   if (g_once_init_enter (&static_g_define_type_id)) {
974     GType g_define_type_id = g_type_register_static_simple (GST_TYPE_AGGREGATOR,
975         g_intern_static_string ("GstVideoAggregator"),
976         sizeof (GstVideoAggregatorClass),
977         (GClassInitFunc) gst_video_aggregator_class_init,
978         sizeof (GstVideoAggregator),
979         (GInstanceInitFunc) gst_video_aggregator_init,
980         (GTypeFlags) G_TYPE_FLAG_ABSTRACT);
981 
982     video_aggregator_private_offset =
983         g_type_add_instance_private (g_define_type_id,
984         sizeof (GstVideoAggregatorPrivate));
985 
986     g_once_init_leave (&static_g_define_type_id, g_define_type_id);
987   }
988   return static_g_define_type_id;
989 }
990 
991 static inline GstVideoAggregatorPrivate *
gst_video_aggregator_get_instance_private(GstVideoAggregator * self)992 gst_video_aggregator_get_instance_private (GstVideoAggregator * self)
993 {
994   return (G_STRUCT_MEMBER_P (self, video_aggregator_private_offset));
995 }
996 
997 static gboolean
gst_video_aggregator_supports_format(GstVideoAggregator * vagg,GstVideoFormat format)998 gst_video_aggregator_supports_format (GstVideoAggregator * vagg,
999     GstVideoFormat format)
1000 {
1001   gint i;
1002 
1003   for (i = 0; i < vagg->priv->supported_formats->len; i++) {
1004     GstVideoFormatInfo *format_info = vagg->priv->supported_formats->pdata[i];
1005 
1006     if (GST_VIDEO_FORMAT_INFO_FORMAT (format_info) == format)
1007       return TRUE;
1008   }
1009 
1010   return FALSE;
1011 }
1012 
1013 static GstCaps *
gst_video_aggregator_get_possible_caps_for_info(GstVideoInfo * info)1014 gst_video_aggregator_get_possible_caps_for_info (GstVideoInfo * info)
1015 {
1016   GstStructure *s;
1017   GstCaps *possible_caps = gst_video_info_to_caps (info);
1018 
1019   s = gst_caps_get_structure (possible_caps, 0);
1020   gst_structure_remove_fields (s, "width", "height", "framerate",
1021       "pixel-aspect-ratio", "interlace-mode", NULL);
1022 
1023   return possible_caps;
1024 }
1025 
/* Default find_best_format vfunc: pick the output format from the
 * negotiated sink pads.
 *
 * Each downstream-acceptable, supported pad format is weighted by the
 * total pixel area of the pads using it; the heaviest format wins.  A pad
 * that needs or carries alpha forces an alpha-capable output.  On exit,
 * @best_info holds the chosen format (or is re-initialized if nothing is
 * compatible) and @at_least_one_alpha reports whether any input has alpha.
 * Caller must NOT hold the object lock on @vagg (it is taken here). */
static void
gst_video_aggregator_find_best_format (GstVideoAggregator * vagg,
    GstCaps * downstream_caps, GstVideoInfo * best_info,
    gboolean * at_least_one_alpha)
{
  GList *tmp;
  GstCaps *possible_caps;
  GstVideoAggregatorPad *pad;
  gboolean need_alpha = FALSE;
  gint best_format_number = 0, i;
  /* format -> accumulated pixel-area weight */
  GHashTable *formats_table = g_hash_table_new (g_direct_hash, g_direct_equal);

  GST_OBJECT_LOCK (vagg);
  for (tmp = GST_ELEMENT (vagg)->sinkpads; tmp; tmp = tmp->next) {
    gint format_number = 0;

    pad = tmp->data;

    /* Pad not negotiated yet */
    if (!pad->info.finfo)
      continue;

    if (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
      *at_least_one_alpha = TRUE;

    /* If we want alpha, disregard all the other formats */
    if (need_alpha && !(pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA))
      continue;

    /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Can downstream accept this format ?  (Convert pads can be converted
     * later, so this check only applies to non-converting pads.) */
    if (!GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
      possible_caps =
          gst_video_aggregator_get_possible_caps_for_info (&pad->info);
      if (!gst_caps_can_intersect (downstream_caps, possible_caps)) {
        gst_caps_unref (possible_caps);
        continue;
      }

      gst_caps_unref (possible_caps);
    }

    /* If the format is supported, consider it very high weight */
    if (gst_video_aggregator_supports_format (vagg,
            GST_VIDEO_INFO_FORMAT (&pad->info))) {
      format_number =
          GPOINTER_TO_INT (g_hash_table_lookup (formats_table,
              GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info))));

      /* weight grows with the pad's pixel area */
      format_number += pad->info.width * pad->info.height;

      g_hash_table_replace (formats_table,
          GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info)),
          GINT_TO_POINTER (format_number));
    }

    /* If that pad is the first with alpha, set it as the new best format.
     * NOTE(review): this branch matches a pad that *requires* alpha while
     * its own format has none — hence the ARGB fallback below. */
    if (!need_alpha && (pad->priv->needs_alpha
            && (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (pad->info.finfo)))) {
      need_alpha = TRUE;
      /* Just fallback to ARGB in case we require alpha but the input pad
       * does not have alpha.
       * Do not increment best_format_number in that case. */
      gst_video_info_set_format (best_info,
          GST_VIDEO_FORMAT_ARGB,
          GST_VIDEO_INFO_WIDTH (&pad->info),
          GST_VIDEO_INFO_HEIGHT (&pad->info));
    } else if (!need_alpha
        && (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
      /* First pad whose own format carries alpha wins outright */
      need_alpha = TRUE;
      *best_info = pad->info;
      best_format_number = format_number;
    } else if (format_number > best_format_number) {
      *best_info = pad->info;
      best_format_number = format_number;
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  g_hash_table_unref (formats_table);

  /* If the winner is supported and downstream takes it, we are done */
  if (gst_video_aggregator_supports_format (vagg,
          GST_VIDEO_INFO_FORMAT (best_info))) {
    possible_caps = gst_video_aggregator_get_possible_caps_for_info (best_info);
    if (gst_caps_can_intersect (downstream_caps, possible_caps)) {
      gst_caps_unref (possible_caps);
      return;
    }
    gst_caps_unref (possible_caps);
  }

  /* Otherwise fall back to the first supported format downstream accepts,
   * keeping the chosen geometry */
  for (i = 0; i < vagg->priv->supported_formats->len; i++) {
    GstVideoFormatInfo *format_info = vagg->priv->supported_formats->pdata[i];

    /* either we don't care about alpha, or the output format needs to have
     * alpha */
    if (!need_alpha || GST_VIDEO_FORMAT_INFO_HAS_ALPHA (format_info)) {
      gst_video_info_set_format (best_info, format_info->format,
          best_info->width, best_info->height);
      possible_caps =
          gst_video_aggregator_get_possible_caps_for_info (best_info);

      if (gst_caps_can_intersect (downstream_caps, possible_caps)) {
        GST_INFO_OBJECT (vagg, "Using supported caps: %" GST_PTR_FORMAT,
            possible_caps);
        gst_caps_unref (possible_caps);

        return;
      }

      gst_caps_unref (possible_caps);
    }
  }

  /* Nothing worked: signal failure by resetting @best_info to UNKNOWN */
  GST_WARNING_OBJECT (vagg, "Nothing compatible with %" GST_PTR_FORMAT,
      downstream_caps);
  gst_video_info_init (best_info);
}
1146 
1147 static GstCaps *
gst_video_aggregator_default_fixate_src_caps(GstAggregator * agg,GstCaps * caps)1148 gst_video_aggregator_default_fixate_src_caps (GstAggregator * agg,
1149     GstCaps * caps)
1150 {
1151   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
1152   gint best_width = -1, best_height = -1;
1153   gint best_fps_n = -1, best_fps_d = -1;
1154   gdouble best_fps = -1.;
1155   GstStructure *s;
1156   GList *l;
1157 
1158   GST_OBJECT_LOCK (vagg);
1159   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
1160     GstVideoAggregatorPad *mpad = l->data;
1161     gint fps_n, fps_d;
1162     gint width, height;
1163     gdouble cur_fps;
1164 
1165     fps_n = GST_VIDEO_INFO_FPS_N (&mpad->info);
1166     fps_d = GST_VIDEO_INFO_FPS_D (&mpad->info);
1167     width = GST_VIDEO_INFO_WIDTH (&mpad->info);
1168     height = GST_VIDEO_INFO_HEIGHT (&mpad->info);
1169 
1170     if (width == 0 || height == 0)
1171       continue;
1172 
1173     if (best_width < width)
1174       best_width = width;
1175     if (best_height < height)
1176       best_height = height;
1177 
1178     if (fps_d == 0)
1179       cur_fps = 0.0;
1180     else
1181       gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);
1182 
1183     if (best_fps < cur_fps) {
1184       best_fps = cur_fps;
1185       best_fps_n = fps_n;
1186       best_fps_d = fps_d;
1187     }
1188   }
1189   GST_OBJECT_UNLOCK (vagg);
1190 
1191   if (best_fps_n <= 0 || best_fps_d <= 0 || best_fps == 0.0) {
1192     best_fps_n = 25;
1193     best_fps_d = 1;
1194     best_fps = 25.0;
1195   }
1196 
1197   caps = gst_caps_make_writable (caps);
1198   s = gst_caps_get_structure (caps, 0);
1199   gst_structure_fixate_field_nearest_int (s, "width", best_width);
1200   gst_structure_fixate_field_nearest_int (s, "height", best_height);
1201   gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n,
1202       best_fps_d);
1203   if (gst_structure_has_field (s, "pixel-aspect-ratio"))
1204     gst_structure_fixate_field_nearest_fraction (s, "pixel-aspect-ratio", 1, 1);
1205   caps = gst_caps_fixate (caps);
1206 
1207   return caps;
1208 }
1209 
/* Default update_caps vfunc: prepend the preferred output format to @caps.
 *
 * Asks the subclass's find_best_format() for the ideal format; if none is
 * found, fixates @caps and uses whatever format that yields.  Returns a
 * new caps with a best-format-pinned copy merged in front of the original
 * @caps, so negotiation prefers it but can still fall back. */
static GstCaps *
gst_video_aggregator_default_update_caps (GstVideoAggregator * vagg,
    GstCaps * caps)
{
  GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (vagg);
  GstCaps *ret, *best_format_caps;
  gboolean at_least_one_alpha = FALSE;
  GstVideoFormat best_format;
  GstVideoInfo best_info;
  gchar *color_name;                  /* owned; freed below */
  gchar *chroma_site;                 /* owned; freed below */

  best_format = GST_VIDEO_FORMAT_UNKNOWN;
  gst_video_info_init (&best_info);

  if (vagg_klass->find_best_format) {
    vagg_klass->find_best_format (vagg, caps, &best_info, &at_least_one_alpha);

    best_format = GST_VIDEO_INFO_FORMAT (&best_info);
  }

  /* find_best_format() found nothing (or is unset): derive the format
   * from a fixated copy of @caps instead */
  if (best_format == GST_VIDEO_FORMAT_UNKNOWN) {
    GstCaps *tmp = gst_caps_fixate (gst_caps_ref (caps));
    gst_video_info_from_caps (&best_info, tmp);
    best_format = GST_VIDEO_INFO_FORMAT (&best_info);
    gst_caps_unref (tmp);
  }

  color_name = gst_video_colorimetry_to_string (&best_info.colorimetry);
  chroma_site = gst_video_chroma_site_to_string (best_info.chroma_site);

  GST_DEBUG_OBJECT (vagg,
      "The output format will now be : %s with chroma : %s and colorimetry %s",
      gst_video_format_to_string (best_format),
      GST_STR_NULL (chroma_site), GST_STR_NULL (color_name));

  /* Pin format (and colorimetry/chroma-site when known) on a copy of @caps */
  best_format_caps = gst_caps_copy (caps);
  gst_caps_set_simple (best_format_caps, "format", G_TYPE_STRING,
      gst_video_format_to_string (best_format), NULL);

  if (chroma_site != NULL)
    gst_caps_set_simple (best_format_caps, "chroma-site", G_TYPE_STRING,
        chroma_site, NULL);
  if (color_name != NULL)
    gst_caps_set_simple (best_format_caps, "colorimetry", G_TYPE_STRING,
        color_name, NULL);

  g_free (color_name);
  g_free (chroma_site);
  /* Preferred caps first, original caps as fallback; merge takes both refs */
  ret = gst_caps_merge (best_format_caps, gst_caps_ref (caps));

  return ret;
}
1263 
1264 static GstFlowReturn
gst_video_aggregator_default_update_src_caps(GstAggregator * agg,GstCaps * caps,GstCaps ** ret)1265 gst_video_aggregator_default_update_src_caps (GstAggregator * agg,
1266     GstCaps * caps, GstCaps ** ret)
1267 {
1268   GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (agg);
1269   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
1270 
1271   g_assert (vagg_klass->update_caps);
1272 
1273   *ret = vagg_klass->update_caps (vagg, caps);
1274 
1275   return GST_FLOW_OK;
1276 }
1277 
1278 static gboolean
_update_conversion_info(GstElement * element,GstPad * pad,gpointer user_data)1279 _update_conversion_info (GstElement * element, GstPad * pad, gpointer user_data)
1280 {
1281   GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);
1282   GstVideoAggregatorPadClass *vaggpad_klass =
1283       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (vaggpad);
1284 
1285   if (vaggpad_klass->update_conversion_info) {
1286     vaggpad_klass->update_conversion_info (vaggpad);
1287   }
1288 
1289   return TRUE;
1290 }
1291 
/* negotiated_src_caps vfunc: apply the freshly negotiated output @caps.
 *
 * Validates that alpha inputs are not fed into a non-alpha output, stores
 * the new video info, resets QoS/frame counting on framerate changes,
 * refreshes per-pad conversion state, and pushes caps+latency downstream
 * when they actually changed.  Returns FALSE on invalid caps or on the
 * alpha mismatch error. */
static gboolean
gst_video_aggregator_default_negotiated_src_caps (GstAggregator * agg,
    GstCaps * caps)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  gboolean at_least_one_alpha = FALSE;
  gboolean ret = FALSE;
  const GstVideoFormatInfo *finfo;
  GstVideoInfo info;
  GList *l;

  GST_INFO_OBJECT (agg->srcpad, "set src caps: %" GST_PTR_FORMAT, caps);

  GST_VIDEO_AGGREGATOR_LOCK (vagg);

  /* Does any negotiated sink pad deliver a format with alpha? */
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *mpad = l->data;

    /* skip pads with no negotiated geometry yet */
    if (GST_VIDEO_INFO_WIDTH (&mpad->info) == 0
        || GST_VIDEO_INFO_HEIGHT (&mpad->info) == 0)
      continue;

    if (mpad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
      at_least_one_alpha = TRUE;
  }
  GST_OBJECT_UNLOCK (vagg);

  if (!gst_video_info_from_caps (&info, caps))
    goto unlock_and_return;

  /* Framerate changed: restart frame counting and QoS estimation */
  if (GST_VIDEO_INFO_FPS_N (&vagg->info) != GST_VIDEO_INFO_FPS_N (&info) ||
      GST_VIDEO_INFO_FPS_D (&vagg->info) != GST_VIDEO_INFO_FPS_D (&info)) {
    if (GST_AGGREGATOR_PAD (agg->srcpad)->segment.position != -1) {
      vagg->priv->nframes = 0;
      /* The timestamp offset will be updated based on the
       * segment position the next time we aggregate */
      GST_DEBUG_OBJECT (vagg,
          "Resetting frame counter because of framerate change");
    }
    gst_video_aggregator_reset_qos (vagg);
  }

  GST_OBJECT_LOCK (vagg);
  vagg->info = info;
  GST_OBJECT_UNLOCK (vagg);

  finfo = info.finfo;

  /* Refuse to silently drop alpha: inputs carry it but the output can't */
  if (at_least_one_alpha && !(finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
    GST_ELEMENT_ERROR (vagg, CORE, NEGOTIATION,
        ("At least one of the input pads contains alpha, but configured caps don't support alpha."),
        ("Either convert your inputs to not contain alpha or add a videoconvert after the aggregator"));
    goto unlock_and_return;
  }

  /* Then browse the sinks once more, setting or unsetting conversion if needed */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg),
      _update_conversion_info, NULL);

  /* Only propagate caps and latency when they actually changed */
  if (vagg->priv->current_caps == NULL ||
      gst_caps_is_equal (caps, vagg->priv->current_caps) == FALSE) {
    GstClockTime latency;

    gst_caps_replace (&vagg->priv->current_caps, caps);

    gst_aggregator_set_src_caps (agg, caps);
    /* latency = duration of one output frame */
    latency = gst_util_uint64_scale (GST_SECOND,
        GST_VIDEO_INFO_FPS_D (&info), GST_VIDEO_INFO_FPS_N (&info));
    gst_aggregator_set_latency (agg, latency, latency);
  }

  ret = TRUE;

unlock_and_return:
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  return ret;
}
1370 
1371 static gboolean
gst_video_aggregator_get_sinkpads_interlace_mode(GstVideoAggregator * vagg,GstVideoAggregatorPad * skip_pad,GstVideoInterlaceMode * mode)1372 gst_video_aggregator_get_sinkpads_interlace_mode (GstVideoAggregator * vagg,
1373     GstVideoAggregatorPad * skip_pad, GstVideoInterlaceMode * mode)
1374 {
1375   GList *walk;
1376 
1377   GST_OBJECT_LOCK (vagg);
1378   for (walk = GST_ELEMENT (vagg)->sinkpads; walk; walk = g_list_next (walk)) {
1379     GstVideoAggregatorPad *vaggpad = walk->data;
1380 
1381     if (skip_pad && vaggpad == skip_pad)
1382       continue;
1383     if (vaggpad->info.finfo
1384         && GST_VIDEO_INFO_FORMAT (&vaggpad->info) != GST_VIDEO_FORMAT_UNKNOWN) {
1385       *mode = GST_VIDEO_INFO_INTERLACE_MODE (&vaggpad->info);
1386       GST_OBJECT_UNLOCK (vagg);
1387       return TRUE;
1388     }
1389   }
1390   GST_OBJECT_UNLOCK (vagg);
1391   return FALSE;
1392 }
1393 
/* Sink-pad setcaps handler: accept new input caps on @pad.
 *
 * Rejects caps whose interlace mode conflicts with the current output (or,
 * if the output is not negotiated yet, with the other sink pads).  First
 * caps on a pad are applied immediately; subsequent caps are kept pending
 * until the next buffer is picked from the queue, so an old buffer is
 * never interpreted with new caps.  Returns FALSE on parse failure or
 * interlace-mode mismatch. */
static gboolean
gst_video_aggregator_pad_sink_setcaps (GstPad * pad, GstObject * parent,
    GstCaps * caps)
{
  GstVideoAggregator *vagg;
  GstVideoAggregatorPad *vaggpad;
  GstVideoInfo info;
  gboolean ret = FALSE;

  GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);

  vagg = GST_VIDEO_AGGREGATOR (parent);
  vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);

  if (!gst_video_info_from_caps (&info, caps)) {
    GST_DEBUG_OBJECT (pad, "Failed to parse caps");
    goto beach;
  }

  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  {
    GstVideoInterlaceMode pads_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
    gboolean has_mode = FALSE;

    /* get the current output setting or fallback to other pads settings */
    if (GST_VIDEO_INFO_FORMAT (&vagg->info) != GST_VIDEO_FORMAT_UNKNOWN) {
      pads_mode = GST_VIDEO_INFO_INTERLACE_MODE (&vagg->info);
      has_mode = TRUE;
    } else {
      has_mode =
          gst_video_aggregator_get_sinkpads_interlace_mode (vagg, vaggpad,
          &pads_mode);
    }

    /* All inputs must share one interlace mode: mixing is not supported */
    if (has_mode) {
      if (pads_mode != GST_VIDEO_INFO_INTERLACE_MODE (&info)) {
        GST_ERROR_OBJECT (pad,
            "got input caps %" GST_PTR_FORMAT ", but current caps are %"
            GST_PTR_FORMAT, caps, vagg->priv->current_caps);
        GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
        return FALSE;
      }
    }
  }

  if (!vaggpad->info.finfo ||
      GST_VIDEO_INFO_FORMAT (&vaggpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    /* no video info was already set, so this is the first time
     * that this pad is getting configured; configure immediately to avoid
     * problems with the initial negotiation */
    vaggpad->info = info;
    gst_caps_replace (&vaggpad->priv->caps, caps);
    gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
  } else {
    /* this pad already had caps but received new ones; keep the new caps
     * pending until we pick the next buffer from the queue, otherwise we
     * might use an old buffer with the new caps and crash */
    vaggpad->priv->pending_vinfo = info;
    gst_caps_replace (&vaggpad->priv->pending_caps, caps);
    GST_DEBUG_OBJECT (pad, "delaying caps change");
  }
  ret = TRUE;

  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);

beach:
  return ret;
}
1462 
1463 static gboolean
gst_video_aggregator_caps_has_alpha(GstCaps * caps)1464 gst_video_aggregator_caps_has_alpha (GstCaps * caps)
1465 {
1466   guint size = gst_caps_get_size (caps);
1467   guint i;
1468 
1469   for (i = 0; i < size; i++) {
1470     GstStructure *s = gst_caps_get_structure (caps, i);
1471     const GValue *formats = gst_structure_get_value (s, "format");
1472 
1473     if (formats) {
1474       const GstVideoFormatInfo *info;
1475 
1476       if (GST_VALUE_HOLDS_LIST (formats)) {
1477         guint list_size = gst_value_list_get_size (formats);
1478         guint index;
1479 
1480         for (index = 0; index < list_size; index++) {
1481           const GValue *list_item = gst_value_list_get_value (formats, index);
1482           info =
1483               gst_video_format_get_info (gst_video_format_from_string
1484               (g_value_get_string (list_item)));
1485           if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
1486             return TRUE;
1487         }
1488 
1489       } else if (G_VALUE_HOLDS_STRING (formats)) {
1490         info =
1491             gst_video_format_get_info (gst_video_format_from_string
1492             (g_value_get_string (formats)));
1493         if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
1494           return TRUE;
1495 
1496       } else {
1497         g_assert_not_reached ();
1498         GST_WARNING ("Unexpected type for video 'format' field: %s",
1499             G_VALUE_TYPE_NAME (formats));
1500       }
1501 
1502     } else {
1503       return TRUE;
1504     }
1505   }
1506   return FALSE;
1507 }
1508 
1509 static GstCaps *
_get_non_alpha_caps(GstCaps * caps)1510 _get_non_alpha_caps (GstCaps * caps)
1511 {
1512   GstCaps *result;
1513   guint i, size;
1514 
1515   size = gst_caps_get_size (caps);
1516   result = gst_caps_new_empty ();
1517   for (i = 0; i < size; i++) {
1518     GstStructure *s = gst_caps_get_structure (caps, i);
1519     const GValue *formats = gst_structure_get_value (s, "format");
1520     GValue new_formats = { 0, };
1521     gboolean has_format = FALSE;
1522 
1523     /* FIXME what to do if formats are missing? */
1524     if (formats) {
1525       const GstVideoFormatInfo *info;
1526 
1527       if (GST_VALUE_HOLDS_LIST (formats)) {
1528         guint list_size = gst_value_list_get_size (formats);
1529         guint index;
1530 
1531         g_value_init (&new_formats, GST_TYPE_LIST);
1532 
1533         for (index = 0; index < list_size; index++) {
1534           const GValue *list_item = gst_value_list_get_value (formats, index);
1535 
1536           info =
1537               gst_video_format_get_info (gst_video_format_from_string
1538               (g_value_get_string (list_item)));
1539           if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
1540             has_format = TRUE;
1541             gst_value_list_append_value (&new_formats, list_item);
1542           }
1543         }
1544 
1545       } else if (G_VALUE_HOLDS_STRING (formats)) {
1546         info =
1547             gst_video_format_get_info (gst_video_format_from_string
1548             (g_value_get_string (formats)));
1549         if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
1550           has_format = TRUE;
1551           gst_value_init_and_copy (&new_formats, formats);
1552         }
1553 
1554       } else {
1555         g_assert_not_reached ();
1556         GST_WARNING ("Unexpected type for video 'format' field: %s",
1557             G_VALUE_TYPE_NAME (formats));
1558       }
1559 
1560       if (has_format) {
1561         s = gst_structure_copy (s);
1562         gst_structure_take_value (s, "format", &new_formats);
1563         gst_caps_append_structure (result, s);
1564       }
1565 
1566     }
1567   }
1568 
1569   return result;
1570 }
1571 
/* Sink-pad caps query handler: compute what @pad can accept.
 *
 * Starts from what downstream accepts on the src pad, widens the fields a
 * converting pad can adapt (size, format, colorimetry, ...), pins the
 * interlace mode already chosen by other pads, intersects with @filter
 * (if any) and with the sink pad template (alpha formats removed when
 * downstream cannot take alpha).  Returns new caps owned by the caller. */
static GstCaps *
gst_video_aggregator_pad_sink_getcaps (GstPad * pad, GstVideoAggregator * vagg,
    GstCaps * filter)
{
  GstCaps *srccaps;
  GstCaps *template_caps, *sink_template_caps;
  GstCaps *returned_caps;
  GstStructure *s;
  gint i, n;
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstPad *srcpad = GST_PAD (agg->srcpad);
  gboolean has_alpha;
  GstVideoInterlaceMode interlace_mode;
  gboolean has_interlace_mode;

  template_caps = gst_pad_get_pad_template_caps (srcpad);

  GST_DEBUG_OBJECT (pad, "Get caps with filter: %" GST_PTR_FORMAT, filter);

  /* What will downstream actually accept? */
  srccaps = gst_pad_peer_query_caps (srcpad, template_caps);
  srccaps = gst_caps_make_writable (srccaps);
  has_alpha = gst_video_aggregator_caps_has_alpha (srccaps);

  /* interlace mode is locked in by whichever pad negotiated first */
  has_interlace_mode =
      gst_video_aggregator_get_sinkpads_interlace_mode (vagg, NULL,
      &interlace_mode);

  n = gst_caps_get_size (srccaps);
  for (i = 0; i < n; i++) {
    s = gst_caps_get_structure (srccaps, i);
    /* the aggregator adapts framerates, so accept any input rate */
    gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
        1, NULL);

    /* a converting pad can also rescale and convert format/colorimetry,
     * so widen those constraints too */
    if (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
      gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
          "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
      gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
          "pixel-aspect-ratio", NULL);
    }

    if (has_interlace_mode)
      gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
          gst_video_interlace_mode_to_string (interlace_mode), NULL);
  }

  if (filter) {
    returned_caps = gst_caps_intersect (srccaps, filter);
    gst_caps_unref (srccaps);
  } else {
    returned_caps = srccaps;
  }

  /* restrict to the sink template; drop alpha formats when downstream
   * cannot handle alpha anyway */
  sink_template_caps = gst_pad_get_pad_template_caps (pad);
  if (!has_alpha) {
    GstCaps *tmp = _get_non_alpha_caps (sink_template_caps);
    gst_caps_unref (sink_template_caps);
    sink_template_caps = tmp;
  }

  {
    GstCaps *intersect = gst_caps_intersect (returned_caps, sink_template_caps);
    gst_caps_unref (returned_caps);
    returned_caps = intersect;
  }

  gst_caps_unref (template_caps);
  gst_caps_unref (sink_template_caps);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, returned_caps);

  return returned_caps;
}
1644 
1645 static void
gst_video_aggregator_update_qos(GstVideoAggregator * vagg,gdouble proportion,GstClockTimeDiff diff,GstClockTime timestamp)1646 gst_video_aggregator_update_qos (GstVideoAggregator * vagg, gdouble proportion,
1647     GstClockTimeDiff diff, GstClockTime timestamp)
1648 {
1649   gboolean live;
1650 
1651   GST_DEBUG_OBJECT (vagg,
1652       "Updating QoS: proportion %lf, diff %" GST_STIME_FORMAT ", timestamp %"
1653       GST_TIME_FORMAT, proportion, GST_STIME_ARGS (diff),
1654       GST_TIME_ARGS (timestamp));
1655 
1656   live =
1657       GST_CLOCK_TIME_IS_VALID (gst_aggregator_get_latency (GST_AGGREGATOR
1658           (vagg)));
1659 
1660   GST_OBJECT_LOCK (vagg);
1661 
1662   vagg->priv->proportion = proportion;
1663   if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
1664     if (!live && G_UNLIKELY (diff > 0))
1665       vagg->priv->earliest_time =
1666           timestamp + 2 * diff + gst_util_uint64_scale_int_round (GST_SECOND,
1667           GST_VIDEO_INFO_FPS_D (&vagg->info),
1668           GST_VIDEO_INFO_FPS_N (&vagg->info));
1669     else
1670       vagg->priv->earliest_time = timestamp + diff;
1671   } else {
1672     vagg->priv->earliest_time = GST_CLOCK_TIME_NONE;
1673   }
1674   GST_OBJECT_UNLOCK (vagg);
1675 }
1676 
1677 static void
gst_video_aggregator_reset_qos(GstVideoAggregator * vagg)1678 gst_video_aggregator_reset_qos (GstVideoAggregator * vagg)
1679 {
1680   gst_video_aggregator_update_qos (vagg, 0.5, 0, GST_CLOCK_TIME_NONE);
1681   vagg->priv->qos_processed = vagg->priv->qos_dropped = 0;
1682 }
1683 
1684 static void
gst_video_aggregator_read_qos(GstVideoAggregator * vagg,gdouble * proportion,GstClockTime * time)1685 gst_video_aggregator_read_qos (GstVideoAggregator * vagg, gdouble * proportion,
1686     GstClockTime * time)
1687 {
1688   GST_OBJECT_LOCK (vagg);
1689   *proportion = vagg->priv->proportion;
1690   *time = vagg->priv->earliest_time;
1691   GST_OBJECT_UNLOCK (vagg);
1692 }
1693 
/* Reset the aggregator to its initial state: forget the negotiated output
 * format, timestamp bookkeeping and QoS state, and drop every sink pad's
 * queued buffer, caps and timing information. */
static void
gst_video_aggregator_reset (GstVideoAggregator * vagg)
{
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GList *l;

  /* Output format becomes unknown again */
  GST_OBJECT_LOCK (vagg);
  gst_video_info_init (&vagg->info);
  GST_OBJECT_UNLOCK (vagg);

  vagg->priv->ts_offset = 0;
  vagg->priv->nframes = 0;
  vagg->priv->live = FALSE;

  /* -1 marks the output position as "not started yet" */
  GST_AGGREGATOR_PAD (agg->srcpad)->segment.position = -1;

  gst_video_aggregator_reset_qos (vagg);

  /* Clear per-pad queued data; object lock protects the sinkpads list */
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *p = l->data;

    gst_buffer_replace (&p->priv->buffer, NULL);
    gst_caps_replace (&p->priv->caps, NULL);
    p->priv->start_time = -1;
    p->priv->end_time = -1;

    gst_video_info_init (&p->info);
  }
  GST_OBJECT_UNLOCK (vagg);
}
1725 
1726 static GstFlowReturn
gst_video_aggregator_fill_queues(GstVideoAggregator * vagg,GstClockTime output_start_running_time,GstClockTime output_end_running_time,gboolean timeout)1727 gst_video_aggregator_fill_queues (GstVideoAggregator * vagg,
1728     GstClockTime output_start_running_time,
1729     GstClockTime output_end_running_time, gboolean timeout)
1730 {
1731   GList *l;
1732   gboolean eos = TRUE;
1733   gboolean repeat_pad_eos = FALSE;
1734   gboolean has_no_repeat_pads = FALSE;
1735   gboolean need_more_data = FALSE;
1736   gboolean need_reconfigure = FALSE;
1737 
1738   /* get a set of buffers into pad->priv->buffer that are within output_start_running_time
1739    * and output_end_running_time taking into account finished and unresponsive pads */
1740 
1741   GST_OBJECT_LOCK (vagg);
1742   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
1743     GstVideoAggregatorPad *pad = l->data;
1744     GstSegment segment;
1745     GstAggregatorPad *bpad;
1746     GstBuffer *buf;
1747     gboolean is_eos;
1748 
1749     bpad = GST_AGGREGATOR_PAD (pad);
1750 
1751     if (gst_aggregator_pad_is_inactive (bpad))
1752       continue;
1753 
1754     GST_OBJECT_LOCK (bpad);
1755     segment = bpad->segment;
1756     GST_OBJECT_UNLOCK (bpad);
1757     is_eos = gst_aggregator_pad_is_eos (bpad);
1758 
1759     if (!is_eos)
1760       eos = FALSE;
1761     if (!pad->priv->repeat_after_eos)
1762       has_no_repeat_pads = TRUE;
1763     buf = gst_aggregator_pad_peek_buffer (bpad);
1764     if (buf) {
1765       GstClockTime start_time, end_time;
1766       GstClockTime start_running_time, end_running_time;
1767 
1768     check_again:
1769       GST_TRACE_OBJECT (pad, "Next buffer %" GST_PTR_FORMAT, buf);
1770 
1771       start_time = GST_BUFFER_TIMESTAMP (buf);
1772       if (start_time == -1) {
1773         gst_buffer_unref (buf);
1774         GST_ERROR_OBJECT (pad, "Need timestamped buffers!");
1775         GST_OBJECT_UNLOCK (vagg);
1776         return GST_FLOW_ERROR;
1777       }
1778 
1779       end_time = GST_BUFFER_DURATION (buf);
1780 
1781       if (end_time == -1) {
1782         start_time = MAX (start_time, segment.start);
1783         start_time =
1784             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
1785 
1786         if (start_time >= output_end_running_time) {
1787           if (pad->priv->buffer) {
1788             GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
1789                 "output_end_running_time. Keeping previous buffer");
1790           } else {
1791             GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
1792                 "output_end_running_time. No previous buffer.");
1793           }
1794           gst_buffer_unref (buf);
1795           continue;
1796         } else if (start_time < output_start_running_time) {
1797           GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time < "
1798               "output_start_running_time.  Discarding old buffer");
1799           gst_buffer_replace (&pad->priv->buffer, buf);
1800           if (pad->priv->pending_vinfo.finfo) {
1801             gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
1802             gst_caps_replace (&pad->priv->pending_caps, NULL);
1803             pad->info = pad->priv->pending_vinfo;
1804             need_reconfigure = TRUE;
1805             pad->priv->pending_vinfo.finfo = NULL;
1806           }
1807           gst_buffer_unref (buf);
1808           gst_aggregator_pad_drop_buffer (bpad);
1809           pad->priv->start_time = start_time;
1810           if (timeout) {
1811             /* If we're called for a timeout, we want to make sure we drain as
1812              * much as possible any late data */
1813             buf = gst_aggregator_pad_peek_buffer (bpad);
1814             if (buf)
1815               goto check_again;
1816           }
1817           need_more_data = TRUE;
1818           continue;
1819         }
1820         gst_buffer_unref (buf);
1821         buf = gst_aggregator_pad_pop_buffer (bpad);
1822         gst_buffer_replace (&pad->priv->buffer, buf);
1823         if (pad->priv->pending_vinfo.finfo) {
1824           gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
1825           gst_caps_replace (&pad->priv->pending_caps, NULL);
1826           pad->info = pad->priv->pending_vinfo;
1827           need_reconfigure = TRUE;
1828           pad->priv->pending_vinfo.finfo = NULL;
1829         }
1830         /* FIXME: Set end_time to something here? */
1831         pad->priv->start_time = start_time;
1832         gst_buffer_unref (buf);
1833         GST_DEBUG_OBJECT (pad, "buffer duration is -1");
1834         continue;
1835       }
1836 
1837       g_assert (start_time != -1 && end_time != -1);
1838       end_time += start_time;   /* convert from duration to position */
1839 
1840       /* Check if it's inside the segment */
1841       if (start_time >= segment.stop || end_time < segment.start) {
1842         GST_DEBUG_OBJECT (pad,
1843             "Buffer outside the segment : segment: [%" GST_TIME_FORMAT " -- %"
1844             GST_TIME_FORMAT "]" " Buffer [%" GST_TIME_FORMAT " -- %"
1845             GST_TIME_FORMAT "]", GST_TIME_ARGS (segment.stop),
1846             GST_TIME_ARGS (segment.start), GST_TIME_ARGS (start_time),
1847             GST_TIME_ARGS (end_time));
1848 
1849         gst_buffer_unref (buf);
1850         gst_aggregator_pad_drop_buffer (bpad);
1851 
1852         need_more_data = TRUE;
1853         continue;
1854       }
1855 
1856       /* Clip to segment and convert to running time */
1857       start_time = MAX (start_time, segment.start);
1858       if (segment.stop != -1)
1859         end_time = MIN (end_time, segment.stop);
1860 
1861       if (segment.rate >= 0) {
1862         start_running_time =
1863             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
1864         end_running_time =
1865             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, end_time);
1866       } else {
1867         start_running_time =
1868             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, end_time);
1869         end_running_time =
1870             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
1871       }
1872       g_assert (start_running_time != -1 && end_running_time != -1);
1873 
1874       GST_TRACE_OBJECT (pad, "dealing with buffer %p start %" GST_TIME_FORMAT
1875           " end %" GST_TIME_FORMAT " out start %" GST_TIME_FORMAT
1876           " out end %" GST_TIME_FORMAT, buf, GST_TIME_ARGS (start_running_time),
1877           GST_TIME_ARGS (end_running_time),
1878           GST_TIME_ARGS (output_start_running_time),
1879           GST_TIME_ARGS (output_end_running_time));
1880 
1881       if (pad->priv->end_time != -1 && pad->priv->end_time > end_running_time) {
1882         GST_DEBUG_OBJECT (pad, "Buffer from the past, dropping");
1883         gst_buffer_unref (buf);
1884         gst_aggregator_pad_drop_buffer (bpad);
1885         continue;
1886       }
1887 
1888       if (end_running_time > output_start_running_time
1889           && start_running_time < output_end_running_time) {
1890         GST_DEBUG_OBJECT (pad,
1891             "Taking new buffer with start time %" GST_TIME_FORMAT,
1892             GST_TIME_ARGS (start_running_time));
1893         gst_buffer_replace (&pad->priv->buffer, buf);
1894         if (pad->priv->pending_vinfo.finfo) {
1895           gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
1896           gst_caps_replace (&pad->priv->pending_caps, NULL);
1897           pad->info = pad->priv->pending_vinfo;
1898           need_reconfigure = TRUE;
1899           pad->priv->pending_vinfo.finfo = NULL;
1900         }
1901         pad->priv->start_time = start_running_time;
1902         pad->priv->end_time = end_running_time;
1903 
1904         gst_buffer_unref (buf);
1905         gst_aggregator_pad_drop_buffer (bpad);
1906         eos = FALSE;
1907       } else if (start_running_time >= output_end_running_time) {
1908         GST_DEBUG_OBJECT (pad, "Keeping buffer until %" GST_TIME_FORMAT,
1909             GST_TIME_ARGS (start_running_time));
1910         gst_buffer_unref (buf);
1911         eos = FALSE;
1912       } else {
1913         gst_buffer_replace (&pad->priv->buffer, buf);
1914         if (pad->priv->pending_vinfo.finfo) {
1915           gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
1916           gst_caps_replace (&pad->priv->pending_caps, NULL);
1917           pad->info = pad->priv->pending_vinfo;
1918           need_reconfigure = TRUE;
1919           pad->priv->pending_vinfo.finfo = NULL;
1920         }
1921         pad->priv->start_time = start_running_time;
1922         pad->priv->end_time = end_running_time;
1923         GST_DEBUG_OBJECT (pad,
1924             "replacing old buffer with a newer buffer, start %" GST_TIME_FORMAT
1925             " out end %" GST_TIME_FORMAT, GST_TIME_ARGS (start_running_time),
1926             GST_TIME_ARGS (output_end_running_time));
1927         gst_buffer_unref (buf);
1928         gst_aggregator_pad_drop_buffer (bpad);
1929 
1930         need_more_data = TRUE;
1931         continue;
1932       }
1933     } else {
1934       if (is_eos && pad->priv->repeat_after_eos) {
1935         repeat_pad_eos = TRUE;
1936         GST_DEBUG_OBJECT (pad, "ignoring EOS and re-using previous buffer");
1937         continue;
1938       }
1939 
1940       if (pad->priv->end_time != -1) {
1941         if (pad->priv->end_time <= output_start_running_time) {
1942           if (!is_eos) {
1943             GST_DEBUG_OBJECT (pad, "I just need more data");
1944             if (GST_CLOCK_TIME_IS_VALID (pad->priv->max_last_buffer_repeat)) {
1945               if (output_start_running_time - pad->priv->end_time >
1946                   pad->priv->max_last_buffer_repeat) {
1947                 pad->priv->start_time = pad->priv->end_time = -1;
1948                 gst_buffer_replace (&pad->priv->buffer, NULL);
1949                 gst_caps_replace (&pad->priv->caps, NULL);
1950               }
1951             } else {
1952               pad->priv->start_time = pad->priv->end_time = -1;
1953             }
1954             need_more_data = TRUE;
1955           } else {
1956             gst_buffer_replace (&pad->priv->buffer, NULL);
1957             gst_caps_replace (&pad->priv->caps, NULL);
1958             pad->priv->start_time = pad->priv->end_time = -1;
1959           }
1960         } else if (is_eos) {
1961           eos = FALSE;
1962         }
1963       } else if (is_eos) {
1964         gst_buffer_replace (&pad->priv->buffer, NULL);
1965         gst_caps_replace (&pad->priv->caps, NULL);
1966       } else if (pad->priv->start_time != -1) {
1967         /* When the current buffer didn't have a duration, but
1968          * max-last-buffer-repeat was set, we use start_time as
1969          * the comparison point
1970          */
1971         if (pad->priv->start_time <= output_start_running_time) {
1972           if (GST_CLOCK_TIME_IS_VALID (pad->priv->max_last_buffer_repeat)) {
1973             if (output_start_running_time - pad->priv->start_time >
1974                 pad->priv->max_last_buffer_repeat) {
1975               pad->priv->start_time = pad->priv->end_time = -1;
1976               gst_buffer_replace (&pad->priv->buffer, NULL);
1977               gst_caps_replace (&pad->priv->caps, NULL);
1978             }
1979           }
1980         }
1981       }
1982     }
1983   }
1984   GST_OBJECT_UNLOCK (vagg);
1985 
1986   if (need_reconfigure)
1987     gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
1988 
1989   if (need_more_data)
1990     return GST_AGGREGATOR_FLOW_NEED_DATA;
1991   if (eos && !has_no_repeat_pads && repeat_pad_eos)
1992     eos = FALSE;
1993   if (eos)
1994     return GST_FLOW_EOS;
1995 
1996   return GST_FLOW_OK;
1997 }
1998 
1999 static gboolean
sync_pad_values(GstElement * vagg,GstPad * pad,gpointer user_data)2000 sync_pad_values (GstElement * vagg, GstPad * pad, gpointer user_data)
2001 {
2002   gint64 *out_stream_time = user_data;
2003 
2004   /* sync object properties on stream time */
2005   if (GST_CLOCK_TIME_IS_VALID (*out_stream_time))
2006     gst_object_sync_values (GST_OBJECT_CAST (pad), *out_stream_time);
2007 
2008   return TRUE;
2009 }
2010 
2011 static gboolean
prepare_frames_start(GstElement * agg,GstPad * pad,gpointer user_data)2012 prepare_frames_start (GstElement * agg, GstPad * pad, gpointer user_data)
2013 {
2014   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
2015   GstVideoAggregatorPadClass *vaggpad_class =
2016       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
2017 
2018   memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
2019 
2020   if (vpad->priv->buffer == NULL || !vaggpad_class->prepare_frame_start)
2021     return TRUE;
2022 
2023   /* GAP event, nothing to do */
2024   if (vpad->priv->buffer &&
2025       gst_buffer_get_size (vpad->priv->buffer) == 0 &&
2026       GST_BUFFER_FLAG_IS_SET (vpad->priv->buffer, GST_BUFFER_FLAG_GAP)) {
2027     return TRUE;
2028   }
2029 
2030   g_return_val_if_fail (vaggpad_class->prepare_frame_start
2031       && vaggpad_class->prepare_frame_finish, TRUE);
2032 
2033   vaggpad_class->prepare_frame_start (vpad, GST_VIDEO_AGGREGATOR_CAST (agg),
2034       vpad->priv->buffer, &vpad->priv->prepared_frame);
2035 
2036   return TRUE;
2037 }
2038 
2039 static gboolean
prepare_frames_finish(GstElement * agg,GstPad * pad,gpointer user_data)2040 prepare_frames_finish (GstElement * agg, GstPad * pad, gpointer user_data)
2041 {
2042   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
2043   GstVideoAggregatorPadClass *vaggpad_class =
2044       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
2045 
2046   if (vpad->priv->buffer == NULL || (!vaggpad_class->prepare_frame
2047           && !vaggpad_class->prepare_frame_start))
2048     return TRUE;
2049 
2050   /* GAP event, nothing to do */
2051   if (vpad->priv->buffer &&
2052       gst_buffer_get_size (vpad->priv->buffer) == 0 &&
2053       GST_BUFFER_FLAG_IS_SET (vpad->priv->buffer, GST_BUFFER_FLAG_GAP)) {
2054     return TRUE;
2055   }
2056 
2057   if (vaggpad_class->prepare_frame_start && vaggpad_class->prepare_frame_finish) {
2058     vaggpad_class->prepare_frame_finish (vpad, GST_VIDEO_AGGREGATOR_CAST (agg),
2059         &vpad->priv->prepared_frame);
2060     return TRUE;
2061   } else {
2062     return vaggpad_class->prepare_frame (vpad, GST_VIDEO_AGGREGATOR_CAST (agg),
2063         vpad->priv->buffer, &vpad->priv->prepared_frame);
2064   }
2065 }
2066 
2067 static gboolean
clean_pad(GstElement * agg,GstPad * pad,gpointer user_data)2068 clean_pad (GstElement * agg, GstPad * pad, gpointer user_data)
2069 {
2070   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR_CAST (agg);
2071   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
2072   GstVideoAggregatorPadClass *vaggpad_class =
2073       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
2074 
2075   if (vaggpad_class->clean_frame)
2076     vaggpad_class->clean_frame (vpad, vagg, &vpad->priv->prepared_frame);
2077 
2078   memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
2079 
2080   return TRUE;
2081 }
2082 
/* Produce one output buffer covering [output_start_time, output_end_time):
 * asks the subclass for an output buffer, timestamps it, syncs controlled
 * pad properties, runs the two-phase frame preparation on every sink pad,
 * calls the subclass' aggregate_frames vmethod and finally cleans the
 * prepared frames up again.  *outbuf may legitimately come back NULL when
 * the subclass chooses to produce nothing this cycle. */
static GstFlowReturn
gst_video_aggregator_do_aggregate (GstVideoAggregator * vagg,
    GstClockTime output_start_time, GstClockTime output_end_time,
    GstBuffer ** outbuf)
{
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstFlowReturn ret = GST_FLOW_OK;
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (vagg);
  GstVideoAggregatorClass *vagg_klass = (GstVideoAggregatorClass *) klass;
  GstClockTime out_stream_time;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  /* Both vmethods are mandatory for subclasses */
  g_assert (vagg_klass->aggregate_frames != NULL);
  g_assert (vagg_klass->create_output_buffer != NULL);

  if ((ret = vagg_klass->create_output_buffer (vagg, outbuf)) != GST_FLOW_OK) {
    GST_WARNING_OBJECT (vagg, "Could not get an output buffer, reason: %s",
        gst_flow_get_name (ret));
    return ret;
  }
  if (*outbuf == NULL) {
    /* sub-class doesn't want to generate output right now */
    return GST_FLOW_OK;
  }

  GST_OBJECT_LOCK (agg->srcpad);
  if (agg_segment->rate >= 0) {
    GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
    GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;
    out_stream_time = gst_segment_to_stream_time (agg_segment,
        GST_FORMAT_TIME, output_start_time);
  } else {
    /* Reverse playback: the buffer starts at the later timestamp */
    GST_BUFFER_TIMESTAMP (*outbuf) = output_end_time;
    GST_BUFFER_DURATION (*outbuf) = output_start_time - output_end_time;
    out_stream_time = gst_segment_to_stream_time (agg_segment,
        GST_FORMAT_TIME, output_end_time);
  }
  GST_OBJECT_UNLOCK (agg->srcpad);

  /* Sync pad properties to the stream time */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), sync_pad_values,
      &out_stream_time);

  /* Let the application know that input buffers have been staged */
  gst_aggregator_selected_samples (agg, GST_BUFFER_PTS (*outbuf),
      GST_BUFFER_DTS (*outbuf), GST_BUFFER_DURATION (*outbuf), NULL);

  /* Convert all the frames the subclass has before aggregating */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), prepare_frames_start,
      NULL);
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), prepare_frames_finish,
      NULL);

  ret = vagg_klass->aggregate_frames (vagg, *outbuf);

  /* Release prepared frames regardless of the aggregation result */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), clean_pad, NULL);

  return ret;
}
2142 
/* Perform qos calculations before processing the next frame.
 *
 * NOTE: despite what an older version of this comment said, this does NOT
 * return a boolean.  It returns the jitter of the frame's running time
 * against the earliest QoS deadline: a positive value means the frame is
 * late and may be dropped; a non-positive value (including the -1 returned
 * when no QoS decision is possible) means the frame should be processed. */
static gint64
gst_video_aggregator_do_qos (GstVideoAggregator * vagg, GstClockTime timestamp)
{
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstClockTime qostime, earliest_time;
  gdouble proportion;
  gint64 jitter;

  /* no timestamp, can't do QoS => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
    GST_LOG_OBJECT (vagg, "invalid timestamp, can't do QoS, process frame");
    return -1;
  }

  /* get latest QoS observation values */
  gst_video_aggregator_read_qos (vagg, &proportion, &earliest_time);

  /* skip qos if we have no observation (yet) => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
    GST_LOG_OBJECT (vagg, "no observation yet, process frame");
    return -1;
  }

  /* qos is done on running time */
  qostime =
      gst_segment_to_running_time (&GST_AGGREGATOR_PAD (agg->srcpad)->segment,
      GST_FORMAT_TIME, timestamp);

  /* see how our next timestamp relates to the latest qos timestamp */
  GST_LOG_OBJECT (vagg, "qostime %" GST_TIME_FORMAT ", earliest %"
      GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));

  /* positive jitter: the frame's deadline already passed */
  jitter = GST_CLOCK_DIFF (qostime, earliest_time);
  if (qostime != GST_CLOCK_TIME_NONE && jitter > 0) {
    GST_DEBUG_OBJECT (vagg, "we are late, drop frame");
    return jitter;
  }

  GST_LOG_OBJECT (vagg, "process frame");
  return jitter;
}
2186 
2187 static void
gst_video_aggregator_advance_on_timeout(GstVideoAggregator * vagg)2188 gst_video_aggregator_advance_on_timeout (GstVideoAggregator * vagg)
2189 {
2190   GstAggregator *agg = GST_AGGREGATOR (vagg);
2191   guint64 frame_duration;
2192   gint fps_d, fps_n;
2193   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
2194 
2195   GST_OBJECT_LOCK (agg);
2196   if (agg_segment->position == -1) {
2197     if (agg_segment->rate > 0.0)
2198       agg_segment->position = agg_segment->start;
2199     else
2200       agg_segment->position = agg_segment->stop;
2201   }
2202 
2203   /* Advance position */
2204   fps_d = GST_VIDEO_INFO_FPS_D (&vagg->info) ?
2205       GST_VIDEO_INFO_FPS_D (&vagg->info) : 1;
2206   fps_n = GST_VIDEO_INFO_FPS_N (&vagg->info) ?
2207       GST_VIDEO_INFO_FPS_N (&vagg->info) : 25;
2208   /* Default to 25/1 if no "best fps" is known */
2209   frame_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
2210   if (agg_segment->rate > 0.0)
2211     agg_segment->position += frame_duration;
2212   else if (agg_segment->position > frame_duration)
2213     agg_segment->position -= frame_duration;
2214   else
2215     agg_segment->position = 0;
2216   vagg->priv->nframes++;
2217   GST_OBJECT_UNLOCK (agg);
2218 }
2219 
/* GstAggregator::aggregate implementation.  Computes the output interval for
 * the next frame, fills the per-pad queues for it, runs QoS (possibly
 * dropping the frame and posting a QOS message instead), produces and pushes
 * the output buffer, and advances the output segment position. */
static GstFlowReturn
gst_video_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstClockTime output_start_time, output_end_time;
  GstClockTime output_start_running_time, output_end_running_time;
  GstBuffer *outbuf = NULL;
  GstFlowReturn flow_ret;
  gint64 jitter;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  GST_VIDEO_AGGREGATOR_LOCK (vagg);

  /* Output format not negotiated yet: keep the position moving on timeouts
   * and ask for more data */
  if (GST_VIDEO_INFO_FORMAT (&vagg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    if (timeout)
      gst_video_aggregator_advance_on_timeout (vagg);
    flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
    goto unlock_and_return;
  }

  if (agg_segment->rate < 0 && !GST_CLOCK_TIME_IS_VALID (agg_segment->stop)) {
    GST_ERROR_OBJECT (vagg, "Unknown segment.stop for negative rate");
    flow_ret = GST_FLOW_ERROR;
    goto unlock_and_return;
  }

  /* Output starts at the current position, clamped to the segment boundary
   * that matches the playback direction */
  output_start_time = agg_segment->position;
  if (agg_segment->rate >= 0) {
    if (agg_segment->position == -1 ||
        agg_segment->position < agg_segment->start) {
      output_start_time = agg_segment->start;
    }
  } else {
    if (agg_segment->position == -1 ||
        agg_segment->position > agg_segment->stop) {
      output_start_time = agg_segment->stop;
    }
  }

  /* Remember where the very first frame started; frame N's end time is
   * computed from this offset to avoid accumulating rounding errors */
  if (vagg->priv->nframes == 0) {
    vagg->priv->ts_offset = output_start_time;
    GST_DEBUG_OBJECT (vagg, "New ts offset %" GST_TIME_FORMAT,
        GST_TIME_ARGS (output_start_time));
  }

  if (GST_VIDEO_INFO_FPS_N (&vagg->info) == 0) {
    output_end_time = -1;
  } else {
    guint64 dur = gst_util_uint64_scale (vagg->priv->nframes + 1,
        GST_SECOND * GST_VIDEO_INFO_FPS_D (&vagg->info),
        GST_VIDEO_INFO_FPS_N (&vagg->info));

    if (agg_segment->rate >= 0)
      output_end_time = vagg->priv->ts_offset + dur;
    else if (vagg->priv->ts_offset >= dur)
      output_end_time = vagg->priv->ts_offset - dur;
    else
      output_end_time = -1;
  }

  /* Never produce past the segment boundary */
  if (agg_segment->rate >= 0) {
    if (agg_segment->stop != -1)
      output_end_time = MIN (output_end_time, agg_segment->stop);
  } else {
    if (agg_segment->start != -1)
      output_end_time = MAX (output_end_time, agg_segment->start);
  }

  output_start_running_time =
      gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
      output_start_time);
  output_end_running_time =
      gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
      output_end_time);

  /* Zero-length interval means we clamped to the segment end */
  if (output_end_time == output_start_time) {
    flow_ret = GST_FLOW_EOS;
  } else {
    flow_ret =
        gst_video_aggregator_fill_queues (vagg, output_start_running_time,
        output_end_running_time, timeout);
  }

  if (flow_ret == GST_AGGREGATOR_FLOW_NEED_DATA && !timeout) {
    GST_DEBUG_OBJECT (vagg, "Need more data for decisions");
    goto unlock_and_return;
  } else if (flow_ret == GST_FLOW_EOS) {
    GST_DEBUG_OBJECT (vagg, "All sinkpads are EOS -- forwarding");
    goto unlock_and_return;
  } else if (flow_ret == GST_FLOW_ERROR) {
    GST_WARNING_OBJECT (vagg, "Error collecting buffers");
    goto unlock_and_return;
  }

  /* It is possible that gst_video_aggregator_fill_queues() marked the pad
   * for reconfiguration. In this case we have to reconfigure before continuing
   * because we have picked a new buffer with different caps than before from
   * one one of the sink pads and continuing here may lead to a crash.
   * https://bugzilla.gnome.org/show_bug.cgi?id=780682
   */
  if (gst_pad_needs_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg))) {
    GST_DEBUG_OBJECT (vagg, "Need reconfigure");
    flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
    goto unlock_and_return;
  }

  GST_DEBUG_OBJECT (vagg,
      "Producing buffer for %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
      ", running time start %" GST_TIME_FORMAT ", running time end %"
      GST_TIME_FORMAT, GST_TIME_ARGS (output_start_time),
      GST_TIME_ARGS (output_end_time),
      GST_TIME_ARGS (output_start_running_time),
      GST_TIME_ARGS (output_end_running_time));

  /* Positive jitter means we are late: drop the frame and post a QoS
   * message instead of aggregating */
  jitter = gst_video_aggregator_do_qos (vagg, output_start_time);
  if (jitter <= 0) {
    flow_ret = gst_video_aggregator_do_aggregate (vagg, output_start_time,
        output_end_time, &outbuf);
    if (flow_ret != GST_FLOW_OK)
      goto done;
    vagg->priv->qos_processed++;
  } else {
    GstMessage *msg;

    vagg->priv->qos_dropped++;

    msg =
        gst_message_new_qos (GST_OBJECT_CAST (vagg), vagg->priv->live,
        output_start_running_time, gst_segment_to_stream_time (agg_segment,
            GST_FORMAT_TIME, output_start_time), output_start_time,
        output_end_time - output_start_time);
    gst_message_set_qos_values (msg, jitter, vagg->priv->proportion, 1000000);
    gst_message_set_qos_stats (msg, GST_FORMAT_BUFFERS,
        vagg->priv->qos_processed, vagg->priv->qos_dropped);
    gst_element_post_message (GST_ELEMENT_CAST (vagg), msg);

    flow_ret = GST_FLOW_OK;
  }

  /* Push outside the lock to avoid deadlocks with downstream */
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  if (outbuf) {
    GST_DEBUG_OBJECT (vagg,
        "Pushing buffer with ts %" GST_TIME_FORMAT " and duration %"
        GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));

    flow_ret = gst_aggregator_finish_buffer (agg, outbuf);
  }

  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  vagg->priv->nframes++;
  agg_segment->position = output_end_time;
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);

  return flow_ret;

done:
  if (outbuf)
    gst_buffer_unref (outbuf);
unlock_and_return:
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  return flow_ret;
}
2383 
/* FIXME, the duration query should reflect how long you will produce
 * data, that is the amount of stream time until you will emit EOS.
 *
 * For synchronized aggregating this is always the max of all the durations
 * of upstream since we emit EOS when all of them finished.
 *
 * We don't do synchronized aggregating so this really depends on where the
 * streams where punched in and what their relative offsets are against
 * each other which we can get from the first timestamps we see.
 *
 * When we add a new stream (or remove a stream) the duration might
 * also become invalid again and we need to post a new DURATION
 * message to notify this fact to the parent.
 * For now we take the max of all the upstream elements so the simple
 * cases work at least somewhat.
 */
/* Answer a duration query by taking the maximum duration reported by the
 * peers of all sink pads.  Returns FALSE if any peer query fails. */
static gboolean
gst_video_aggregator_query_duration (GstVideoAggregator * vagg,
    GstQuery * query)
{
  GValue item = { 0 };
  gint64 max;
  gboolean res;
  GstFormat format;
  GstIterator *it;
  gboolean done;

  /* parse format */
  gst_query_parse_duration (query, &format, NULL);

  max = -1;
  res = TRUE;
  done = FALSE;

  /* Take maximum of all durations */
  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (vagg));
  while (!done) {
    switch (gst_iterator_next (it, &item)) {
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
      case GST_ITERATOR_OK:
      {
        GstPad *pad;
        gint64 duration;

        pad = g_value_get_object (&item);

        /* ask sink peer for duration */
        res &= gst_pad_peer_query_duration (pad, format, &duration);
        /* take max from all valid return values */
        if (res) {
          /* valid unknown length, stop searching */
          if (duration == -1) {
            max = duration;
            done = TRUE;
          }
          /* else see if bigger than current max */
          else if (duration > max)
            max = duration;
        }
        g_value_reset (&item);
        break;
      }
      case GST_ITERATOR_RESYNC:
        /* Pad list changed under us: start the accumulation over */
        max = -1;
        res = TRUE;
        gst_iterator_resync (it);
        break;
      default:
        res = FALSE;
        done = TRUE;
        break;
    }
  }
  g_value_unset (&item);
  gst_iterator_free (it);

  if (res) {
    /* and store the max */
    GST_DEBUG_OBJECT (vagg, "Total duration in format %s: %"
        GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
    gst_query_set_duration (query, format, max);
  }

  return res;
}
2471 
2472 static gboolean
gst_video_aggregator_src_query(GstAggregator * agg,GstQuery * query)2473 gst_video_aggregator_src_query (GstAggregator * agg, GstQuery * query)
2474 {
2475   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2476   gboolean res = FALSE;
2477   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
2478 
2479   switch (GST_QUERY_TYPE (query)) {
2480     case GST_QUERY_POSITION:
2481     {
2482       GstFormat format;
2483 
2484       gst_query_parse_position (query, &format, NULL);
2485 
2486       switch (format) {
2487         case GST_FORMAT_TIME:
2488           gst_query_set_position (query, format,
2489               gst_segment_to_stream_time (agg_segment, GST_FORMAT_TIME,
2490                   agg_segment->position));
2491           res = TRUE;
2492           break;
2493         default:
2494           break;
2495       }
2496       break;
2497     }
2498     case GST_QUERY_DURATION:
2499       res = gst_video_aggregator_query_duration (vagg, query);
2500       break;
2501     case GST_QUERY_LATENCY:
2502       res =
2503           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
2504           (agg, query);
2505 
2506       if (res) {
2507         gst_query_parse_latency (query, &vagg->priv->live, NULL, NULL);
2508       }
2509       break;
2510     default:
2511       res =
2512           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
2513           (agg, query);
2514       break;
2515   }
2516   return res;
2517 }
2518 
2519 static gboolean
gst_video_aggregator_src_event(GstAggregator * agg,GstEvent * event)2520 gst_video_aggregator_src_event (GstAggregator * agg, GstEvent * event)
2521 {
2522   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2523 
2524   switch (GST_EVENT_TYPE (event)) {
2525     case GST_EVENT_QOS:
2526     {
2527       GstQOSType type;
2528       GstClockTimeDiff diff;
2529       GstClockTime timestamp;
2530       gdouble proportion;
2531 
2532       gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
2533       gst_video_aggregator_update_qos (vagg, proportion, diff, timestamp);
2534       break;
2535     }
2536     case GST_EVENT_SEEK:
2537     {
2538       GST_DEBUG_OBJECT (vagg, "Handling SEEK event");
2539     }
2540     default:
2541       break;
2542   }
2543 
2544   return
2545       GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_event (agg,
2546       event);
2547 }
2548 
/* GstAggregator::flush implementation: reset the output segment position,
 * timestamp offset, frame counter and QoS state so aggregation restarts
 * cleanly after a flush. */
static GstFlowReturn
gst_video_aggregator_flush (GstAggregator * agg)
{
  GList *l;
  gdouble abs_rate;
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  GST_INFO_OBJECT (agg, "Flushing");
  GST_OBJECT_LOCK (vagg);
  abs_rate = ABS (agg_segment->rate);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *p = l->data;

    /* Convert to the output segment rate */
    /* NOTE(review): abs_rate was assigned ABS (agg_segment->rate) just
     * above, so this condition is always false as written and the
     * rescaling below is dead code.  Presumably one side was meant to use
     * the pad's own segment rate -- TODO confirm against upstream history
     * before changing. */
    if (ABS (agg_segment->rate) != abs_rate) {
      if (ABS (agg_segment->rate) != 1.0 && p->priv->buffer) {
        p->priv->start_time /= ABS (agg_segment->rate);
        p->priv->end_time /= ABS (agg_segment->rate);
      }
      if (abs_rate != 1.0 && p->priv->buffer) {
        p->priv->start_time *= abs_rate;
        p->priv->end_time *= abs_rate;
      }
    }
  }
  GST_OBJECT_UNLOCK (vagg);

  /* restart the output timeline from scratch */
  agg_segment->position = -1;
  vagg->priv->ts_offset = 0;
  vagg->priv->nframes = 0;

  gst_video_aggregator_reset_qos (vagg);
  return GST_FLOW_OK;
}
2584 
2585 static gboolean
gst_video_aggregator_sink_event(GstAggregator * agg,GstAggregatorPad * bpad,GstEvent * event)2586 gst_video_aggregator_sink_event (GstAggregator * agg, GstAggregatorPad * bpad,
2587     GstEvent * event)
2588 {
2589   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2590   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
2591   gboolean ret = TRUE;
2592 
2593   GST_DEBUG_OBJECT (pad, "Got %s event on pad %s:%s",
2594       GST_EVENT_TYPE_NAME (event), GST_DEBUG_PAD_NAME (pad));
2595 
2596   switch (GST_EVENT_TYPE (event)) {
2597     case GST_EVENT_CAPS:
2598     {
2599       GstCaps *caps;
2600 
2601       gst_event_parse_caps (event, &caps);
2602       ret =
2603           gst_video_aggregator_pad_sink_setcaps (GST_PAD (pad),
2604           GST_OBJECT (vagg), caps);
2605       gst_event_unref (event);
2606       event = NULL;
2607       break;
2608     }
2609     case GST_EVENT_SEGMENT:{
2610       GstSegment seg;
2611       gst_event_copy_segment (event, &seg);
2612 
2613       g_assert (seg.format == GST_FORMAT_TIME);
2614       gst_video_aggregator_reset_qos (vagg);
2615       break;
2616     }
2617     default:
2618       break;
2619   }
2620 
2621   if (event != NULL)
2622     return GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_event
2623         (agg, bpad, event);
2624 
2625   return ret;
2626 }
2627 
2628 static gboolean
gst_video_aggregator_start(GstAggregator * agg)2629 gst_video_aggregator_start (GstAggregator * agg)
2630 {
2631   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2632 
2633   gst_caps_replace (&vagg->priv->current_caps, NULL);
2634 
2635   return TRUE;
2636 }
2637 
2638 static gboolean
gst_video_aggregator_stop(GstAggregator * agg)2639 gst_video_aggregator_stop (GstAggregator * agg)
2640 {
2641   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2642 
2643   gst_video_aggregator_reset (vagg);
2644 
2645   return TRUE;
2646 }
2647 
2648 /* GstElement vmethods */
2649 static GstPad *
gst_video_aggregator_request_new_pad(GstElement * element,GstPadTemplate * templ,const gchar * req_name,const GstCaps * caps)2650 gst_video_aggregator_request_new_pad (GstElement * element,
2651     GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
2652 {
2653   GstVideoAggregator *vagg;
2654   GstVideoAggregatorPad *vaggpad;
2655 
2656   vagg = GST_VIDEO_AGGREGATOR (element);
2657 
2658   vaggpad = (GstVideoAggregatorPad *)
2659       GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->request_new_pad
2660       (element, templ, req_name, caps);
2661 
2662   if (vaggpad == NULL)
2663     return NULL;
2664 
2665   GST_OBJECT_LOCK (vagg);
2666   vaggpad->priv->zorder = GST_ELEMENT (vagg)->numsinkpads;
2667   vaggpad->priv->start_time = -1;
2668   vaggpad->priv->end_time = -1;
2669   element->sinkpads = g_list_sort (element->sinkpads,
2670       (GCompareFunc) pad_zorder_compare);
2671   GST_OBJECT_UNLOCK (vagg);
2672 
2673   return GST_PAD (vaggpad);
2674 }
2675 
/* GstElement::release_pad: tear down per-pad state, reset the whole
 * element when the last sink pad goes away, and force renegotiation of the
 * output caps. */
static void
gst_video_aggregator_release_pad (GstElement * element, GstPad * pad)
{
  GstVideoAggregator *vagg = NULL;
  GstVideoAggregatorPad *vaggpad;
  gboolean last_pad;

  vagg = GST_VIDEO_AGGREGATOR (element);
  vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);

  GST_VIDEO_AGGREGATOR_LOCK (vagg);

  /* the pad is still counted here, so numsinkpads - 1 == 0 means this is
   * the only remaining sink pad; read under the object lock */
  GST_OBJECT_LOCK (vagg);
  last_pad = (GST_ELEMENT (vagg)->numsinkpads - 1 == 0);
  GST_OBJECT_UNLOCK (vagg);

  /* with no inputs left, the whole element state can be reset */
  if (last_pad)
    gst_video_aggregator_reset (vagg);

  /* drop all per-pad state before the pad goes away */
  gst_buffer_replace (&vaggpad->priv->buffer, NULL);
  gst_caps_replace (&vaggpad->priv->caps, NULL);
  gst_caps_replace (&vaggpad->priv->pending_caps, NULL);

  GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->release_pad
      (GST_ELEMENT (vagg), pad);

  /* output caps may change now that an input is gone */
  gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));

  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  return;
}
2707 
/* GstAggregator::propose_allocation: advertise GstVideoMeta support to
 * upstream elements on every allocation query. */
static gboolean
gst_video_aggregator_propose_allocation (GstAggregator * agg,
    GstAggregatorPad * pad, GstQuery * decide_query, GstQuery * query)
{
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  return TRUE;
}
2716 
/* GstAggregator::decide_allocation: negotiate the downstream allocation.
 * Ensures at least one allocation param with 16-byte alignment, picks (or
 * creates) a buffer pool sized for the negotiated video info, and enables
 * GstVideoMeta on the pool when downstream supports it. */
static gboolean
gst_video_aggregator_decide_allocation (GstAggregator * agg, GstQuery * query)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  /* default params: align is an alignment mask, 15 means 16-byte aligned */
  GstAllocationParams params = { 0, 15, 0, 0 };
  guint i;
  GstBufferPool *pool;
  GstAllocator *allocator;
  guint size, min, max;
  gboolean update = FALSE;
  GstStructure *config = NULL;
  GstCaps *caps = NULL;

  if (gst_query_get_n_allocation_params (query) == 0) {
    /* nothing proposed downstream: add our default params */
    gst_query_add_allocation_param (query, NULL, &params);
  } else {
    /* bump every proposed param to at least 16-byte alignment */
    for (i = 0; i < gst_query_get_n_allocation_params (query); i++) {
      /* NOTE(review): this shadows the outer `allocator`, and the
       * reference returned by gst_query_parse_nth_allocation_param()
       * ((transfer full) per the GstQuery annotations) does not appear to
       * be released after being set back -- possible ref leak, confirm. */
      GstAllocator *allocator;

      gst_query_parse_nth_allocation_param (query, i, &allocator, &params);
      params.align = MAX (params.align, 15);
      gst_query_set_nth_allocation_param (query, i, allocator, &params);
    }
  }

  /* take the first param as the allocator for our pool config */
  gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

    /* adjust size to at least one full output frame */
    size = MAX (size, vagg->info.size);
    update = TRUE;
  } else {
    pool = NULL;
    size = vagg->info.size;
    min = max = 0;
    update = FALSE;
  }

  gst_query_parse_allocation (query, &caps, NULL);

  /* no downstream pool, make our own */
  if (pool == NULL)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);

  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_config_set_allocator (config, allocator, &params);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  /* buffer pool may have to do some changes */
  if (!gst_buffer_pool_set_config (pool, config)) {
    config = gst_buffer_pool_get_config (pool);

    /* If the changes are not acceptable, fall back to a generic video pool */
    if (!gst_buffer_pool_config_validate_params (config, caps, size, min, max)) {
      GST_DEBUG_OBJECT (agg, "unsupported pool, making new pool");

      gst_object_unref (pool);
      pool = gst_video_buffer_pool_new ();
      gst_buffer_pool_config_set_params (config, caps, size, min, max);
      gst_buffer_pool_config_set_allocator (config, allocator, &params);

      if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
        gst_buffer_pool_config_add_option (config,
            GST_BUFFER_POOL_OPTION_VIDEO_META);
      }
    }

    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* answer the query: replace the first proposed pool, or add ours */
  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  if (pool)
    gst_object_unref (pool);
  if (allocator)
    gst_object_unref (allocator);

  return TRUE;

config_failed:
  if (pool)
    gst_object_unref (pool);
  if (allocator)
    gst_object_unref (allocator);

  GST_ELEMENT_ERROR (agg, RESOURCE, SETTINGS,
      ("Failed to configure the buffer pool"),
      ("Configuration is most likely invalid, please report this issue."));
  return FALSE;
}
2818 
2819 static GstFlowReturn
gst_video_aggregator_create_output_buffer(GstVideoAggregator * videoaggregator,GstBuffer ** outbuf)2820 gst_video_aggregator_create_output_buffer (GstVideoAggregator * videoaggregator,
2821     GstBuffer ** outbuf)
2822 {
2823   GstAggregator *aggregator = GST_AGGREGATOR (videoaggregator);
2824   GstBufferPool *pool;
2825   GstFlowReturn ret = GST_FLOW_OK;
2826 
2827   pool = gst_aggregator_get_buffer_pool (aggregator);
2828 
2829   if (pool) {
2830     if (!gst_buffer_pool_is_active (pool)) {
2831       if (!gst_buffer_pool_set_active (pool, TRUE)) {
2832         GST_ELEMENT_ERROR (videoaggregator, RESOURCE, SETTINGS,
2833             ("failed to activate bufferpool"),
2834             ("failed to activate bufferpool"));
2835         return GST_FLOW_ERROR;
2836       }
2837     }
2838 
2839     ret = gst_buffer_pool_acquire_buffer (pool, outbuf, NULL);
2840     gst_object_unref (pool);
2841   } else {
2842     guint outsize;
2843     GstAllocator *allocator;
2844     GstAllocationParams params;
2845 
2846     gst_aggregator_get_allocator (aggregator, &allocator, &params);
2847 
2848     outsize = GST_VIDEO_INFO_SIZE (&videoaggregator->info);
2849     *outbuf = gst_buffer_new_allocate (allocator, outsize, &params);
2850 
2851     if (allocator)
2852       gst_object_unref (allocator);
2853 
2854     if (*outbuf == NULL) {
2855       GST_ELEMENT_ERROR (videoaggregator, RESOURCE, NO_SPACE_LEFT,
2856           (NULL), ("Could not acquire buffer of size: %d", outsize));
2857       ret = GST_FLOW_ERROR;
2858     }
2859   }
2860   return ret;
2861 }
2862 
2863 static gboolean
gst_video_aggregator_pad_sink_acceptcaps(GstPad * pad,GstVideoAggregator * vagg,GstCaps * caps)2864 gst_video_aggregator_pad_sink_acceptcaps (GstPad * pad,
2865     GstVideoAggregator * vagg, GstCaps * caps)
2866 {
2867   gboolean ret;
2868   GstCaps *accepted_caps;
2869   gint i, n;
2870   GstStructure *s;
2871   GstAggregator *agg = GST_AGGREGATOR (vagg);
2872 
2873   GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);
2874 
2875   accepted_caps = gst_pad_get_current_caps (GST_PAD (agg->srcpad));
2876 
2877   if (accepted_caps == NULL)
2878     accepted_caps = gst_pad_get_pad_template_caps (GST_PAD (agg->srcpad));
2879 
2880   accepted_caps = gst_caps_make_writable (accepted_caps);
2881 
2882   GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);
2883 
2884   n = gst_caps_get_size (accepted_caps);
2885   for (i = 0; i < n; i++) {
2886     s = gst_caps_get_structure (accepted_caps, i);
2887     gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2888         1, NULL);
2889 
2890     if (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
2891       gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
2892           "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
2893       gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
2894           "pixel-aspect-ratio", NULL);
2895     }
2896   }
2897 
2898   ret = gst_caps_can_intersect (caps, accepted_caps);
2899   GST_DEBUG_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT,
2900       (ret ? "" : "not "), caps);
2901   gst_caps_unref (accepted_caps);
2902   return ret;
2903 }
2904 
2905 static gboolean
gst_video_aggregator_sink_query(GstAggregator * agg,GstAggregatorPad * bpad,GstQuery * query)2906 gst_video_aggregator_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
2907     GstQuery * query)
2908 {
2909   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2910   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
2911   gboolean ret = FALSE;
2912 
2913   switch (GST_QUERY_TYPE (query)) {
2914     case GST_QUERY_CAPS:
2915     {
2916       GstCaps *filter, *caps;
2917 
2918       gst_query_parse_caps (query, &filter);
2919       caps =
2920           gst_video_aggregator_pad_sink_getcaps (GST_PAD (pad), vagg, filter);
2921       gst_query_set_caps_result (query, caps);
2922       gst_caps_unref (caps);
2923       ret = TRUE;
2924       break;
2925     }
2926     case GST_QUERY_ACCEPT_CAPS:
2927     {
2928       GstCaps *caps;
2929 
2930       gst_query_parse_accept_caps (query, &caps);
2931       ret =
2932           gst_video_aggregator_pad_sink_acceptcaps (GST_PAD (pad), vagg, caps);
2933       gst_query_set_accept_caps_result (query, ret);
2934       ret = TRUE;
2935       break;
2936     }
2937     default:
2938       ret =
2939           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_query
2940           (agg, bpad, query);
2941       break;
2942   }
2943   return ret;
2944 }
2945 
/**
 * gst_video_aggregator_get_execution_task_pool:
 * @vagg: the #GstVideoAggregator
 *
 * The returned #GstTaskPool is used internally for performing parallel
 * video format conversions/scaling/etc during the
 * #GstVideoAggregatorPadClass::prepare_frame_start() process.
 * Subclasses can add their own operation to perform using the returned
 * #GstTaskPool during #GstVideoAggregatorClass::aggregate_frames().
 *
 * Returns: (transfer full): the #GstTaskPool that can be used by subclasses
 *     for performing concurrent operations
 *
 * Since: 1.20
 */
GstTaskPool *
gst_video_aggregator_get_execution_task_pool (GstVideoAggregator * vagg)
{
  g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR (vagg), NULL);

  /* take a new reference for the caller (transfer full) */
  return gst_object_ref (vagg->priv->task_pool);
}
2968 
2969 /* GObject vmethods */
2970 static void
gst_video_aggregator_finalize(GObject * o)2971 gst_video_aggregator_finalize (GObject * o)
2972 {
2973   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2974 
2975   g_mutex_clear (&vagg->priv->lock);
2976   g_ptr_array_unref (vagg->priv->supported_formats);
2977 
2978   if (vagg->priv->task_pool)
2979     gst_task_pool_cleanup (vagg->priv->task_pool);
2980   gst_clear_object (&vagg->priv->task_pool);
2981 
2982   G_OBJECT_CLASS (gst_video_aggregator_parent_class)->finalize (o);
2983 }
2984 
2985 static void
gst_video_aggregator_dispose(GObject * o)2986 gst_video_aggregator_dispose (GObject * o)
2987 {
2988   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2989 
2990   gst_caps_replace (&vagg->priv->current_caps, NULL);
2991 
2992   G_OBJECT_CLASS (gst_video_aggregator_parent_class)->dispose (o);
2993 }
2994 
2995 static void
gst_video_aggregator_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)2996 gst_video_aggregator_get_property (GObject * object,
2997     guint prop_id, GValue * value, GParamSpec * pspec)
2998 {
2999   switch (prop_id) {
3000     default:
3001       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
3002       break;
3003   }
3004 }
3005 
3006 static void
gst_video_aggregator_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)3007 gst_video_aggregator_set_property (GObject * object,
3008     guint prop_id, const GValue * value, GParamSpec * pspec)
3009 {
3010   switch (prop_id) {
3011     default:
3012       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
3013       break;
3014   }
3015 }
3016 
/* GObject boilerplate */

/* Class initializer: wire up the GObject, GstElement and GstAggregator
 * vmethods and install GstVideoAggregator's default class handlers. */
static void
gst_video_aggregator_class_init (GstVideoAggregatorClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;
  GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_video_aggregator_debug, "videoaggregator", 0,
      "base video aggregator");

  gst_video_aggregator_parent_class = g_type_class_peek_parent (klass);

  /* account for the private struct registered with the type system */
  if (video_aggregator_private_offset != 0)
    g_type_class_adjust_private_offset (klass,
        &video_aggregator_private_offset);

  gobject_class->finalize = gst_video_aggregator_finalize;
  gobject_class->dispose = gst_video_aggregator_dispose;

  gobject_class->get_property = gst_video_aggregator_get_property;
  gobject_class->set_property = gst_video_aggregator_set_property;

  /* pad management */
  gstelement_class->request_new_pad =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_request_new_pad);
  gstelement_class->release_pad =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_release_pad);

  /* aggregator behaviour */
  agg_class->start = gst_video_aggregator_start;
  agg_class->stop = gst_video_aggregator_stop;
  agg_class->sink_query = gst_video_aggregator_sink_query;
  agg_class->sink_event = gst_video_aggregator_sink_event;
  agg_class->flush = gst_video_aggregator_flush;
  agg_class->aggregate = gst_video_aggregator_aggregate;
  agg_class->src_event = gst_video_aggregator_src_event;
  agg_class->src_query = gst_video_aggregator_src_query;
  agg_class->get_next_time = gst_aggregator_simple_get_next_time;
  agg_class->update_src_caps = gst_video_aggregator_default_update_src_caps;
  agg_class->fixate_src_caps = gst_video_aggregator_default_fixate_src_caps;
  agg_class->negotiated_src_caps =
      gst_video_aggregator_default_negotiated_src_caps;
  agg_class->decide_allocation = gst_video_aggregator_decide_allocation;
  agg_class->propose_allocation = gst_video_aggregator_propose_allocation;
  agg_class->peek_next_sample = gst_video_aggregator_peek_next_sample;

  /* defaults that subclasses may override */
  klass->find_best_format = gst_video_aggregator_find_best_format;
  klass->create_output_buffer = gst_video_aggregator_create_output_buffer;
  klass->update_caps = gst_video_aggregator_default_update_caps;

  /* Register the pad class */
  g_type_class_ref (GST_TYPE_VIDEO_AGGREGATOR_PAD);
}
3069 
/* Instance initializer: set up private state, collect the video formats
 * advertised by the subclass's src pad template, and create the shared
 * task pool used for parallel frame preparation. */
static void
gst_video_aggregator_init (GstVideoAggregator * vagg,
    GstVideoAggregatorClass * klass)
{
  GstCaps *src_template;
  GstPadTemplate *pad_template;
  gint i;                       /* NOTE(review): gst_caps_get_size() returns
                                 * guint, so the loop compares signed with
                                 * unsigned -- harmless here, but noisy
                                 * under -Wsign-compare */

  vagg->priv = gst_video_aggregator_get_instance_private (vagg);
  vagg->priv->current_caps = NULL;

  g_mutex_init (&vagg->priv->lock);

  /* initialize variables */
  gst_video_aggregator_reset (vagg);

  /* Finding all supported formats: walk every structure of the src
   * template caps and collect the GstVideoFormatInfo for each "format"
   * entry, which may be a single string or a list of strings. */
  vagg->priv->supported_formats = g_ptr_array_new ();
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
  src_template = gst_pad_template_get_caps (pad_template);
  for (i = 0; i < gst_caps_get_size (src_template); i++) {
    const GValue *v =
        gst_structure_get_value (gst_caps_get_structure (src_template, i),
        "format");

    if (G_VALUE_HOLDS_STRING (v)) {
      /* single format string */
      GstVideoFormat f = gst_video_format_from_string (g_value_get_string (v));
      GstVideoFormatInfo *format_info =
          (GstVideoFormatInfo *) gst_video_format_get_info (f);
      g_ptr_array_add (vagg->priv->supported_formats, format_info);
      continue;
    }

    if (GST_VALUE_HOLDS_LIST (v)) {
      /* list of format strings */
      gint j;

      for (j = 0; j < gst_value_list_get_size (v); j++) {
        const GValue *v1 = gst_value_list_get_value (v, j);
        GstVideoFormat f =
            gst_video_format_from_string (g_value_get_string (v1));
        GstVideoFormatInfo *format_info =
            (GstVideoFormatInfo *) gst_video_format_get_info (f);
        g_ptr_array_add (vagg->priv->supported_formats, format_info);
      }
    }
  }

  gst_caps_unref (src_template);

  /* task pool for parallel prepare_frame work, one thread per CPU */
  vagg->priv->task_pool = gst_shared_task_pool_new ();
  gst_shared_task_pool_set_max_threads (GST_SHARED_TASK_POOL (vagg->
          priv->task_pool), g_get_num_processors ());
  gst_task_pool_prepare (vagg->priv->task_pool, NULL);
}
3125