/*
 * Combine video streams to 3D stereo
 *
 * GStreamer
 * Copyright (C) 2009 Julien Isorce <julien.isorce@gmail.com>
 * Copyright (C) 2014 Jan Schmidt <jan@noraisin.net>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-glstereomix
 * @title: glstereomix
 *
 * Combine 2 input streams to produce a stereoscopic output
 * stream. Input views are taken from the left pad and right pad
 * respectively, and mixed according to their timelines.
 *
 * If either input stream is stereoscopic, the appropriate view
 * (left or right) is taken from each stream and placed into the output.
 *
 * The multiview representation on the output is chosen according to
 * the downstream caps.
 *
 * ## Examples
 * |[
 * gst-launch-1.0 -v videotestsrc pattern=ball name=left \
 *     videotestsrc name=right glstereomix name=mix \
 *     left. ! video/x-raw,width=640,height=480 ! glupload ! mix. \
 *     right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \
 *     mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=side-by-side ! \
 *     queue ! glimagesink output-multiview-mode=side-by-side
 * ]| Mix 2 different videotestsrc patterns into a side-by-side stereo image and display it.
 * |[
 * gst-launch-1.0 -ev v4l2src name=left \
 *     videotestsrc name=right \
 *     glstereomix name=mix \
 *     left. ! video/x-raw,width=640,height=480 ! glupload ! glcolorconvert ! mix. \
 *     right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \
 *     mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=top-bottom ! \
 *     glcolorconvert ! gldownload ! queue ! x264enc ! h264parse ! \
 *     mp4mux ! progressreport ! filesink location=output.mp4
 * ]| Mix the input from a camera as the left view and videotestsrc as the right view,
 * and encode them as a top-bottom frame-packed H.264 video.
 *
 */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstglelements.h"
#include "gstglstereomix.h"

#define GST_CAT_DEFAULT gst_gl_stereo_mix_debug
GST_DEBUG_CATEGORY (gst_gl_stereo_mix_debug);

G_DEFINE_TYPE (GstGLStereoMixPad, gst_gl_stereo_mix_pad, GST_TYPE_GL_MIXER_PAD);

static void
gst_gl_stereo_mix_pad_class_init (GstGLStereoMixPadClass * klass)
{
}

static void
gst_gl_stereo_mix_pad_init (GstGLStereoMixPad * pad)
{
}

static void gst_gl_stereo_mix_child_proxy_init (gpointer g_iface,
    gpointer iface_data);

#define gst_gl_stereo_mix_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstGLStereoMix, gst_gl_stereo_mix, GST_TYPE_GL_MIXER,
    G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
        gst_gl_stereo_mix_child_proxy_init));
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (glstereomix, "glstereomix",
    GST_RANK_NONE, GST_TYPE_GL_STEREO_MIX, gl_element_init (plugin));

static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps);
static gboolean _negotiated_caps (GstAggregator * aggregator, GstCaps * caps);
static gboolean gst_gl_stereo_mix_make_output (GstGLStereoMix * mix);
static gboolean gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer);

#define DEFAULT_DOWNMIX GST_GL_STEREO_DOWNMIX_ANAGLYPH_GREEN_MAGENTA_DUBOIS
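/* The downmix mode only takes effect when downstream negotiates a mono
 * (downmixed) output. Illustrative application-side sketch, not part of the
 * element code itself: any other GstGLStereoDownmix value can be set the
 * same way, e.g.
 *
 *   g_object_set (mix, "downmix-mode",
 *       GST_GL_STEREO_DOWNMIX_ANAGLYPH_GREEN_MAGENTA_DUBOIS, NULL);
 */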

/* GLStereoMix signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

enum
{
  PROP_0,
  PROP_DOWNMIX_MODE
};

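/* Both the source pad and each request sink pad accept RGBA 2D textures in
 * GLMemory, RGBA frames carrying GstVideoGLTextureUploadMeta, or raw
 * system-memory video in any format GstGLColorConvert can handle. */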
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), "
        "format = (string) RGBA, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", "
        "framerate = " GST_VIDEO_FPS_RANGE ","
        "texture-target = (string) 2D"
        "; "
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
            "RGBA")
        "; " GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS))
    );

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
    GST_PAD_SINK,
    GST_PAD_REQUEST,
    GST_STATIC_CAPS ("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), "
        "format = (string) RGBA, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE ", "
        "framerate = " GST_VIDEO_FPS_RANGE ","
        "texture-target = (string) 2D"
        "; "
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
            "RGBA")
        "; " GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS))
    );

static GstPad *gst_gl_stereo_mix_request_new_pad (GstElement * element,
    GstPadTemplate * temp, const gchar * req_name, const GstCaps * caps);
static void gst_gl_stereo_mix_release_pad (GstElement * element, GstPad * pad);

static GstFlowReturn gst_gl_stereo_mix_create_output_buffer (GstVideoAggregator
    * videoaggregator, GstBuffer ** outbuf);
static gboolean gst_gl_stereo_mix_stop (GstAggregator * agg);
static gboolean gst_gl_stereo_mix_start (GstAggregator * agg);
static gboolean gst_gl_stereo_mix_src_query (GstAggregator * agg,
    GstQuery * query);

static void gst_gl_stereo_mix_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_gl_stereo_mix_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static void gst_gl_stereo_mix_finalize (GObject * object);

static GstFlowReturn
gst_gl_stereo_mix_aggregate_frames (GstVideoAggregator * vagg,
    GstBuffer * outbuffer);

static void
gst_gl_stereo_mix_class_init (GstGLStereoMixClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstVideoAggregatorClass *videoaggregator_class =
      (GstVideoAggregatorClass *) klass;
  GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;
  GstGLBaseMixerClass *base_mix_class = (GstGLBaseMixerClass *) klass;

  GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "glstereomixer", 0,
      "opengl stereoscopic mixer");

  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_gl_stereo_mix_finalize);

  gobject_class->get_property = gst_gl_stereo_mix_get_property;
  gobject_class->set_property = gst_gl_stereo_mix_set_property;

  gst_element_class_set_metadata (element_class,
      "OpenGL stereo video combiner",
      "Filter/Effect/Video", "OpenGL stereo video combiner",
      "Jan Schmidt <jan@centricular.com>");

  g_object_class_install_property (gobject_class, PROP_DOWNMIX_MODE,
      g_param_spec_enum ("downmix-mode", "Mode for mono downmixed output",
          "Output anaglyph type to generate when downmixing to mono",
          GST_TYPE_GL_STEREO_DOWNMIX, DEFAULT_DOWNMIX,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template_with_gtype (element_class,
      &src_factory, GST_TYPE_AGGREGATOR_PAD);
  gst_element_class_add_static_pad_template_with_gtype (element_class,
      &sink_factory, GST_TYPE_GL_STEREO_MIX_PAD);

  element_class->request_new_pad =
      GST_DEBUG_FUNCPTR (gst_gl_stereo_mix_request_new_pad);
  element_class->release_pad =
      GST_DEBUG_FUNCPTR (gst_gl_stereo_mix_release_pad);

  agg_class->stop = gst_gl_stereo_mix_stop;
  agg_class->start = gst_gl_stereo_mix_start;
  agg_class->src_query = gst_gl_stereo_mix_src_query;
  agg_class->negotiated_src_caps = _negotiated_caps;

  videoaggregator_class->aggregate_frames = gst_gl_stereo_mix_aggregate_frames;
  videoaggregator_class->update_caps = _update_caps;
  videoaggregator_class->create_output_buffer =
      gst_gl_stereo_mix_create_output_buffer;

  base_mix_class->supported_gl_api =
      GST_GL_API_GLES2 | GST_GL_API_OPENGL | GST_GL_API_OPENGL3;

  gst_type_mark_as_plugin_api (GST_TYPE_GL_STEREO_DOWNMIX, 0);
  gst_type_mark_as_plugin_api (GST_TYPE_GL_STEREO_MIX_PAD, 0);
}

static void
gst_gl_stereo_mix_init (GstGLStereoMix * mix)
{
}

static void
gst_gl_stereo_mix_finalize (GObject * object)
{
  //GstGLStereoMix *mix = GST_GL_STEREO_MIX (object);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

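/* Answer CAPS queries on the source pad with the currently configured caps
 * (or the pad template caps if nothing is negotiated yet), intersected with
 * any filter caps supplied by the peer. */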
static gboolean
gst_gl_stereo_mix_query_caps (GstPad * pad, GstAggregator * agg,
    GstQuery * query)
{
  GstCaps *filter, *caps;

  gst_query_parse_caps (query, &filter);

  caps = gst_pad_get_current_caps (agg->srcpad);
  if (caps == NULL) {
    caps = gst_pad_get_pad_template_caps (agg->srcpad);
  }

  if (filter) {
    /* Take ownership of the intersection and drop the original caps ref */
    GstCaps *tmp =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  gst_query_set_caps_result (query, caps);
  gst_caps_unref (caps);

  return TRUE;
}

static gboolean
gst_gl_stereo_mix_src_query (GstAggregator * agg, GstQuery * query)
{
  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
      return gst_gl_stereo_mix_query_caps (agg->srcpad, agg, query);
      break;
    default:
      break;
  }

  return GST_AGGREGATOR_CLASS (parent_class)->src_query (agg, query);
}


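/* GstVideoAggregator vfunc: instead of allocating an empty buffer, run the
 * view conversion over the queued input frames and hand back the resulting
 * primary (or, in frame-by-frame mode, auxiliary) output buffer. */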
static GstFlowReturn
gst_gl_stereo_mix_create_output_buffer (GstVideoAggregator * videoaggregator,
    GstBuffer ** outbuf)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (videoaggregator);
  GstFlowReturn ret = GST_FLOW_OK;

#if 0

  if (!mix->priv->pool_active) {
    if (!gst_buffer_pool_set_active (mix->priv->pool, TRUE)) {
      GST_ELEMENT_ERROR (mix, RESOURCE, SETTINGS,
          ("failed to activate bufferpool"), ("failed to activate bufferpool"));
      return GST_FLOW_ERROR;
    }
    mix->priv->pool_active = TRUE;
  }

  return gst_buffer_pool_acquire_buffer (mix->priv->pool, outbuf, NULL);
#endif

  if (!gst_gl_stereo_mix_make_output (mix)) {
    gst_buffer_replace (&mix->primary_out, NULL);
    gst_buffer_replace (&mix->auxilliary_out, NULL);
    GST_ELEMENT_ERROR (mix, RESOURCE, SETTINGS,
        ("Failed to generate output"), ("failed to generate output"));
    ret = GST_FLOW_ERROR;
  }

  if (mix->auxilliary_out) {
    *outbuf = mix->auxilliary_out;
    mix->auxilliary_out = NULL;
  } else {
    *outbuf = mix->primary_out;
    mix->primary_out = NULL;
  }
  return ret;
}

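/* Collect the current buffer from every sink pad; once all inputs have a
 * buffer, convert them into the negotiated output representation. Returns
 * TRUE when output was produced or when more input is simply needed. */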
static gboolean
gst_gl_stereo_mix_make_output (GstGLStereoMix * mix)
{
  GList *walk;
  gboolean res = FALSE;
  GstElement *element = GST_ELEMENT (mix);
  gboolean missing_buffer = FALSE;

  GST_LOG_OBJECT (mix, "Processing buffers");

  GST_OBJECT_LOCK (mix);
  walk = element->sinkpads;
  while (walk) {
    GstVideoAggregatorPad *vaggpad = walk->data;
    GstGLStereoMixPad *pad = walk->data;
    GstBuffer *buffer = gst_video_aggregator_pad_get_current_buffer (vaggpad);

    GST_LOG_OBJECT (mix, "Checking pad %" GST_PTR_FORMAT, vaggpad);

    if (buffer != NULL) {
      pad->current_buffer = buffer;

      GST_DEBUG_OBJECT (pad, "Got buffer %" GST_PTR_FORMAT,
          pad->current_buffer);
    } else {
      GST_LOG_OBJECT (mix, "No buffer on pad %" GST_PTR_FORMAT, vaggpad);
      pad->current_buffer = NULL;
      missing_buffer = TRUE;
    }
    walk = g_list_next (walk);
  }
  if (missing_buffer) {
    /* We're still waiting for a buffer to turn up on at least one input */
    GST_WARNING_OBJECT (mix, "Not generating output - need more input buffers");
    res = TRUE;
    goto out;
  }

  /* Copy GL memory from each input frame to the output */
  if (!gst_gl_stereo_mix_process_frames (mix)) {
    GST_LOG_OBJECT (mix, "Failed to process frames to output");
    goto out;
  }

  if (mix->primary_out == NULL)
    goto out;

  res = TRUE;

out:
  GST_OBJECT_UNLOCK (mix);

  return res;
}

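/* GstVideoAggregator vfunc: the output buffer was already produced in
 * create_output_buffer(). In frame-by-frame multiview mode push the primary
 * view from here and let the base class push the auxiliary view. */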
static GstFlowReturn
gst_gl_stereo_mix_aggregate_frames (GstVideoAggregator * vagg,
    GstBuffer * outbuf)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  /* If we're operating in frame-by-frame mode, push
   * the primary view now, and let the parent class
   * push the remaining auxiliary view */
  if (GST_VIDEO_INFO_MULTIVIEW_MODE (&vagg->info) ==
      GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
    /* Transfer the timestamps video-agg put on the aux buffer */
    gst_buffer_copy_into (mix->primary_out, outbuf,
        GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
    gst_aggregator_finish_buffer (GST_AGGREGATOR (vagg), mix->primary_out);
    mix->primary_out = NULL;

    /* And actually, we don't want timestamps on the aux buffer */
    GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
  }
  return GST_FLOW_OK;
}

static void
gst_gl_stereo_mix_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (object);

  switch (prop_id) {
    case PROP_DOWNMIX_MODE:
      g_value_set_enum (value, mix->downmix_mode);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_gl_stereo_mix_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (object);

  switch (prop_id) {
    case PROP_DOWNMIX_MODE:
      mix->downmix_mode = g_value_get_enum (value);
      if (mix->viewconvert)
        g_object_set_property (G_OBJECT (mix->viewconvert), "downmix-mode",
            value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

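/* Chain up to the base class to create the request sink pad, then announce
 * it through the GstChildProxy interface. */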
static GstPad *
gst_gl_stereo_mix_request_new_pad (GstElement * element, GstPadTemplate * templ,
    const gchar * req_name, const GstCaps * caps)
{
  GstPad *newpad;

  newpad = (GstPad *)
      GST_ELEMENT_CLASS (parent_class)->request_new_pad (element,
      templ, req_name, caps);

  if (newpad == NULL)
    goto could_not_create;

  gst_child_proxy_child_added (GST_CHILD_PROXY (element), G_OBJECT (newpad),
      GST_OBJECT_NAME (newpad));

  return GST_PAD_CAST (newpad);

could_not_create:
  {
    GST_DEBUG_OBJECT (element, "could not create/add pad");
    return NULL;
  }
}

static void
gst_gl_stereo_mix_release_pad (GstElement * element, GstPad * pad)
{
  GST_DEBUG_OBJECT (element, "release pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  gst_child_proxy_child_removed (GST_CHILD_PROXY (element), G_OBJECT (pad),
      GST_OBJECT_NAME (pad));

  GST_ELEMENT_CLASS (parent_class)->release_pad (element, pad);
}

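/* start() creates the GstGLViewConvert that does the actual packing of the
 * views and applies the configured downmix mode to it; stop() releases it
 * again. */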
static gboolean
gst_gl_stereo_mix_start (GstAggregator * agg)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (agg);

  if (!GST_AGGREGATOR_CLASS (parent_class)->start (agg))
    return FALSE;

  GST_OBJECT_LOCK (mix);
  mix->viewconvert = gst_gl_view_convert_new ();
  g_object_set (G_OBJECT (mix->viewconvert), "downmix-mode",
      mix->downmix_mode, NULL);
  GST_OBJECT_UNLOCK (mix);

  return TRUE;
}

static gboolean
gst_gl_stereo_mix_stop (GstAggregator * agg)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (agg);

  if (!GST_AGGREGATOR_CLASS (parent_class)->stop (agg))
    return FALSE;

  if (mix->viewconvert) {
    gst_object_unref (mix->viewconvert);
    mix->viewconvert = NULL;
  }

  return TRUE;
}


/* Convert to caps that can be accepted by this element... */
static GstCaps *
get_converted_caps (GstGLStereoMix * mix, GstCaps * caps)
{
#if 0
  GstGLContext *context = GST_GL_BASE_MIXER (mix)->context;
  GstCaps *result, *tmp;

  GST_LOG_OBJECT (mix, "Converting caps %" GST_PTR_FORMAT, caps);
  result = gst_gl_upload_transform_caps (context, GST_PAD_SINK, caps, NULL);
  tmp = result;
  GST_TRACE_OBJECT (mix, "transfer returned caps %" GST_PTR_FORMAT, tmp);

  result =
      gst_gl_color_convert_transform_caps (context, GST_PAD_SINK, tmp, NULL);
  gst_caps_unref (tmp);
  GST_TRACE_OBJECT (mix, "convert returned caps %" GST_PTR_FORMAT, result);

  tmp = result;
  result = gst_gl_view_convert_transform_caps (mix->viewconvert,
      GST_PAD_SINK, tmp, NULL);
  gst_caps_unref (tmp);
#else
  GstCaps *result;

  GST_LOG_OBJECT (mix, "Converting caps %" GST_PTR_FORMAT, caps);
  result = gst_gl_view_convert_transform_caps (mix->viewconvert,
      GST_PAD_SINK, caps, NULL);
#endif

  GST_LOG_OBJECT (mix, "returning caps %" GST_PTR_FORMAT, result);

  return result;
}

/* Return the possible output caps based on inputs and downstream prefs */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GList *l;
  gint best_width = -1, best_height = -1;
  gdouble best_fps = -1, cur_fps;
  gint best_fps_n = 0, best_fps_d = 1;
  GstVideoInfo *mix_info;
  GstCaps *blend_caps, *tmp_caps;
  GstCaps *out_caps;

  GST_OBJECT_LOCK (vagg);

  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstVideoInfo tmp = pad->info;
    gint this_width, this_height;
    gint fps_n, fps_d;

    if (!pad->info.finfo)
      continue;

    /* This can happen if we release a pad and another pad hasn't been
     * negotiated yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Convert to per-view width/height for unpacked forms */
    gst_video_multiview_video_info_change_mode (&tmp,
        GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);

    this_width = GST_VIDEO_INFO_WIDTH (&tmp);
    this_height = GST_VIDEO_INFO_HEIGHT (&tmp);
    fps_n = GST_VIDEO_INFO_FPS_N (&tmp);
    fps_d = GST_VIDEO_INFO_FPS_D (&tmp);

    GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT
        " w %u h %u", pad, this_width, this_height);

    if (this_width == 0 || this_height == 0)
      continue;

    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;

    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }

    /* FIXME: Preserve PAR for at least one input when different sized inputs */
  }
  GST_OBJECT_UNLOCK (vagg);

  mix_info = &mix->mix_info;
  gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, best_width,
      best_height);

  GST_VIDEO_INFO_FPS_N (mix_info) = best_fps_n;
  GST_VIDEO_INFO_FPS_D (mix_info) = best_fps_d;

  GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED;
  GST_VIDEO_INFO_VIEWS (mix_info) = 2;

  /* FIXME: If input is marked as flipped or flopped, preserve those flags */
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* Choose our output format based on downstream preferences */
  blend_caps = gst_video_info_to_caps (mix_info);

  gst_caps_set_features (blend_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

  tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps);
  gst_caps_unref (blend_caps);

  out_caps = gst_caps_intersect (caps, tmp_caps);
  gst_caps_unref (tmp_caps);

  GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT, out_caps);

  return out_caps;
}

/* Called after videoaggregator fixates our caps */
static gboolean
_negotiated_caps (GstAggregator * agg, GstCaps * caps)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GstCaps *in_caps;

  GST_LOG_OBJECT (mix, "Configured output caps %" GST_PTR_FORMAT, caps);

  if (GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps)
    if (!GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps))
      return FALSE;

  /* Update the glview_convert output */

  /* We can configure the view_converter now */
  gst_gl_view_convert_set_context (mix->viewconvert,
      GST_GL_BASE_MIXER (mix)->context);

  in_caps = gst_video_info_to_caps (&mix->mix_info);
  gst_caps_set_features (in_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
  gst_caps_set_simple (in_caps, "texture-target", G_TYPE_STRING,
      GST_GL_TEXTURE_TARGET_2D_STR, NULL);

  gst_gl_view_convert_set_caps (mix->viewconvert, in_caps, caps);
  gst_caps_unref (in_caps);

  return TRUE;
}

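/* Gather one GstMemory per input view into a temporary buffer, run it
 * through the GstGLViewConvert and attach GstVideoMeta to the converted
 * output buffer(s). */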
/* called with the object lock held */
static gboolean
gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer);
  GstBuffer *converted_buffer, *inbuf;
  GstVideoInfo *out_info = &vagg->info;
#ifndef G_DISABLE_ASSERT
  gint n;
#endif
  gint v, views;
  gint valid_views = 0;
  GList *walk;

  inbuf = gst_buffer_new ();
  walk = GST_ELEMENT (mixer)->sinkpads;
  while (walk) {
    GstGLStereoMixPad *pad = walk->data;
    GstMemory *in_mem;

    GST_LOG_OBJECT (mixer, "Handling frame %d", valid_views);

    if (!pad || !pad->current_buffer) {
      GST_DEBUG ("skipping texture, null frame");
      walk = g_list_next (walk);
      continue;
    }

    in_mem = gst_buffer_get_memory (pad->current_buffer, 0);

    GST_LOG_OBJECT (mixer,
        "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem);
    /* Appending the memory to a 2nd buffer locks it
     * exclusive a 2nd time, which will mark it for
     * copy-on-write. The ref will keep the memory
     * alive but we add a parent_buffer_meta to also
     * prevent the input buffer from returning to any buffer
     * pool it might belong to
     */
    gst_buffer_append_memory (inbuf, in_mem);
    /* Use parent buffer meta to keep input buffer alive */
    gst_buffer_add_parent_buffer_meta (inbuf, pad->current_buffer);

    valid_views++;
    walk = g_list_next (walk);
  }

  if (mixer->mix_info.views != valid_views) {
    GST_WARNING_OBJECT (mixer, "Not enough input views to process");
    return FALSE;
  }

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_SEPARATED)
    views = out_info->views;
  else
    views = 1;

  /* We can configure the view_converter now */
  gst_gl_view_convert_set_context (mixer->viewconvert,
      GST_GL_BASE_MIXER (mixer)->context);

  if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert,
          FALSE, inbuf) != GST_FLOW_OK)
    return FALSE;

  /* Clear any existing buffers, just in case */
  gst_buffer_replace (&mixer->primary_out, NULL);
  gst_buffer_replace (&mixer->auxilliary_out, NULL);

  if (gst_gl_view_convert_get_output (mixer->viewconvert,
          &mixer->primary_out) != GST_FLOW_OK)
    return FALSE;

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) ==
      GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) {
    if (gst_gl_view_convert_get_output (mixer->viewconvert,
            &mixer->auxilliary_out) != GST_FLOW_OK)
      return FALSE;
  }

  if (mixer->primary_out == NULL)
    return FALSE;

  converted_buffer = mixer->primary_out;

#ifndef G_DISABLE_ASSERT
  n = gst_buffer_n_memory (converted_buffer);
  g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views);
#endif

  for (v = 0; v < views; v++) {
    gst_buffer_add_video_meta_full (converted_buffer, v,
        GST_VIDEO_INFO_FORMAT (out_info),
        GST_VIDEO_INFO_WIDTH (out_info),
        GST_VIDEO_INFO_HEIGHT (out_info),
        GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, out_info->stride);
    if (mixer->auxilliary_out) {
      gst_buffer_add_video_meta_full (mixer->auxilliary_out, v,
          GST_VIDEO_INFO_FORMAT (out_info),
          GST_VIDEO_INFO_WIDTH (out_info),
          GST_VIDEO_INFO_HEIGHT (out_info),
          GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset,
          out_info->stride);
    }
  }

  return TRUE;
}

/* GstChildProxy implementation */
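/* Exposing the sink pads as children lets applications and gst-launch
 * address the request pads by name, e.g. via
 * gst_child_proxy_get_child_by_name(). */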
static GObject *
gst_gl_stereo_mix_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
    guint index)
{
  GstGLStereoMix *gl_stereo_mix = GST_GL_STEREO_MIX (child_proxy);
  GObject *obj = NULL;

  GST_OBJECT_LOCK (gl_stereo_mix);
  obj = g_list_nth_data (GST_ELEMENT_CAST (gl_stereo_mix)->sinkpads, index);
  if (obj)
    gst_object_ref (obj);
  GST_OBJECT_UNLOCK (gl_stereo_mix);

  return obj;
}

static guint
gst_gl_stereo_mix_child_proxy_get_children_count (GstChildProxy * child_proxy)
{
  guint count = 0;
  GstGLStereoMix *gl_stereo_mix = GST_GL_STEREO_MIX (child_proxy);

  GST_OBJECT_LOCK (gl_stereo_mix);
  count = GST_ELEMENT_CAST (gl_stereo_mix)->numsinkpads;
  GST_OBJECT_UNLOCK (gl_stereo_mix);
  GST_INFO_OBJECT (gl_stereo_mix, "Children Count: %d", count);

  return count;
}

static void
gst_gl_stereo_mix_child_proxy_init (gpointer g_iface, gpointer iface_data)
{
  GstChildProxyInterface *iface = g_iface;

  iface->get_child_by_index = gst_gl_stereo_mix_child_proxy_get_child_by_index;
  iface->get_children_count = gst_gl_stereo_mix_child_proxy_get_children_count;
}