1 /* GStreamer
2 * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
3 * Copyright (C) <2011> Sebastian Dröge <sebastian.droege@collabora.co.uk>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21 #ifdef HAVE_CONFIG_H
22 #include "config.h"
23 #endif
24
25 #include <string.h>
26 #include <gst/gst.h>
27
28 #include <gst/gst-i18n-plugin.h>
29 #include <gst/pbutils/pbutils.h>
30 #include <gst/video/video.h>
31 #include <gst/audio/streamvolume.h>
32 #include <gst/video/colorbalance.h>
33 #include <gst/video/videooverlay.h>
34 #include <gst/video/navigation.h>
35
36 #include "gstplaysink.h"
37 #include "gststreamsynchronizer.h"
38 #include "gstplaysinkvideoconvert.h"
39 #include "gstplaysinkaudioconvert.h"
40
41 GST_DEBUG_CATEGORY_STATIC (gst_play_sink_debug);
42 #define GST_CAT_DEFAULT gst_play_sink_debug
43
44 #define VOLUME_MAX_DOUBLE 10.0
45
46 #define DEFAULT_FLAGS GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_TEXT | \
47 GST_PLAY_FLAG_SOFT_VOLUME | GST_PLAY_FLAG_SOFT_COLORBALANCE
48
49 #define GST_PLAY_CHAIN(c) ((GstPlayChain *)(c))
50
51 /* enum types */
52 /**
53 * GstPlaySinkSendEventMode:
54  * @MODE_DEFAULT: use GstBin's default send_event handling
55  * @MODE_FIRST: send the event only to the first sink that returns TRUE
56  *
57  * The send_event handling mode to use
58 */
59 typedef enum
60 {
61 MODE_DEFAULT = 0,
62 MODE_FIRST = 1
63 } GstPlaySinkSendEventMode;
64
65
66 #define GST_TYPE_PLAY_SINK_SEND_EVENT_MODE (gst_play_sink_send_event_mode_get_type ())
67 static GType
68 gst_play_sink_send_event_mode_get_type (void)
69 {
70 static GType gtype = 0;
71
72 if (gtype == 0) {
73 static const GEnumValue values[] = {
74 {MODE_DEFAULT, "Default GstBin's send_event handling (default)",
75 "default"},
76 {MODE_FIRST, "Sends the event to sinks until the first one handles it",
77 "first"},
78 {0, NULL, NULL}
79 };
80
81 gtype = g_enum_register_static ("GstPlaySinkSendEventMode", values);
82 }
83 return gtype;
84 }
85
86 /* holds the common data fields for the audio and video pipelines. We keep them
87 * in a structure to more easily have all the info available. */
88 typedef struct
89 {
90 GstPlaySink *playsink;
91 GstElement *bin;
92 gboolean added;
93 gboolean activated;
94 gboolean raw;
95 } GstPlayChain;
96
97 typedef struct
98 {
99 GstPlayChain chain;
100 GstPad *sinkpad;
101 GstElement *queue;
102 GstElement *filter_conv;
103 GstElement *filter;
104 GstElement *conv;
105 GstElement *volume; /* element with the volume property */
106 gboolean sink_volume; /* if the volume was provided by the sink */
107 gulong notify_volume_id;
108 gulong notify_mute_id;
109 GstElement *sink;
110 GstElement *ts_offset;
111 } GstPlayAudioChain;
112
113 typedef struct
114 {
115 GstPlayChain chain;
116 GstPad *sinkpad, *srcpad;
117 GstElement *conv;
118 GstElement *deinterlace;
119 } GstPlayVideoDeinterlaceChain;
120
121 typedef struct
122 {
123 GstPlayChain chain;
124 GstPad *sinkpad;
125 GstElement *queue;
126 GstElement *filter_conv;
127 GstElement *filter;
128 GstElement *conv;
129 GstElement *sink;
130 gboolean async;
131 GstElement *ts_offset;
132 } GstPlayVideoChain;
133
134 typedef struct
135 {
136 GstPlayChain chain;
137 GstPad *sinkpad;
138 GstElement *queue;
139 GstElement *conv;
140 GstElement *resample;
141 GstPad *blockpad; /* srcpad of queue, used for blocking the vis */
142 GstPad *vispeerpad; /* srcpad of resample, used for unlinking the vis */
143 GstPad *vissinkpad; /* visualisation sinkpad, */
144 GstElement *vis;
145 GstPad *vissrcpad; /* visualisation srcpad, */
146 GstPad *srcpad; /* outgoing srcpad, used to connect to the next
147 * chain */
148 } GstPlayVisChain;
149
150 typedef struct
151 {
152 GstPlayChain chain;
153 GstPad *sinkpad;
154 GstElement *queue;
155 GstElement *identity;
156 GstElement *overlay;
157 GstPad *videosinkpad;
158 GstPad *textsinkpad;
159 GstPad *srcpad; /* outgoing srcpad, used to connect to the next
160 * chain */
161 GstElement *sink; /* custom sink to receive subtitle buffers */
162 } GstPlayTextChain;
163
164 #define GST_PLAY_SINK_GET_LOCK(playsink) (&((GstPlaySink *)playsink)->lock)
165 #define GST_PLAY_SINK_LOCK(playsink) G_STMT_START { \
166 GST_LOG_OBJECT (playsink, "locking from thread %p", g_thread_self ()); \
167 g_rec_mutex_lock (GST_PLAY_SINK_GET_LOCK (playsink)); \
168 GST_LOG_OBJECT (playsink, "locked from thread %p", g_thread_self ()); \
169 } G_STMT_END
170 #define GST_PLAY_SINK_UNLOCK(playsink) G_STMT_START { \
171 GST_LOG_OBJECT (playsink, "unlocking from thread %p", g_thread_self ()); \
172 g_rec_mutex_unlock (GST_PLAY_SINK_GET_LOCK (playsink)); \
173 } G_STMT_END
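/* Typical usage (sketch): any code path that touches the chains or the
 * proxied interface state wraps the access in this lock, e.g.
 *
 *   GST_PLAY_SINK_LOCK (playsink);
 *   ... inspect or modify playsink->audiochain ...
 *   GST_PLAY_SINK_UNLOCK (playsink);
 *
 * The mutex is recursive, so helpers invoked with the lock already held
 * may take it again. */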
174
175 #define PENDING_FLAG_SET(playsink, flagtype) \
176 ((playsink->pending_blocked_pads) |= (1 << flagtype))
177 #define PENDING_FLAG_UNSET(playsink, flagtype) \
178 ((playsink->pending_blocked_pads) &= ~(1 << flagtype))
179 #define PENDING_FLAG_IS_SET(playsink, flagtype) \
180 ((playsink->pending_blocked_pads) & (1 << flagtype))
181 #define PENDING_VIDEO_BLOCK(playsink) \
182 ((playsink->pending_blocked_pads) & (1 << GST_PLAY_SINK_TYPE_VIDEO_RAW | 1 << GST_PLAY_SINK_TYPE_VIDEO))
183 #define PENDING_AUDIO_BLOCK(playsink) \
184 ((playsink->pending_blocked_pads) & (1 << GST_PLAY_SINK_TYPE_AUDIO_RAW | 1 << GST_PLAY_SINK_TYPE_AUDIO))
185 #define PENDING_TEXT_BLOCK(playsink) \
186 PENDING_FLAG_IS_SET(playsink, GST_PLAY_SINK_TYPE_TEXT)
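/* Each GstPlaySinkType owns one bit in pending_blocked_pads. The
 * PENDING_VIDEO/AUDIO_BLOCK helpers OR the raw and non-raw bits of a
 * stream type together, so a pending block is detected regardless of
 * which request pad variant was used. */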
187
188 struct _GstPlaySink
189 {
190 GstBin bin;
191
192 GRecMutex lock;
193
194 gboolean async_pending;
195 gboolean need_async_start;
196
197 GstPlayFlags flags;
198
199 GstStreamSynchronizer *stream_synchronizer;
200
201 /* chains */
202 GstPlayAudioChain *audiochain;
203 GstPlayVideoDeinterlaceChain *videodeinterlacechain;
204 GstPlayVideoChain *videochain;
205 GstPlayVisChain *vischain;
206 GstPlayTextChain *textchain;
207
208 /* audio */
209 GstPad *audio_pad;
210 gboolean audio_pad_raw;
211 gboolean audio_pad_blocked;
212 GstPad *audio_srcpad_stream_synchronizer;
213 GstPad *audio_sinkpad_stream_synchronizer;
214 GstElement *audio_ssync_queue;
215 GstPad *audio_ssync_queue_sinkpad;
216 gulong audio_block_id;
217 gulong audio_notify_caps_id;
218 /* audio tee */
219 GstElement *audio_tee;
220 GstPad *audio_tee_sink;
221 GstPad *audio_tee_asrc;
222 GstPad *audio_tee_vissrc;
223 /* video */
224 GstPad *video_pad;
225 gboolean video_pad_raw;
226 gboolean video_pad_blocked;
227 GstPad *video_srcpad_stream_synchronizer;
228 GstPad *video_sinkpad_stream_synchronizer;
229 gulong video_block_id;
230 gulong video_notify_caps_id;
231 /* text */
232 GstPad *text_pad;
233 gboolean text_pad_blocked;
234 GstPad *text_srcpad_stream_synchronizer;
235 GstPad *text_sinkpad_stream_synchronizer;
236 gulong text_block_id;
237
238 gulong vis_pad_block_id;
239
240 guint32 pending_blocked_pads;
241
242 /* properties */
243 GstElement *audio_sink;
244 GstElement *video_sink;
245 GstElement *audio_filter;
246 GstElement *video_filter;
247 GstElement *visualisation;
248 GstElement *text_sink;
249 gdouble volume;
250 gboolean mute;
251 gchar *font_desc; /* font description */
252 gchar *subtitle_encoding; /* subtitle encoding */
253 guint connection_speed; /* connection speed in bits/sec (0 = unknown) */
254 guint count;
255 gboolean volume_changed; /* volume/mute changed while no audiochain */
256 gboolean mute_changed; /* ... has been created yet */
257 gint64 av_offset;
258 gint64 text_offset;
259 GstPlaySinkSendEventMode send_event_mode;
260 gboolean force_aspect_ratio;
261
262 /* videooverlay proxy interface */
263 GstVideoOverlay *overlay_element; /* protected with LOCK */
264 gboolean overlay_handle_set;
265 guintptr overlay_handle;
266 gboolean overlay_render_rectangle_set;
267 gint overlay_x, overlay_y, overlay_width, overlay_height;
268 gboolean overlay_handle_events_set;
269 gboolean overlay_handle_events;
270
271 /* colorbalance proxy interface */
272 GstColorBalance *colorbalance_element;
273 GList *colorbalance_channels; /* CONTRAST, BRIGHTNESS, HUE, SATURATION */
274 gint colorbalance_values[4];
275 gulong colorbalance_value_changed_id;
276
277 /* sending audio/video flushes breaks stream changes when the pipeline
278 * is paused and played again in 0.10 */
279 #if 0
280 gboolean video_custom_flush_finished;
281 gboolean video_ignore_wrong_state;
282 gboolean video_pending_flush;
283
284 gboolean audio_custom_flush_finished;
285 gboolean audio_ignore_wrong_state;
286 gboolean audio_pending_flush;
287 #endif
288
289 gboolean text_custom_flush_finished;
290 gboolean text_ignore_wrong_state;
291 gboolean text_pending_flush;
292 };
293
294 struct _GstPlaySinkClass
295 {
296 GstBinClass parent_class;
297
298 gboolean (*reconfigure) (GstPlaySink * playsink);
299
300 GstSample *(*convert_sample) (GstPlaySink * playsink, GstCaps * caps);
301 };
302
303
304 static GstStaticPadTemplate audiotemplate =
305 GST_STATIC_PAD_TEMPLATE ("audio_sink",
306 GST_PAD_SINK,
307 GST_PAD_REQUEST,
308 GST_STATIC_CAPS_ANY);
309 static GstStaticPadTemplate videotemplate =
310 GST_STATIC_PAD_TEMPLATE ("video_sink",
311 GST_PAD_SINK,
312 GST_PAD_REQUEST,
313 GST_STATIC_CAPS_ANY);
314 static GstStaticPadTemplate texttemplate = GST_STATIC_PAD_TEMPLATE ("text_sink",
315 GST_PAD_SINK,
316 GST_PAD_REQUEST,
317 GST_STATIC_CAPS_ANY);
318
319 /* FIXME 0.11: Remove */
320 static GstStaticPadTemplate audiorawtemplate =
321 GST_STATIC_PAD_TEMPLATE ("audio_raw_sink",
322 GST_PAD_SINK,
323 GST_PAD_REQUEST,
324 GST_STATIC_CAPS_ANY);
325 static GstStaticPadTemplate videorawtemplate =
326 GST_STATIC_PAD_TEMPLATE ("video_raw_sink",
327 GST_PAD_SINK,
328 GST_PAD_REQUEST,
329 GST_STATIC_CAPS_ANY);
330
331
332 /* props */
333 enum
334 {
335 PROP_0,
336 PROP_FLAGS,
337 PROP_MUTE,
338 PROP_VOLUME,
339 PROP_FONT_DESC,
340 PROP_SUBTITLE_ENCODING,
341 PROP_VIS_PLUGIN,
342 PROP_SAMPLE,
343 PROP_AV_OFFSET,
344 PROP_TEXT_OFFSET,
345 PROP_VIDEO_SINK,
346 PROP_AUDIO_SINK,
347 PROP_TEXT_SINK,
348 PROP_SEND_EVENT_MODE,
349 PROP_FORCE_ASPECT_RATIO,
350 PROP_VIDEO_FILTER,
351 PROP_AUDIO_FILTER
352 };
353
354 /* signals */
355 enum
356 {
357 LAST_SIGNAL
358 };
359
360 static void gst_play_sink_dispose (GObject * object);
361 static void gst_play_sink_finalize (GObject * object);
362 static void gst_play_sink_set_property (GObject * object, guint prop_id,
363 const GValue * value, GParamSpec * spec);
364 static void gst_play_sink_get_property (GObject * object, guint prop_id,
365 GValue * value, GParamSpec * spec);
366
367 static GstPad *gst_play_sink_request_new_pad (GstElement * element,
368 GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
369 static void gst_play_sink_release_request_pad (GstElement * element,
370 GstPad * pad);
371 static gboolean gst_play_sink_send_event (GstElement * element,
372 GstEvent * event);
373 static GstStateChangeReturn gst_play_sink_change_state (GstElement * element,
374 GstStateChange transition);
375
376 static void gst_play_sink_handle_message (GstBin * bin, GstMessage * message);
377
378 /* sending audio/video flushes breaks stream changes when the pipeline
379 * is paused and played again in 0.10 */
380 #if 0
381 static gboolean gst_play_sink_video_sink_event (GstPad * pad, GstEvent * event);
382 static GstFlowReturn gst_play_sink_video_sink_chain (GstPad * pad,
383 GstBuffer * buffer);
384 static gboolean gst_play_sink_audio_sink_event (GstPad * pad, GstEvent * event);
385 static GstFlowReturn gst_play_sink_audio_sink_chain (GstPad * pad,
386 GstBuffer * buffer);
387 #endif
388 static gboolean gst_play_sink_text_sink_event (GstPad * pad, GstObject * parent,
389 GstEvent * event);
390 static GstFlowReturn gst_play_sink_text_sink_chain (GstPad * pad,
391 GstObject * parent, GstBuffer * buffer);
392
393 static void notify_volume_cb (GObject * object, GParamSpec * pspec,
394 GstPlaySink * playsink);
395 static void notify_mute_cb (GObject * object, GParamSpec * pspec,
396 GstPlaySink * playsink);
397
398 static void update_av_offset (GstPlaySink * playsink);
399 static void update_text_offset (GstPlaySink * playsink);
400
401 static gboolean gst_play_sink_do_reconfigure (GstPlaySink * playsink);
402
403 static GQuark _playsink_reset_segment_event_marker_id = 0;
404
405 /* static guint gst_play_sink_signals[LAST_SIGNAL] = { 0 }; */
406
407 static void gst_play_sink_overlay_init (gpointer g_iface,
408 gpointer g_iface_data);
409 static void gst_play_sink_navigation_init (gpointer g_iface,
410 gpointer g_iface_data);
411 static void gst_play_sink_colorbalance_init (gpointer g_iface,
412 gpointer g_iface_data);
413
414 static void
415 _do_init (GType type)
416 {
417 static const GInterfaceInfo svol_info = {
418 NULL, NULL, NULL
419 };
420 static const GInterfaceInfo ov_info = {
421 gst_play_sink_overlay_init,
422 NULL, NULL
423 };
424 static const GInterfaceInfo nav_info = {
425 gst_play_sink_navigation_init,
426 NULL, NULL
427 };
428 static const GInterfaceInfo col_info = {
429 gst_play_sink_colorbalance_init,
430 NULL, NULL
431 };
432
433 g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_info);
434 g_type_add_interface_static (type, GST_TYPE_VIDEO_OVERLAY, &ov_info);
435 g_type_add_interface_static (type, GST_TYPE_NAVIGATION, &nav_info);
436 g_type_add_interface_static (type, GST_TYPE_COLOR_BALANCE, &col_info);
437 }
438
439 G_DEFINE_TYPE_WITH_CODE (GstPlaySink, gst_play_sink, GST_TYPE_BIN,
440 _do_init (g_define_type_id));
441
442 static void
443 gst_play_sink_class_init (GstPlaySinkClass * klass)
444 {
445 GObjectClass *gobject_klass;
446 GstElementClass *gstelement_klass;
447 GstBinClass *gstbin_klass;
448
449 gobject_klass = (GObjectClass *) klass;
450 gstelement_klass = (GstElementClass *) klass;
451 gstbin_klass = (GstBinClass *) klass;
452
453 gobject_klass->dispose = gst_play_sink_dispose;
454 gobject_klass->finalize = gst_play_sink_finalize;
455 gobject_klass->set_property = gst_play_sink_set_property;
456 gobject_klass->get_property = gst_play_sink_get_property;
457
458
459 /**
460 * GstPlaySink:flags:
461 *
462 * Control the behaviour of playsink.
463 */
464 g_object_class_install_property (gobject_klass, PROP_FLAGS,
465 g_param_spec_flags ("flags", "Flags", "Flags to control behaviour",
466 GST_TYPE_PLAY_FLAGS, DEFAULT_FLAGS,
467 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
468
469 /**
470 * GstPlaySink:volume:
471 *
472 * Get or set the current audio stream volume. 1.0 means 100%,
473 * 0.0 means mute. This uses a linear volume scale.
474 *
475 */
476 g_object_class_install_property (gobject_klass, PROP_VOLUME,
477 g_param_spec_double ("volume", "Volume", "The audio volume, 1.0=100%",
478 0.0, VOLUME_MAX_DOUBLE, 1.0,
479 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
480 g_object_class_install_property (gobject_klass, PROP_MUTE,
481 g_param_spec_boolean ("mute", "Mute",
482 "Mute the audio channel without changing the volume", FALSE,
483 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
484 g_object_class_install_property (gobject_klass, PROP_FONT_DESC,
485 g_param_spec_string ("subtitle-font-desc",
486 "Subtitle font description",
487 "Pango font description of font "
488 "to be used for subtitle rendering", NULL,
489 G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
490 g_object_class_install_property (gobject_klass, PROP_SUBTITLE_ENCODING,
491 g_param_spec_string ("subtitle-encoding", "subtitle encoding",
492 "Encoding to assume if input subtitles are not in UTF-8 encoding. "
493 "If not set, the GST_SUBTITLE_ENCODING environment variable will "
494 "be checked for an encoding to use. If that is not set either, "
495 "ISO-8859-15 will be assumed.", NULL,
496 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
497 g_object_class_install_property (gobject_klass, PROP_VIS_PLUGIN,
498 g_param_spec_object ("vis-plugin", "Vis plugin",
499 "the visualization element to use (NULL = default)",
500 GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
501 /**
502 * GstPlaySink:sample:
503 *
504 * Get the currently rendered or prerolled sample in the video sink.
505 * The #GstCaps in the sample will describe the format of the buffer.
506 */
507 g_object_class_install_property (gobject_klass, PROP_SAMPLE,
508 g_param_spec_boxed ("sample", "Sample",
509 "The last sample (NULL = no video available)",
510 GST_TYPE_SAMPLE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
511 /**
512 * GstPlaySink:av-offset:
513 *
514 * Control the synchronisation offset between the audio and video streams.
515 * Positive values make the audio run ahead of the video and negative values
516 * make the audio lag behind the video.
517 */
518 g_object_class_install_property (gobject_klass, PROP_AV_OFFSET,
519 g_param_spec_int64 ("av-offset", "AV Offset",
520 "The synchronisation offset between audio and video in nanoseconds",
521 G_MININT64, G_MAXINT64, 0,
522 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
523
524 /**
525 * GstPlaySink:text-offset:
526 *
527 * Control the synchronisation offset between the text and video streams.
528 * Positive values make the text run ahead of the video and negative values
529 * make the text lag behind the video.
530 */
531 g_object_class_install_property (gobject_klass, PROP_TEXT_OFFSET,
532 g_param_spec_int64 ("text-offset", "Text Offset",
533 "The synchronisation offset between text and video in nanoseconds",
534 G_MININT64, G_MAXINT64, 0,
535 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
536
537 /**
538 * GstPlaySink:video-filter:
539 *
540 * Set the video filter element/bin to use. Will apply on a best-effort basis
541 * unless GST_PLAY_FLAG_FORCE_FILTERS is set. playsink must be in
542 * %GST_STATE_NULL
543 */
544 g_object_class_install_property (gobject_klass, PROP_VIDEO_FILTER,
545 g_param_spec_object ("video-filter", "Video filter",
546 "the video filter(s) to apply, if possible",
547 GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
548 /**
549 * GstPlaySink:audio-filter:
550 *
551 * Set the audio filter element/bin to use. Will apply on a best-effort basis
552 * unless GST_PLAY_FLAG_FORCE_FILTERS is set. playsink must be in
553 * %GST_STATE_NULL
554 */
555 g_object_class_install_property (gobject_klass, PROP_AUDIO_FILTER,
556 g_param_spec_object ("audio-filter", "Audio filter",
557 "the audio filter(s) to apply, if possible",
558 GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
559
560 /**
561 * GstPlaySink:video-sink:
562 *
563 * Set the used video sink element. NULL will use the default sink. playsink
564 * must be in %GST_STATE_NULL
565 */
566 g_object_class_install_property (gobject_klass, PROP_VIDEO_SINK,
567 g_param_spec_object ("video-sink", "Video Sink",
568 "the video output element to use (NULL = default sink)",
569 GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
570 /**
571 * GstPlaySink:audio-sink:
572 *
573 * Set the used audio sink element. NULL will use the default sink. playsink
574 * must be in %GST_STATE_NULL
575 */
576 g_object_class_install_property (gobject_klass, PROP_AUDIO_SINK,
577 g_param_spec_object ("audio-sink", "Audio Sink",
578 "the audio output element to use (NULL = default sink)",
579 GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
580
581 /**
582 * GstPlaySink:text-sink:
583 *
584 * Set the used text sink element. NULL will use the default sink. playsink
585 * must be in %GST_STATE_NULL
586 */
587 g_object_class_install_property (gobject_klass, PROP_TEXT_SINK,
588 g_param_spec_object ("text-sink", "Text sink",
589 "the text output element to use (NULL = default subtitleoverlay)",
590 GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
591
592 /**
593 * GstPlaySink:send-event-mode:
594 *
595 * Sets the handling method used for events received in the send_event
596 * function. The default is %MODE_DEFAULT, which uses #GstBin's default
597 * handling (the event is pushed to all internal sinks).
598 */
599 g_object_class_install_property (gobject_klass, PROP_SEND_EVENT_MODE,
600 g_param_spec_enum ("send-event-mode", "Send event mode",
601 "How to send events received in send_event function",
602 GST_TYPE_PLAY_SINK_SEND_EVENT_MODE, MODE_DEFAULT,
603 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
604
605 /**
606 * GstPlaySink:force-aspect-ratio:
607 *
608 * Requests the video sink to enforce the video display aspect ratio.
609 */
610 g_object_class_install_property (gobject_klass, PROP_FORCE_ASPECT_RATIO,
611 g_param_spec_boolean ("force-aspect-ratio", "Force Aspect Ratio",
612 "When enabled, scaling will respect original aspect ratio", TRUE,
613 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
614
615 g_signal_new ("reconfigure", G_TYPE_FROM_CLASS (klass),
616 G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstPlaySinkClass,
617 reconfigure), NULL, NULL, g_cclosure_marshal_generic, G_TYPE_BOOLEAN,
618 0, G_TYPE_NONE);
619 /**
620 * GstPlaySink::convert-sample:
621 * @playsink: a #GstPlaySink
622 * @caps: the target format of the sample
623 *
624 * Action signal to retrieve the currently playing video sample in the format
625 * specified by @caps.
626 * If @caps is %NULL, no conversion will be performed and this function is
627 * equivalent to the #GstPlaySink:sample property.
628 *
629 * Returns: a #GstSample of the current video sample converted to @caps.
630 * The caps in the sample will describe the final layout of the buffer data.
631 * %NULL is returned when no current sample can be retrieved or when the
632 * conversion failed.
633 */
634 g_signal_new ("convert-sample", G_TYPE_FROM_CLASS (klass),
635 G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
636 G_STRUCT_OFFSET (GstPlaySinkClass, convert_sample), NULL, NULL,
637 g_cclosure_marshal_generic, GST_TYPE_SAMPLE, 1, GST_TYPE_CAPS);
638
639 gst_element_class_add_static_pad_template (gstelement_klass,
640 &audiorawtemplate);
641 gst_element_class_add_static_pad_template (gstelement_klass, &audiotemplate);
642 gst_element_class_add_static_pad_template (gstelement_klass,
643 &videorawtemplate);
644 gst_element_class_add_static_pad_template (gstelement_klass, &videotemplate);
645 gst_element_class_add_static_pad_template (gstelement_klass, &texttemplate);
646 gst_element_class_set_static_metadata (gstelement_klass, "Player Sink",
647 "Generic/Bin/Sink",
648 "Convenience sink for multiple streams",
649 "Wim Taymans <wim.taymans@gmail.com>");
650
651 gstelement_klass->change_state =
652 GST_DEBUG_FUNCPTR (gst_play_sink_change_state);
653 gstelement_klass->send_event = GST_DEBUG_FUNCPTR (gst_play_sink_send_event);
654 gstelement_klass->request_new_pad =
655 GST_DEBUG_FUNCPTR (gst_play_sink_request_new_pad);
656 gstelement_klass->release_pad =
657 GST_DEBUG_FUNCPTR (gst_play_sink_release_request_pad);
658
659 gstbin_klass->handle_message =
660 GST_DEBUG_FUNCPTR (gst_play_sink_handle_message);
661
662 klass->reconfigure = GST_DEBUG_FUNCPTR (gst_play_sink_reconfigure);
663 klass->convert_sample = GST_DEBUG_FUNCPTR (gst_play_sink_convert_sample);
664
665 _playsink_reset_segment_event_marker_id =
666 g_quark_from_static_string ("gst-playsink-reset-segment-event-marker");
667
668 g_type_class_ref (GST_TYPE_STREAM_SYNCHRONIZER);
669 g_type_class_ref (GST_TYPE_COLOR_BALANCE_CHANNEL);
670 }
671
672 static void
673 gst_play_sink_init (GstPlaySink * playsink)
674 {
675 GstColorBalanceChannel *channel;
676
677 /* init groups */
678 playsink->video_sink = NULL;
679 playsink->audio_sink = NULL;
680 playsink->visualisation = NULL;
681 playsink->text_sink = NULL;
682 playsink->volume = 1.0;
683 playsink->font_desc = NULL;
684 playsink->subtitle_encoding = NULL;
685 playsink->flags = DEFAULT_FLAGS;
686 playsink->send_event_mode = MODE_DEFAULT;
687 playsink->force_aspect_ratio = TRUE;
688
689 playsink->stream_synchronizer =
690 g_object_new (GST_TYPE_STREAM_SYNCHRONIZER, NULL);
691 gst_bin_add (GST_BIN_CAST (playsink),
692 GST_ELEMENT_CAST (playsink->stream_synchronizer));
693
694 g_rec_mutex_init (&playsink->lock);
695 GST_OBJECT_FLAG_SET (playsink, GST_ELEMENT_FLAG_SINK);
696 gst_bin_set_suppressed_flags (GST_BIN (playsink),
697 GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK);
698
699 channel =
700 GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
701 NULL));
702 channel->label = g_strdup ("CONTRAST");
703 channel->min_value = -1000;
704 channel->max_value = 1000;
705 playsink->colorbalance_channels =
706 g_list_append (playsink->colorbalance_channels, channel);
707 playsink->colorbalance_values[0] = 0;
708
709 channel =
710 GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
711 NULL));
712 channel->label = g_strdup ("BRIGHTNESS");
713 channel->min_value = -1000;
714 channel->max_value = 1000;
715 playsink->colorbalance_channels =
716 g_list_append (playsink->colorbalance_channels, channel);
717 playsink->colorbalance_values[1] = 0;
718
719 channel =
720 GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
721 NULL));
722 channel->label = g_strdup ("HUE");
723 channel->min_value = -1000;
724 channel->max_value = 1000;
725 playsink->colorbalance_channels =
726 g_list_append (playsink->colorbalance_channels, channel);
727 playsink->colorbalance_values[2] = 0;
728
729 channel =
730 GST_COLOR_BALANCE_CHANNEL (g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL,
731 NULL));
732 channel->label = g_strdup ("SATURATION");
733 channel->min_value = -1000;
734 channel->max_value = 1000;
735 playsink->colorbalance_channels =
736 g_list_append (playsink->colorbalance_channels, channel);
737 playsink->colorbalance_values[3] = 0;
738 }
739
740 static void
741 disconnect_audio_chain (GstPlayAudioChain * chain, GstPlaySink * playsink)
742 {
743 if (chain) {
744 if (chain->notify_volume_id)
745 g_signal_handler_disconnect (chain->volume, chain->notify_volume_id);
746 if (chain->notify_mute_id)
747 g_signal_handler_disconnect (chain->volume, chain->notify_mute_id);
748 chain->notify_volume_id = chain->notify_mute_id = 0;
749 }
750 }
751
752 static void
753 free_chain (GstPlayChain * chain)
754 {
755 if (chain) {
756 if (chain->bin)
757 gst_object_unref (chain->bin);
758 g_free (chain);
759 }
760 }
761
762 static void
763 gst_play_sink_remove_audio_ssync_queue (GstPlaySink * playsink)
764 {
765 if (playsink->audio_ssync_queue) {
766 gst_element_set_state (playsink->audio_ssync_queue, GST_STATE_NULL);
767 gst_object_unref (playsink->audio_ssync_queue_sinkpad);
768 gst_bin_remove (GST_BIN_CAST (playsink), playsink->audio_ssync_queue);
769 playsink->audio_ssync_queue = NULL;
770 playsink->audio_ssync_queue_sinkpad = NULL;
771 }
772 }
773
774 static void
775 gst_play_sink_dispose (GObject * object)
776 {
777 GstPlaySink *playsink;
778
779 playsink = GST_PLAY_SINK (object);
780
781 if (playsink->audio_filter != NULL) {
782 gst_element_set_state (playsink->audio_filter, GST_STATE_NULL);
783 gst_object_unref (playsink->audio_filter);
784 playsink->audio_filter = NULL;
785 }
786 if (playsink->video_filter != NULL) {
787 gst_element_set_state (playsink->video_filter, GST_STATE_NULL);
788 gst_object_unref (playsink->video_filter);
789 playsink->video_filter = NULL;
790 }
791 if (playsink->audio_sink != NULL) {
792 gst_element_set_state (playsink->audio_sink, GST_STATE_NULL);
793 gst_object_unref (playsink->audio_sink);
794 playsink->audio_sink = NULL;
795 }
796 if (playsink->video_sink != NULL) {
797 gst_element_set_state (playsink->video_sink, GST_STATE_NULL);
798 gst_object_unref (playsink->video_sink);
799 playsink->video_sink = NULL;
800 }
801 if (playsink->visualisation != NULL) {
802 gst_element_set_state (playsink->visualisation, GST_STATE_NULL);
803 gst_object_unref (playsink->visualisation);
804 playsink->visualisation = NULL;
805 }
806 if (playsink->text_sink != NULL) {
807 gst_element_set_state (playsink->text_sink, GST_STATE_NULL);
808 gst_object_unref (playsink->text_sink);
809 playsink->text_sink = NULL;
810 }
811
812 free_chain ((GstPlayChain *) playsink->videodeinterlacechain);
813 playsink->videodeinterlacechain = NULL;
814 free_chain ((GstPlayChain *) playsink->videochain);
815 playsink->videochain = NULL;
816 free_chain ((GstPlayChain *) playsink->audiochain);
817 playsink->audiochain = NULL;
818 free_chain ((GstPlayChain *) playsink->vischain);
819 playsink->vischain = NULL;
820 free_chain ((GstPlayChain *) playsink->textchain);
821 playsink->textchain = NULL;
822
823 if (playsink->audio_tee_sink) {
824 gst_object_unref (playsink->audio_tee_sink);
825 playsink->audio_tee_sink = NULL;
826 }
827
828 if (playsink->audio_tee_vissrc) {
829 gst_element_release_request_pad (playsink->audio_tee,
830 playsink->audio_tee_vissrc);
831 gst_object_unref (playsink->audio_tee_vissrc);
832 playsink->audio_tee_vissrc = NULL;
833 }
834
835 if (playsink->audio_tee_asrc) {
836 gst_element_release_request_pad (playsink->audio_tee,
837 playsink->audio_tee_asrc);
838 gst_object_unref (playsink->audio_tee_asrc);
839 playsink->audio_tee_asrc = NULL;
840 }
841
842 g_free (playsink->font_desc);
843 playsink->font_desc = NULL;
844
845 g_free (playsink->subtitle_encoding);
846 playsink->subtitle_encoding = NULL;
847
848 playsink->stream_synchronizer = NULL;
849
850 g_list_foreach (playsink->colorbalance_channels, (GFunc) gst_object_unref,
851 NULL);
852 g_list_free (playsink->colorbalance_channels);
853 playsink->colorbalance_channels = NULL;
854
855 G_OBJECT_CLASS (gst_play_sink_parent_class)->dispose (object);
856 }
857
858 static void
859 gst_play_sink_finalize (GObject * object)
860 {
861 GstPlaySink *playsink;
862
863 playsink = GST_PLAY_SINK (object);
864
865 g_rec_mutex_clear (&playsink->lock);
866
867 G_OBJECT_CLASS (gst_play_sink_parent_class)->finalize (object);
868 }
869
870 void
871 gst_play_sink_set_sink (GstPlaySink * playsink, GstPlaySinkType type,
872 GstElement * sink)
873 {
874 GstElement **elem = NULL, *old = NULL;
875 #ifndef GST_DISABLE_GST_DEBUG
876 GstPad *sink_pad;
877 const gchar *sink_type = NULL;
878 #endif
879
880 GST_LOG ("Setting sink %" GST_PTR_FORMAT " as sink type %d", sink, type);
881
882 GST_PLAY_SINK_LOCK (playsink);
883 switch (type) {
884 case GST_PLAY_SINK_TYPE_AUDIO:
885 case GST_PLAY_SINK_TYPE_AUDIO_RAW:
886 elem = &playsink->audio_sink;
887 #ifndef GST_DISABLE_GST_DEBUG
888 sink_type = "audio";
889 #endif
890 break;
891 case GST_PLAY_SINK_TYPE_VIDEO:
892 case GST_PLAY_SINK_TYPE_VIDEO_RAW:
893 elem = &playsink->video_sink;
894 #ifndef GST_DISABLE_GST_DEBUG
895 sink_type = "video";
896 #endif
897 break;
898 case GST_PLAY_SINK_TYPE_TEXT:
899 elem = &playsink->text_sink;
900 #ifndef GST_DISABLE_GST_DEBUG
901 sink_type = "text";
902 #endif
903 break;
904 default:
905 break;
906 }
907 if (elem) {
908 old = *elem;
909 if (sink)
910 gst_object_ref_sink (sink);
911 *elem = sink;
912 }
913 GST_PLAY_SINK_UNLOCK (playsink);
914
915 #ifndef GST_DISABLE_GST_DEBUG
916 /* Check and warn if an application sets a sink with no 'sink' pad */
917 if (sink && elem) {
918 if ((sink_pad = gst_element_get_static_pad (sink, "sink")) != NULL) {
919 gst_object_unref (sink_pad);
920 } else {
921 GST_ELEMENT_WARNING (playsink, CORE, FAILED,
922 ("Application error - playback can't work"),
923 ("custom %s sink has no pad named \"sink\"", sink_type));
924 }
925 }
926 #endif
927
928 if (old) {
929 /* Set the old sink to the NULL state if it is not used any longer */
930 if (old != sink && !GST_OBJECT_PARENT (old))
931 gst_element_set_state (old, GST_STATE_NULL);
932 gst_object_unref (old);
933 }
934 }
935
936 GstElement *
937 gst_play_sink_get_sink (GstPlaySink * playsink, GstPlaySinkType type)
938 {
939 GstElement *result = NULL;
940 GstElement *elem = NULL, *chainp = NULL;
941
942 GST_PLAY_SINK_LOCK (playsink);
943 switch (type) {
944 case GST_PLAY_SINK_TYPE_AUDIO:
945 case GST_PLAY_SINK_TYPE_AUDIO_RAW:
946 {
947 GstPlayAudioChain *chain;
948 if ((chain = (GstPlayAudioChain *) playsink->audiochain))
949 chainp = chain->sink;
950 elem = playsink->audio_sink;
951 break;
952 }
953 case GST_PLAY_SINK_TYPE_VIDEO:
954 case GST_PLAY_SINK_TYPE_VIDEO_RAW:
955 {
956 GstPlayVideoChain *chain;
957 if ((chain = (GstPlayVideoChain *) playsink->videochain))
958 chainp = chain->sink;
959 elem = playsink->video_sink;
960 break;
961 }
962 case GST_PLAY_SINK_TYPE_TEXT:
963 {
964 GstPlayTextChain *chain;
965 if ((chain = (GstPlayTextChain *) playsink->textchain))
966 chainp = chain->sink;
967 elem = playsink->text_sink;
968 break;
969 }
970 default:
971 break;
972 }
973 if (chainp) {
974 /* we have an active chain with a sink, get the sink */
975 result = gst_object_ref (chainp);
976 }
977 /* nothing found, return last configured sink */
978 if (result == NULL && elem)
979 result = gst_object_ref (elem);
980 GST_PLAY_SINK_UNLOCK (playsink);
981
982 return result;
983 }
984
985 void
986 gst_play_sink_set_filter (GstPlaySink * playsink, GstPlaySinkType type,
987 GstElement * filter)
988 {
989 GstElement **elem = NULL, *old = NULL;
990
991 GST_LOG_OBJECT (playsink,
992 "Setting filter %" GST_PTR_FORMAT " as filter type %d", filter, type);
993
994 GST_PLAY_SINK_LOCK (playsink);
995 switch (type) {
996 case GST_PLAY_SINK_TYPE_AUDIO:
997 case GST_PLAY_SINK_TYPE_AUDIO_RAW:
998 elem = &playsink->audio_filter;
999 break;
1000 case GST_PLAY_SINK_TYPE_VIDEO:
1001 case GST_PLAY_SINK_TYPE_VIDEO_RAW:
1002 elem = &playsink->video_filter;
1003 break;
1004 default:
1005 break;
1006 }
1007 if (elem) {
1008 old = *elem;
1009 if (filter)
1010 gst_object_ref_sink (filter);
1011 *elem = filter;
1012 }
1013 GST_PLAY_SINK_UNLOCK (playsink);
1014
1015 if (old) {
1016 /* Set the old filter to the NULL state if it is not used any longer */
1017 if (old != filter && !GST_OBJECT_PARENT (old))
1018 gst_element_set_state (old, GST_STATE_NULL);
1019 gst_object_unref (old);
1020 }
1021 }
1022
1023 GstElement *
1024 gst_play_sink_get_filter (GstPlaySink * playsink, GstPlaySinkType type)
1025 {
1026 GstElement *result = NULL;
1027 GstElement *elem = NULL, *chainp = NULL;
1028
1029 GST_PLAY_SINK_LOCK (playsink);
1030 switch (type) {
1031 case GST_PLAY_SINK_TYPE_AUDIO:
1032 case GST_PLAY_SINK_TYPE_AUDIO_RAW:
1033 {
1034 GstPlayAudioChain *chain;
1035 if ((chain = (GstPlayAudioChain *) playsink->audiochain))
1036 chainp = chain->filter;
1037 elem = playsink->audio_filter;
1038 break;
1039 }
1040 case GST_PLAY_SINK_TYPE_VIDEO:
1041 case GST_PLAY_SINK_TYPE_VIDEO_RAW:
1042 {
1043 GstPlayVideoChain *chain;
1044 if ((chain = (GstPlayVideoChain *) playsink->videochain))
1045 chainp = chain->filter;
1046 elem = playsink->video_filter;
1047 break;
1048 }
1049 default:
1050 break;
1051 }
1052 if (chainp) {
1053 /* we have an active chain with a filter, get the filter */
1054 result = gst_object_ref (chainp);
1055 }
1056 /* nothing found, return last configured filter */
1057 if (result == NULL && elem)
1058 result = gst_object_ref (elem);
1059 GST_PLAY_SINK_UNLOCK (playsink);
1060
1061 return result;
1062 }
1063
1064 static GstPadProbeReturn
1065 gst_play_sink_vis_blocked (GstPad * tee_pad, GstPadProbeInfo * info,
1066 gpointer user_data)
1067 {
1068 GstPlaySink *playsink;
1069 GstPlayVisChain *chain;
1070
1071 playsink = GST_PLAY_SINK (user_data);
1072
1073 if (GST_IS_EVENT (info->data) && !GST_EVENT_IS_SERIALIZED (info->data)) {
1074 GST_DEBUG_OBJECT (playsink, "Letting non-serialized event %s pass",
1075 GST_EVENT_TYPE_NAME (info->data));
1076 return GST_PAD_PROBE_PASS;
1077 }
1078
1079 GST_PLAY_SINK_LOCK (playsink);
1080 GST_DEBUG_OBJECT (playsink, "vis pad blocked");
1081 /* now try to change the plugin in the running vis chain */
1082 if (!(chain = (GstPlayVisChain *) playsink->vischain))
1083 goto done;
1084
1085 /* unlink the old plugin and unghost the pad */
1086 gst_pad_unlink (chain->vispeerpad, chain->vissinkpad);
1087 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (chain->srcpad), NULL);
1088
1089 /* set the old plugin to the NULL state and remove it from the bin */
1090 gst_element_set_state (chain->vis, GST_STATE_NULL);
1091 gst_bin_remove (GST_BIN_CAST (chain->chain.bin), chain->vis);
1092
1093 /* add new plugin and set state to playing */
1094 chain->vis = playsink->visualisation;
1095 gst_bin_add (GST_BIN_CAST (chain->chain.bin), chain->vis);
1096 gst_element_set_state (chain->vis, GST_STATE_PLAYING);
1097
1098 /* get pads */
1099 chain->vissinkpad = gst_element_get_static_pad (chain->vis, "sink");
1100 chain->vissrcpad = gst_element_get_static_pad (chain->vis, "src");
1101
1102 /* link pads */
1103 gst_pad_link_full (chain->vispeerpad, chain->vissinkpad,
1104 GST_PAD_LINK_CHECK_NOTHING);
1105 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (chain->srcpad),
1106 chain->vissrcpad);
1107
1108 done:
1109 playsink->vis_pad_block_id = 0;
1110
1111 GST_PLAY_SINK_UNLOCK (playsink);
1112
1113 /* remove the probe and unblock the pad */
1114 return GST_PAD_PROBE_REMOVE;
1115 }
1116
1117 void
1118 gst_play_sink_set_vis_plugin (GstPlaySink * playsink, GstElement * vis)
1119 {
1120 GstPlayVisChain *chain;
1121
1122 /* setting NULL means creating the default vis plugin */
1123 if (vis == NULL)
1124 vis = gst_element_factory_make ("goom", "vis");
1125
1126 /* simply return if we don't have a vis plugin here */
1127 if (vis == NULL)
1128 return;
1129
1130 GST_PLAY_SINK_LOCK (playsink);
1131 /* first store the new vis */
1132 if (playsink->visualisation)
1133 gst_object_unref (playsink->visualisation);
1134 /* take ownership */
1135 gst_object_ref_sink (vis);
1136 playsink->visualisation = vis;
1137
1138 /* now try to change the plugin in the running vis chain, if we have no chain,
1139 * we don't bother, any future vis chain will be created with the new vis
1140 * plugin. */
1141 if (!(chain = (GstPlayVisChain *) playsink->vischain))
1142 goto done;
1143
1144 /* block the pad, the next time the callback is called we can change the
1145 * visualisation. It's possible that this never happens or that the pad was
1146 * already blocked. If the callback never happens, we don't have new data so
1147 * we don't need the new vis plugin. If the pad was already blocked, the
1148 * function returns FALSE but the previous pad block will do the right thing
1149 * anyway. */
1150 GST_DEBUG_OBJECT (playsink, "blocking vis pad");
1151 if (!playsink->vis_pad_block_id && !playsink->audio_block_id
1152 && !playsink->video_block_id && !playsink->text_block_id)
1153 playsink->vis_pad_block_id =
1154 gst_pad_add_probe (chain->blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
1155 gst_play_sink_vis_blocked, playsink, NULL);
1156 done:
1157 GST_PLAY_SINK_UNLOCK (playsink);
1158
1159 return;
1160 }
1161
1162 GstElement *
1163 gst_play_sink_get_vis_plugin (GstPlaySink * playsink)
1164 {
1165 GstElement *result = NULL;
1166 GstPlayVisChain *chain;
1167
1168 GST_PLAY_SINK_LOCK (playsink);
1169 if ((chain = (GstPlayVisChain *) playsink->vischain)) {
1170 /* we have an active chain, get the vis element */
1171 if (chain->vis)
1172 result = gst_object_ref (chain->vis);
1173 }
1174 /* nothing found, return the last configured vis plugin */
1175 if (result == NULL && playsink->visualisation)
1176 result = gst_object_ref (playsink->visualisation);
1177 GST_PLAY_SINK_UNLOCK (playsink);
1178
1179 return result;
1180 }
1181
1182 void
1183 gst_play_sink_set_volume (GstPlaySink * playsink, gdouble volume)
1184 {
1185 GstPlayAudioChain *chain;
1186
1187 GST_PLAY_SINK_LOCK (playsink);
1188 playsink->volume = volume;
1189 chain = (GstPlayAudioChain *) playsink->audiochain;
1190 if (chain && chain->volume) {
1191 GST_LOG_OBJECT (playsink,
1192 "elements: volume=%" GST_PTR_FORMAT "; new volume=%.03f, mute=%d",
1193 chain->volume, volume, playsink->mute);
1194 g_object_set (chain->volume, "volume", volume, NULL);
1195 } else {
1196 GST_LOG_OBJECT (playsink, "no volume element");
1197 playsink->volume_changed = TRUE;
1198 }
1199 GST_PLAY_SINK_UNLOCK (playsink);
1200 }
1201
1202 gdouble
1203 gst_play_sink_get_volume (GstPlaySink * playsink)
1204 {
1205 gdouble result;
1206 GstPlayAudioChain *chain;
1207
1208 GST_PLAY_SINK_LOCK (playsink);
1209 chain = (GstPlayAudioChain *) playsink->audiochain;
1210 result = playsink->volume;
1211 if (chain && chain->volume) {
1212 g_object_get (chain->volume, "volume", &result, NULL);
1213 playsink->volume = result;
1214 }
1215 GST_PLAY_SINK_UNLOCK (playsink);
1216
1217 return result;
1218 }
1219
1220 void
1221 gst_play_sink_set_mute (GstPlaySink * playsink, gboolean mute)
1222 {
1223 GstPlayAudioChain *chain;
1224
1225 GST_PLAY_SINK_LOCK (playsink);
1226 playsink->mute = mute;
1227 chain = (GstPlayAudioChain *) playsink->audiochain;
1228 if (chain && chain->volume) {
1229 g_object_set (chain->volume, "mute", mute, NULL);
1230 } else {
1231 playsink->mute_changed = TRUE;
1232 }
1233 GST_PLAY_SINK_UNLOCK (playsink);
1234 }
1235
1236 gboolean
1237 gst_play_sink_get_mute (GstPlaySink * playsink)
1238 {
1239 gboolean result;
1240 GstPlayAudioChain *chain;
1241
1242 GST_PLAY_SINK_LOCK (playsink);
1243 chain = (GstPlayAudioChain *) playsink->audiochain;
1244 if (chain && chain->volume) {
1245 g_object_get (chain->volume, "mute", &result, NULL);
1246 playsink->mute = result;
1247 } else {
1248 result = playsink->mute;
1249 }
1250 GST_PLAY_SINK_UNLOCK (playsink);
1251
1252 return result;
1253 }
1254
1255 static void
1256 post_missing_element_message (GstPlaySink * playsink, const gchar * name)
1257 {
1258 GstMessage *msg;
1259
1260 msg = gst_missing_element_message_new (GST_ELEMENT_CAST (playsink), name);
1261 gst_element_post_message (GST_ELEMENT_CAST (playsink), msg);
1262 }
1263
1264 static gboolean
1265 add_chain (GstPlayChain * chain, gboolean add)
1266 {
1267 if (chain->added == add)
1268 return TRUE;
1269
1270 if (add)
1271 gst_bin_add (GST_BIN_CAST (chain->playsink), chain->bin);
1272 else {
1273 gst_bin_remove (GST_BIN_CAST (chain->playsink), chain->bin);
1274 }
1275
1276 chain->added = add;
1277
1278 return TRUE;
1279 }
1280
1281 static gboolean
1282 activate_chain (GstPlayChain * chain, gboolean activate)
1283 {
1284 GstState state;
1285
1286 if (chain->activated == activate)
1287 return TRUE;
1288
1289 GST_OBJECT_LOCK (chain->playsink);
1290 state = GST_STATE_TARGET (chain->playsink);
1291 GST_OBJECT_UNLOCK (chain->playsink);
1292
1293 if (activate)
1294 gst_element_set_state (chain->bin, state);
1295 else
1296 gst_element_set_state (chain->bin, GST_STATE_NULL);
1297
1298 chain->activated = activate;
1299
1300 return TRUE;
1301 }
1302
1303 static gboolean
1304 element_is_sink (GstElement * element)
1305 {
1306 gboolean is_sink;
1307
1308 GST_OBJECT_LOCK (element);
1309 is_sink = GST_OBJECT_FLAG_IS_SET (element, GST_ELEMENT_FLAG_SINK);
1310 GST_OBJECT_UNLOCK (element);
1311
1312 GST_DEBUG_OBJECT (element, "is a sink: %s", (is_sink) ? "yes" : "no");
1313 return is_sink;
1314 }
1315
1316 static gboolean
1317 element_has_property (GstElement * element, const gchar * pname, GType type)
1318 {
1319 GParamSpec *pspec;
1320
1321 pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (element), pname);
1322
1323 if (pspec == NULL) {
1324 GST_DEBUG_OBJECT (element, "no %s property", pname);
1325 return FALSE;
1326 }
1327
1328 if (type == G_TYPE_INVALID || type == pspec->value_type ||
1329 g_type_is_a (pspec->value_type, type)) {
1330 GST_DEBUG_OBJECT (element, "has %s property of type %s", pname,
1331 (type == G_TYPE_INVALID) ? "any type" : g_type_name (type));
1332 return TRUE;
1333 }
1334
1335 GST_WARNING_OBJECT (element, "has %s property, but property is of type %s "
1336 "and we expected it to be of type %s", pname,
1337 g_type_name (pspec->value_type), g_type_name (type));
1338
1339 return FALSE;
1340 }
1341
1342 typedef struct
1343 {
1344 const gchar *prop_name;
1345 GType prop_type;
1346 gboolean need_sink;
1347 } FindPropertyHelper;
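/* Passed as user data to find_property() when walking a bin's children
 * with gst_iterator_find_custom(); need_sink additionally restricts the
 * match to elements that carry the SINK flag. */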
1348
1349 static gint
1350 find_property (const GValue * item, FindPropertyHelper * helper)
1351 {
1352 GstElement *element = g_value_get_object (item);
1353 if (helper->need_sink && !element_is_sink (element)) {
1354 return 1;
1355 }
1356
1357 if (!element_has_property (element, helper->prop_name, helper->prop_type)) {
1358 return 1;
1359 }
1360
1361 GST_INFO_OBJECT (element, "found %s with %s property", helper->prop_name,
1362 (helper->need_sink) ? "sink" : "element");
1363 return 0; /* keep it */
1364 }
1365
1366 /* FIXME: why not move these functions into core? */
1367 /* find a sink in the hierarchy with a property named @name. This function does
1368 * not increase the refcount of the returned object, which therefore remains
1369 * valid only as long as the bin is valid. */
1370 static GstElement *
1371 gst_play_sink_find_property_sinks (GstPlaySink * playsink, GstElement * obj,
1372 const gchar * name, GType expected_type)
1373 {
1374 GstElement *result = NULL;
1375 GstIterator *it;
1376
1377 if (element_has_property (obj, name, expected_type)) {
1378 result = obj;
1379 } else if (GST_IS_BIN (obj)) {
1380 gboolean found;
1381 GValue item = { 0, };
1382 FindPropertyHelper helper = { name, expected_type, TRUE };
1383
1384 it = gst_bin_iterate_recurse (GST_BIN_CAST (obj));
1385 found = gst_iterator_find_custom (it,
1386 (GCompareFunc) find_property, &item, &helper);
1387 gst_iterator_free (it);
1388 if (found) {
1389 result = g_value_get_object (&item);
1390 /* we don't need the extra ref */
1391 g_value_unset (&item);
1392 }
1393 }
1394 return result;
1395 }
1396
1397 /* find an object in the hierarchy with a property named @name */
1398 static GstElement *
1399 gst_play_sink_find_property (GstPlaySink * playsink, GstElement * obj,
1400 const gchar * name, GType expected_type)
1401 {
1402 GstElement *result = NULL;
1403 GstIterator *it;
1404
1405 if (GST_IS_BIN (obj)) {
1406 gboolean found;
1407 GValue item = { 0, };
1408 FindPropertyHelper helper = { name, expected_type, FALSE };
1409
1410 it = gst_bin_iterate_recurse (GST_BIN_CAST (obj));
1411 found = gst_iterator_find_custom (it,
1412 (GCompareFunc) find_property, &item, &helper);
1413 gst_iterator_free (it);
1414 if (found) {
1415 result = g_value_dup_object (&item);
1416 g_value_unset (&item);
1417 }
1418 } else {
1419 if (element_has_property (obj, name, expected_type)) {
1420 result = obj;
1421 gst_object_ref (obj);
1422 }
1423 }
1424 return result;
1425 }
1426
1427 static void
1428 do_async_start (GstPlaySink * playsink)
1429 {
1430 GstMessage *message;
1431
1432 if (!playsink->need_async_start) {
1433 GST_INFO_OBJECT (playsink, "no async_start needed");
1434 return;
1435 }
1436
1437 playsink->async_pending = TRUE;
1438
1439 GST_INFO_OBJECT (playsink, "Sending async_start message");
1440 message = gst_message_new_async_start (GST_OBJECT_CAST (playsink));
1441 GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (GST_BIN_CAST
1442 (playsink), message);
1443 }
1444
1445 static void
1446 do_async_done (GstPlaySink * playsink)
1447 {
1448 GstMessage *message;
1449
1450 if (playsink->async_pending) {
1451 GST_INFO_OBJECT (playsink, "Sending async_done message");
1452 message =
1453 gst_message_new_async_done (GST_OBJECT_CAST (playsink),
1454 GST_CLOCK_TIME_NONE);
1455 GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (GST_BIN_CAST
1456 (playsink), message);
1457
1458 playsink->async_pending = FALSE;
1459 }
1460
1461 playsink->need_async_start = FALSE;
1462 }
1463
1464 /* try to change the state of an element. This function returns the element if
1465 * the state change could be performed. When this function returns NULL an error
1466 * occurred and the element is unreffed if @unref is TRUE. */
1467 static GstElement *
1468 try_element (GstPlaySink * playsink, GstElement * element, gboolean unref)
1469 {
1470 GstStateChangeReturn ret;
1471
1472 if (element) {
1473 ret = gst_element_set_state (element, GST_STATE_READY);
1474 if (ret == GST_STATE_CHANGE_FAILURE) {
1475 GST_DEBUG_OBJECT (playsink, "failed state change..");
1476 gst_element_set_state (element, GST_STATE_NULL);
1477 if (unref)
1478 gst_object_unref (element);
1479 element = NULL;
1480 }
1481 }
1482 return element;
1483 }
1484
1485 /* make the element (bin) that contains the elements needed to perform
1486 * video deinterlacing. Only used for *raw* video streams.
1487 *
1488 * +---------------------------------------+
1489 * | vbin |
1490 * | +----------+ +-----------+ |
1491 * | |colorspace| |deinterlace| |
1492 * | +-sink src-sink src-+ |
1493 * | | +----------+ +-----------+ | |
1494 * sink-+ +-src
1495 * +---------------------------------------+
1496 *
1497 */
1498 static GstPlayVideoDeinterlaceChain *
1499 gen_video_deinterlace_chain (GstPlaySink * playsink)
1500 {
1501 GstPlayVideoDeinterlaceChain *chain;
1502 GstBin *bin;
1503 GstPad *pad;
1504 GstElement *head = NULL, *prev = NULL;
1505
1506 chain = g_new0 (GstPlayVideoDeinterlaceChain, 1);
1507 chain->chain.playsink = playsink;
1508
1509 GST_DEBUG_OBJECT (playsink, "making video deinterlace chain %p", chain);
1510
1511 /* create a bin to hold objects, as we create them we add them to this bin so
1512 * that when something goes wrong we only need to unref the bin */
1513 chain->chain.bin = gst_bin_new ("vdbin");
1514 bin = GST_BIN_CAST (chain->chain.bin);
1515 gst_object_ref_sink (bin);
1516
1517 GST_DEBUG_OBJECT (playsink, "creating " COLORSPACE);
1518 chain->conv = gst_element_factory_make (COLORSPACE, "vdconv");
1519 if (chain->conv == NULL) {
1520 post_missing_element_message (playsink, COLORSPACE);
1521 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
1522 (_("Missing element '%s' - check your GStreamer installation."),
1523 COLORSPACE), ("video rendering might fail"));
1524 } else {
1525 gst_bin_add (bin, chain->conv);
1526 head = chain->conv;
1527 prev = chain->conv;
1528 }
1529
1530 GST_DEBUG_OBJECT (playsink, "creating deinterlace");
1531 chain->deinterlace = gst_element_factory_make ("deinterlace", "deinterlace");
1532 if (chain->deinterlace == NULL) {
1533 post_missing_element_message (playsink, "deinterlace");
1534 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
1535 (_("Missing element '%s' - check your GStreamer installation."),
1536 "deinterlace"), ("deinterlacing won't work"));
1537 } else {
1538 gst_bin_add (bin, chain->deinterlace);
1539 if (prev) {
1540 if (!gst_element_link_pads_full (prev, "src", chain->deinterlace, "sink",
1541 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
1542 goto link_failed;
1543 } else {
1544 head = chain->deinterlace;
1545 }
1546 prev = chain->deinterlace;
1547 }
1548
1549 if (head) {
1550 pad = gst_element_get_static_pad (head, "sink");
1551 chain->sinkpad = gst_ghost_pad_new ("sink", pad);
1552 gst_object_unref (pad);
1553 } else {
1554 chain->sinkpad = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
1555 }
1556
1557 if (prev) {
1558 pad = gst_element_get_static_pad (prev, "src");
1559 chain->srcpad = gst_ghost_pad_new ("src", pad);
1560 gst_object_unref (pad);
1561 } else {
1562 chain->srcpad = gst_ghost_pad_new ("src", chain->sinkpad);
1563 }
1564
1565 gst_element_add_pad (chain->chain.bin, chain->sinkpad);
1566 gst_element_add_pad (chain->chain.bin, chain->srcpad);
1567
1568 return chain;
1569
1570 link_failed:
1571 {
1572 GST_ELEMENT_ERROR (playsink, CORE, PAD,
1573 (NULL), ("Failed to configure the video deinterlace chain."));
1574 free_chain ((GstPlayChain *) chain);
1575 return NULL;
1576 }
1577 }
1578
1579 static gboolean
1580 is_valid_color_balance_element (GstColorBalance * bal)
1581 {
1582 gboolean have_brightness = FALSE;
1583 gboolean have_contrast = FALSE;
1584 gboolean have_hue = FALSE;
1585 gboolean have_saturation = FALSE;
1586 const GList *channels, *l;
1587
1588 channels = gst_color_balance_list_channels (bal);
1589 for (l = channels; l; l = l->next) {
1590 GstColorBalanceChannel *ch = l->data;
1591
1592 if (g_strrstr (ch->label, "BRIGHTNESS"))
1593 have_brightness = TRUE;
1594 else if (g_strrstr (ch->label, "CONTRAST"))
1595 have_contrast = TRUE;
1596 else if (g_strrstr (ch->label, "HUE"))
1597 have_hue = TRUE;
1598 else if (g_strrstr (ch->label, "SATURATION"))
1599 have_saturation = TRUE;
1600 }
1601
1602 return have_brightness && have_contrast && have_hue && have_saturation;
1603 }
1604
1605 static void
1606 iterate_color_balance_elements (const GValue * item, gpointer user_data)
1607 {
1608 gboolean valid;
1609 GstColorBalance *cb, **cb_out = user_data;
1610
1611 cb = GST_COLOR_BALANCE (g_value_get_object (item));
1612 valid = is_valid_color_balance_element (cb);
1613 if (valid) {
1614 if (*cb_out
1615 && gst_color_balance_get_balance_type (*cb_out) ==
1616 GST_COLOR_BALANCE_SOFTWARE) {
1617 gst_object_unref (*cb_out);
1618 *cb_out = GST_COLOR_BALANCE (gst_object_ref (cb));
1619 } else if (!*cb_out) {
1620 *cb_out = GST_COLOR_BALANCE (gst_object_ref (cb));
1621 }
1622 }
1623 }
1624
1625 static GstColorBalance *
1626 find_color_balance_element (GstElement * element)
1627 {
1628 GstIterator *it;
1629 GstColorBalance *cb = NULL;
1630
1631 if (GST_IS_COLOR_BALANCE (element)
1632 && is_valid_color_balance_element (GST_COLOR_BALANCE (element)))
1633 return GST_COLOR_BALANCE (gst_object_ref (element));
1634 else if (!GST_IS_BIN (element))
1635 return NULL;
1636
1637 it = gst_bin_iterate_all_by_interface (GST_BIN (element),
1638 GST_TYPE_COLOR_BALANCE);
1639 while (gst_iterator_foreach (it, iterate_color_balance_elements,
1640 &cb) == GST_ITERATOR_RESYNC)
1641 gst_iterator_resync (it);
1642 gst_iterator_free (it);
1643
1644 return cb;
1645 }
1646
1647 static void
1648 colorbalance_value_changed_cb (GstColorBalance * balance,
1649 GstColorBalanceChannel * channel, gint value, GstPlaySink * playsink)
1650 {
1651 GList *l;
1652 gint i;
1653
1654 for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
1655 GstColorBalanceChannel *proxy = l->data;
1656
1657 if (g_strrstr (channel->label, proxy->label)) {
1658 gdouble new_val;
1659
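      /* Example with assumed ranges: a sink channel spanning [0, 255] that
       * reports value 128 maps to ~0.502, which becomes ~4 on the proxy's
       * [-1000, 1000] scale after rounding. */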
1660 /* Convert to [0, 1] range */
1661 new_val =
1662 ((gdouble) value -
1663 (gdouble) channel->min_value) / ((gdouble) channel->max_value -
1664 (gdouble) channel->min_value);
1665 /* Convert to proxy range */
1666 new_val =
1667 proxy->min_value + new_val * ((gdouble) proxy->max_value -
1668 (gdouble) proxy->min_value);
1669 playsink->colorbalance_values[i] = (gint) (0.5 + new_val);
1670
1671 gst_color_balance_value_changed (GST_COLOR_BALANCE (playsink), proxy,
1672 playsink->colorbalance_values[i]);
1673 break;
1674 }
1675 }
1676 }
1677
1678 static void
1679 update_colorbalance (GstPlaySink * playsink)
1680 {
1681 GstColorBalance *balance = NULL;
1682 GList *l;
1683 gint i;
1684
1685 GST_OBJECT_LOCK (playsink);
1686 if (playsink->colorbalance_element) {
1687 balance =
1688 GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
1689 }
1690 GST_OBJECT_UNLOCK (playsink);
1691 if (!balance)
1692 return;
1693
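  /* Block our own value-changed handler while pushing values down; otherwise
   * gst_color_balance_set_value() would trigger colorbalance_value_changed_cb()
   * and rewrite playsink->colorbalance_values from the element again. */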
1694 g_signal_handler_block (balance, playsink->colorbalance_value_changed_id);
1695
1696 for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
1697 GstColorBalanceChannel *proxy = l->data;
1698 GstColorBalanceChannel *channel = NULL;
1699 const GList *channels, *k;
1700 gdouble new_val;
1701
1702 channels = gst_color_balance_list_channels (balance);
1703 for (k = channels; k; k = k->next) {
1704 GstColorBalanceChannel *tmp = k->data;
1705
1706 if (g_strrstr (tmp->label, proxy->label)) {
1707 channel = tmp;
1708 break;
1709 }
1710 }
1711
1712 g_assert (channel);
1713
1714 /* Convert to [0, 1] range */
1715 new_val =
1716 ((gdouble) playsink->colorbalance_values[i] -
1717 (gdouble) proxy->min_value) / ((gdouble) proxy->max_value -
1718 (gdouble) proxy->min_value);
1719 /* Convert to channel range */
1720 new_val =
1721 channel->min_value + new_val * ((gdouble) channel->max_value -
1722 (gdouble) channel->min_value);
1723
1724 gst_color_balance_set_value (balance, channel, (gint) (new_val + 0.5));
1725 }
1726
1727 g_signal_handler_unblock (balance, playsink->colorbalance_value_changed_id);
1728
1729 gst_object_unref (balance);
1730 }
1731
1732 /* make the element (bin) that contains the elements needed to perform
1733 * video display.
1734 *
1735 * +------------------------------------------------------------------------+
1736 * | vbin |
1737 * | +--------+ +-------+ +----------+ +----------+ +---------+ |
1738 * | | filter | | queue | |colorspace| |videoscale| |videosink| |
1739 * | +-sink src-sink src-sink src-sink src-sink | |
1740 * | | +--------+ +-------+ +----------+ +----------+ +---------+ |
1741 * sink-+ |
1742 * +------------------------------------------------------------------------+
1743 *
1744 */
1745 static GstPlayVideoChain *
1746 gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async)
1747 {
1748 GstPlayVideoChain *chain;
1749 GstBin *bin;
1750 GstPad *pad;
1751 GstElement *head = NULL, *prev = NULL, *elem = NULL;
1752
1753 chain = g_new0 (GstPlayVideoChain, 1);
1754 chain->chain.playsink = playsink;
1755 chain->chain.raw = raw;
1756
1757 GST_DEBUG_OBJECT (playsink, "making video chain %p", chain);
1758
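  /* Sink selection order: the application-provided video sink first, then
   * autovideosink, and finally the DEFAULT_VIDEOSINK from config.h if that
   * differs from autovideosink. */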
1759 if (playsink->video_sink) {
1760 GST_DEBUG_OBJECT (playsink, "trying configured videosink");
1761 chain->sink = try_element (playsink, playsink->video_sink, FALSE);
1762 } else {
1763 /* only try fallback if no specific sink was chosen */
1764 if (chain->sink == NULL) {
1765 GST_DEBUG_OBJECT (playsink, "trying autovideosink");
1766 elem = gst_element_factory_make ("autovideosink", "videosink");
1767 chain->sink = try_element (playsink, elem, TRUE);
1768 }
1769 if (chain->sink == NULL) {
1770       /* if the default sink from config.h is different, try it too */
1771 if (strcmp (DEFAULT_VIDEOSINK, "autovideosink")) {
1772 GST_DEBUG_OBJECT (playsink, "trying " DEFAULT_VIDEOSINK);
1773 elem = gst_element_factory_make (DEFAULT_VIDEOSINK, "videosink");
1774 chain->sink = try_element (playsink, elem, TRUE);
1775 }
1776 }
1777 if (chain->sink)
1778 playsink->video_sink = gst_object_ref (chain->sink);
1779 }
1780 if (chain->sink == NULL)
1781 goto no_sinks;
1782 head = chain->sink;
1783
1784 /* if we can disable async behaviour of the sink, we can avoid adding a
1785 * queue for the audio chain. */
1786 elem =
1787 gst_play_sink_find_property_sinks (playsink, chain->sink, "async",
1788 G_TYPE_BOOLEAN);
1789 if (elem) {
1790 GST_DEBUG_OBJECT (playsink, "setting async property to %d on element %s",
1791 async, GST_ELEMENT_NAME (elem));
1792 g_object_set (elem, "async", async, NULL);
1793 chain->async = async;
1794 } else {
1795 GST_DEBUG_OBJECT (playsink, "no async property on the sink");
1796 chain->async = TRUE;
1797 }
1798
1799 /* Make sure the aspect ratio is kept */
1800 elem =
1801 gst_play_sink_find_property_sinks (playsink, chain->sink,
1802 "force-aspect-ratio", G_TYPE_BOOLEAN);
1803 if (elem)
1804 g_object_set (elem, "force-aspect-ratio", playsink->force_aspect_ratio,
1805 NULL);
1806
1807 /* find ts-offset element */
1808 gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *)
1809 gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset",
1810 G_TYPE_INT64));
1811
1812 /* create a bin to hold objects, as we create them we add them to this bin so
1813 * that when something goes wrong we only need to unref the bin */
1814 chain->chain.bin = gst_bin_new ("vbin");
1815 bin = GST_BIN_CAST (chain->chain.bin);
1816 gst_object_ref_sink (bin);
1817 gst_bin_add (bin, chain->sink);
1818
1819 /* Get the VideoOverlay element */
1820 {
1821 GstVideoOverlay *overlay = NULL;
1822
1823 GST_OBJECT_LOCK (playsink);
1824 if (playsink->overlay_element)
1825 gst_object_unref (playsink->overlay_element);
1826 playsink->overlay_element =
1827 GST_VIDEO_OVERLAY (gst_bin_get_by_interface (GST_BIN (chain->chain.bin),
1828 GST_TYPE_VIDEO_OVERLAY));
1829 if (playsink->overlay_element)
1830 overlay = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
1831 GST_OBJECT_UNLOCK (playsink);
1832
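    /* Re-apply overlay settings that the application may have configured
     * before this sink existed: the window handle, event handling and the
     * render rectangle are all cached in the playsink. */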
1833 if (overlay) {
1834 if (playsink->overlay_handle_set)
1835 gst_video_overlay_set_window_handle (overlay, playsink->overlay_handle);
1836 if (playsink->overlay_handle_events_set)
1837 gst_video_overlay_handle_events (overlay,
1838 playsink->overlay_handle_events);
1839 if (playsink->overlay_render_rectangle_set)
1840 gst_video_overlay_set_render_rectangle (overlay,
1841 playsink->overlay_x, playsink->overlay_y,
1842 playsink->overlay_width, playsink->overlay_height);
1843 gst_object_unref (overlay);
1844 }
1845 }
1846
1847 head = chain->sink;
1848 prev = NULL;
1849
1850 /* add the video filter first, so everything is working with post-filter
1851 * samples */
1852 chain->filter = gst_play_sink_get_filter (playsink,
1853 GST_PLAY_SINK_TYPE_VIDEO_RAW);
1854 if (chain->filter) {
1855 if (!raw) {
1856 gst_object_unref (chain->filter);
1857 chain->filter = NULL;
1858
1859 if (playsink->flags & GST_PLAY_FLAG_FORCE_FILTERS) {
1860 goto filter_with_nonraw;
1861 } else {
1862 GST_DEBUG_OBJECT (playsink,
1863 "skipping video filter since we're not raw");
1864 }
1865 } else {
1866 GST_DEBUG_OBJECT (playsink, "adding video filter");
1867 chain->filter_conv =
1868 gst_element_factory_make ("videoconvert", "filter-convert");
1869 if (!chain->filter_conv) {
1870 post_missing_element_message (playsink, "videoconvert");
1871 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
1872 (_("Missing element '%s' - check your GStreamer installation."),
1873 "videoconvert"),
1874 ("video playback and visualizations might not work"));
1875 } else {
1876 gst_bin_add (bin, chain->filter_conv);
1877 head = prev = chain->filter_conv;
1878 }
1879
1880 gst_bin_add (bin, chain->filter);
1881 /* Bin takes a new reference because we sinked any
1882 * floating reference ourselves already */
1883 gst_object_unref (chain->filter);
1884 if (prev) {
1885 if (!gst_element_link_pads_full (prev, "src", chain->filter, NULL,
1886 GST_PAD_LINK_CHECK_TEMPLATE_CAPS)) {
1887 goto link_failed;
1888 }
1889 } else {
1890 head = chain->filter;
1891 }
1892 prev = chain->filter;
1893 }
1894 }
1895
1896   /* decouple the decoder from the sink; this improves playback quite a lot
1897    * since the decoder can continue while the sink blocks for synchronisation.
1898    * We don't want many buffers, as that consumes a lot of memory, and we don't
1899    * want too few either, or we would be context switching too quickly. */
1900 chain->queue = gst_element_factory_make ("queue", "vqueue");
1901 if (chain->queue == NULL) {
1902 post_missing_element_message (playsink, "queue");
1903 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
1904 (_("Missing element '%s' - check your GStreamer installation."),
1905 "queue"), ("video rendering might be suboptimal"));
1906 } else {
1907 g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3,
1908 "max-size-bytes", 0, "max-size-time", (gint64) 0, "silent", TRUE, NULL);
1909 gst_bin_add (bin, chain->queue);
1910 if (prev) {
1911 if (!gst_element_link_pads_full (prev, "src", chain->queue, "sink",
1912 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
1913 goto link_failed;
1914 } else {
1915 head = chain->queue;
1916 }
1917 prev = chain->queue;
1918 }
1919
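  /* Swap the color balance provider: drop the connection to the previous
   * balance element (if any), look for one inside the new sink and connect to
   * its value-changed signal so the proxy channels stay in sync. */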
1920 GST_OBJECT_LOCK (playsink);
1921 if (playsink->colorbalance_element) {
1922 g_signal_handler_disconnect (playsink->colorbalance_element,
1923 playsink->colorbalance_value_changed_id);
1924 gst_object_unref (playsink->colorbalance_element);
1925 playsink->colorbalance_value_changed_id = 0;
1926 }
1927 playsink->colorbalance_element = find_color_balance_element (chain->sink);
1928 if (playsink->colorbalance_element) {
1929 playsink->colorbalance_value_changed_id =
1930 g_signal_connect (playsink->colorbalance_element, "value-changed",
1931 G_CALLBACK (colorbalance_value_changed_cb), playsink);
1932 }
1933 GST_OBJECT_UNLOCK (playsink);
1934
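  /* A GstPlaySinkVideoConvert element is inserted when raw conversion is
   * wanted (no GST_PLAY_FLAG_NATIVE_VIDEO) or when software color balance must
   * be provided because the sink has no balance of its own and
   * GST_PLAY_FLAG_SOFT_COLORBALANCE is set. */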
1935 if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)
1936 || (!playsink->colorbalance_element
1937 && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE))) {
1938 gboolean use_converters = !(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO);
1939 gboolean use_balance = !playsink->colorbalance_element
1940 && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE);
1941
1942 GST_DEBUG_OBJECT (playsink, "creating videoconverter");
1943 chain->conv =
1944 g_object_new (GST_TYPE_PLAY_SINK_VIDEO_CONVERT, "name", "vconv",
1945 "use-converters", use_converters, "use-balance", use_balance, NULL);
1946
1947 GST_OBJECT_LOCK (playsink);
1948 if (use_balance && GST_PLAY_SINK_VIDEO_CONVERT (chain->conv)->balance) {
1949 playsink->colorbalance_element =
1950 GST_COLOR_BALANCE (gst_object_ref (GST_PLAY_SINK_VIDEO_CONVERT
1951 (chain->conv)->balance));
1952 playsink->colorbalance_value_changed_id =
1953 g_signal_connect (playsink->colorbalance_element, "value-changed",
1954 G_CALLBACK (colorbalance_value_changed_cb), playsink);
1955 }
1956 GST_OBJECT_UNLOCK (playsink);
1957
1958 gst_bin_add (bin, chain->conv);
1959 if (prev) {
1960 if (!gst_element_link_pads_full (prev, "src", chain->conv, "sink",
1961 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
1962 goto link_failed;
1963 } else {
1964 head = chain->conv;
1965 }
1966 prev = chain->conv;
1967 }
1968
1969 update_colorbalance (playsink);
1970
1971 if (prev) {
1972 GST_DEBUG_OBJECT (playsink, "linking to sink");
1973 if (!gst_element_link_pads_full (prev, "src", chain->sink, NULL,
1974 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
1975 goto link_failed;
1976 }
1977
1978 pad = gst_element_get_static_pad (head, "sink");
1979 chain->sinkpad = gst_ghost_pad_new ("sink", pad);
1980
1981 /* sending audio/video flushes break stream changes when the pipeline
1982 * is paused and played again in 0.10 */
1983 #if 0
1984 gst_pad_set_event_function (chain->sinkpad,
1985 GST_DEBUG_FUNCPTR (gst_play_sink_video_sink_event));
1986 gst_pad_set_chain_function (chain->sinkpad,
1987 GST_DEBUG_FUNCPTR (gst_play_sink_video_sink_chain));
1988 #endif
1989
1990 gst_object_unref (pad);
1991 gst_element_add_pad (chain->chain.bin, chain->sinkpad);
1992
1993 return chain;
1994
1995 /* ERRORS */
1996 no_sinks:
1997 {
1998 if (!elem && !playsink->video_sink) {
1999 post_missing_element_message (playsink, "autovideosink");
2000 if (strcmp (DEFAULT_VIDEOSINK, "autovideosink")) {
2001 post_missing_element_message (playsink, DEFAULT_VIDEOSINK);
2002 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
2003 (_("Both autovideosink and %s elements are missing."),
2004 DEFAULT_VIDEOSINK), (NULL));
2005 } else {
2006 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
2007 (_("The autovideosink element is missing.")), (NULL));
2008 }
2009 } else {
2010 if (playsink->video_sink) {
2011 GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE,
2012 (_("Configured videosink %s is not working."),
2013 GST_ELEMENT_NAME (playsink->video_sink)), (NULL));
2014 } else if (strcmp (DEFAULT_VIDEOSINK, "autovideosink")) {
2015 GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE,
2016 (_("Both autovideosink and %s elements are not working."),
2017 DEFAULT_VIDEOSINK), (NULL));
2018 } else {
2019 GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE,
2020 (_("The autovideosink element is not working.")), (NULL));
2021 }
2022 }
2023 free_chain ((GstPlayChain *) chain);
2024 return NULL;
2025 }
2026
2027 link_failed:
2028 {
2029 GST_ELEMENT_ERROR (playsink, CORE, PAD,
2030 (NULL), ("Failed to configure the video sink."));
2031 goto cleanup;
2032 }
2033 filter_with_nonraw:
2034 {
2035 GST_ELEMENT_ERROR (playsink, CORE, NEGOTIATION,
2036 (NULL), ("Cannot apply video-filter on non-raw stream"));
2037 goto cleanup;
2038 }
2039 cleanup:
2040 /* checking sink made it READY */
2041 gst_element_set_state (chain->sink, GST_STATE_NULL);
2042 /* Remove chain from the bin to allow reuse later */
2043 gst_bin_remove (bin, chain->sink);
2044 free_chain ((GstPlayChain *) chain);
2045 return NULL;
2046 }
2047
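/* Reconfigure an already existing video chain for reuse instead of rebuilding
 * it: bail out (forcing a rebuild) when a filter is present and the raw-ness
 * of the stream changed, otherwise bring the sink back to READY and re-apply
 * the overlay, ts-offset, async, aspect-ratio and color balance setup. */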
2048 static gboolean
2049 setup_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async)
2050 {
2051 GstElement *elem;
2052 GstPlayVideoChain *chain;
2053 GstStateChangeReturn ret;
2054
2055 chain = playsink->videochain;
2056
2057 /* if we have a filter, and raw-ness changed, we have to force a rebuild */
2058 if (chain->filter && chain->chain.raw != raw)
2059 return FALSE;
2060
2061 chain->chain.raw = raw;
2062
2063 /* if the chain was active we don't do anything */
2064 if (GST_PLAY_CHAIN (chain)->activated)
2065 return TRUE;
2066
2067 /* try to set the sink element to READY again */
2068 ret = gst_element_set_state (chain->sink, GST_STATE_READY);
2069 if (ret == GST_STATE_CHANGE_FAILURE)
2070 return FALSE;
2071
2072 /* Get the VideoOverlay element */
2073 {
2074 GstVideoOverlay *overlay = NULL;
2075
2076 GST_OBJECT_LOCK (playsink);
2077 if (playsink->overlay_element)
2078 gst_object_unref (playsink->overlay_element);
2079 playsink->overlay_element =
2080 GST_VIDEO_OVERLAY (gst_bin_get_by_interface (GST_BIN (chain->chain.bin),
2081 GST_TYPE_VIDEO_OVERLAY));
2082 if (playsink->overlay_element)
2083 overlay = GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
2084 GST_OBJECT_UNLOCK (playsink);
2085
2086 if (overlay) {
2087 if (playsink->overlay_handle_set)
2088 gst_video_overlay_set_window_handle (overlay, playsink->overlay_handle);
2089 if (playsink->overlay_handle_events_set)
2090 gst_video_overlay_handle_events (overlay,
2091 playsink->overlay_handle_events);
2092 if (playsink->overlay_render_rectangle_set)
2093 gst_video_overlay_set_render_rectangle (overlay,
2094 playsink->overlay_x, playsink->overlay_y,
2095 playsink->overlay_width, playsink->overlay_height);
2096 gst_object_unref (overlay);
2097 }
2098 }
2099
2100 /* find ts-offset element */
2101 gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *)
2102 gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset",
2103 G_TYPE_INT64));
2104
2105 /* if we can disable async behaviour of the sink, we can avoid adding a
2106 * queue for the audio chain. */
2107 elem =
2108 gst_play_sink_find_property_sinks (playsink, chain->sink, "async",
2109 G_TYPE_BOOLEAN);
2110 if (elem) {
2111 GST_DEBUG_OBJECT (playsink, "setting async property to %d on element %s",
2112 async, GST_ELEMENT_NAME (elem));
2113 g_object_set (elem, "async", async, NULL);
2114 chain->async = async;
2115 } else {
2116 GST_DEBUG_OBJECT (playsink, "no async property on the sink");
2117 chain->async = TRUE;
2118 }
2119
2120 /* Make sure the aspect ratio is kept */
2121 elem =
2122 gst_play_sink_find_property_sinks (playsink, chain->sink,
2123 "force-aspect-ratio", G_TYPE_BOOLEAN);
2124 if (elem)
2125 g_object_set (elem, "force-aspect-ratio", playsink->force_aspect_ratio,
2126 NULL);
2127
2128 GST_OBJECT_LOCK (playsink);
2129 if (playsink->colorbalance_element) {
2130 g_signal_handler_disconnect (playsink->colorbalance_element,
2131 playsink->colorbalance_value_changed_id);
2132 playsink->colorbalance_value_changed_id = 0;
2133 gst_object_unref (playsink->colorbalance_element);
2134 }
2135 playsink->colorbalance_element = find_color_balance_element (chain->sink);
2136 if (playsink->colorbalance_element) {
2137 playsink->colorbalance_value_changed_id =
2138 g_signal_connect (playsink->colorbalance_element, "value-changed",
2139 G_CALLBACK (colorbalance_value_changed_cb), playsink);
2140 }
2141 GST_OBJECT_UNLOCK (playsink);
2142
2143 if (chain->conv) {
2144 gboolean use_balance = !playsink->colorbalance_element
2145 && (playsink->flags & GST_PLAY_FLAG_SOFT_COLORBALANCE);
2146
2147 g_object_set (chain->conv, "use-balance", use_balance, NULL);
2148
2149 GST_OBJECT_LOCK (playsink);
2150 if (use_balance && GST_PLAY_SINK_VIDEO_CONVERT (chain->conv)->balance) {
2151 playsink->colorbalance_element =
2152 GST_COLOR_BALANCE (gst_object_ref (GST_PLAY_SINK_VIDEO_CONVERT
2153 (chain->conv)->balance));
2154 playsink->colorbalance_value_changed_id =
2155 g_signal_connect (playsink->colorbalance_element, "value-changed",
2156 G_CALLBACK (colorbalance_value_changed_cb), playsink);
2157 }
2158 GST_OBJECT_UNLOCK (playsink);
2159 }
2160
2161 update_colorbalance (playsink);
2162
2163 return TRUE;
2164 }
2165
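/* Shared event handler for the sink ghost pads. It watches for the custom
 * downstream OOB events "playsink-custom-<type>-flush" and
 * "playsink-custom-<type>-flush-finish" (presumably sent by upstream elements
 * such as playbin when switching streams) and uses them to toggle the per-sink
 * flush bookkeeping before forwarding the event as usual. */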
2166 static gboolean
2167 gst_play_sink_sink_event (GstPad * pad, GstObject * parent, GstEvent * event,
2168 const gchar * sink_type,
2169 gboolean * sink_ignore_wrong_state,
2170 gboolean * sink_custom_flush_finished, gboolean * sink_pending_flush)
2171 {
2172 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent));
2173 gboolean ret;
2174 const GstStructure *structure = gst_event_get_structure (event);
2175
2176 if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_DOWNSTREAM_OOB && structure) {
2177 gchar *custom_flush;
2178 gchar *custom_flush_finish;
2179
2180 custom_flush = g_strdup_printf ("playsink-custom-%s-flush", sink_type);
2181 custom_flush_finish =
2182 g_strdup_printf ("playsink-custom-%s-flush-finish", sink_type);
2183 if (strcmp (gst_structure_get_name (structure), custom_flush) == 0) {
2184 GST_DEBUG_OBJECT (pad,
2185 "Custom %s flush event received, marking to flush %s", sink_type,
2186 sink_type);
2187 GST_PLAY_SINK_LOCK (playsink);
2188 *sink_ignore_wrong_state = TRUE;
2189 *sink_custom_flush_finished = FALSE;
2190 GST_PLAY_SINK_UNLOCK (playsink);
2191 } else if (strcmp (gst_structure_get_name (structure),
2192 custom_flush_finish) == 0) {
2193 GST_DEBUG_OBJECT (pad, "Custom %s flush finish event received",
2194 sink_type);
2195 GST_PLAY_SINK_LOCK (playsink);
2196 *sink_pending_flush = TRUE;
2197 *sink_custom_flush_finished = TRUE;
2198 GST_PLAY_SINK_UNLOCK (playsink);
2199 }
2200
2201 g_free (custom_flush);
2202 g_free (custom_flush_finish);
2203 }
2204
2205 GST_DEBUG_OBJECT (pad, "Forwarding event %" GST_PTR_FORMAT, event);
2206 ret = gst_pad_event_default (pad, parent, gst_event_ref (event));
2207
2208 gst_event_unref (event);
2209 gst_object_unref (playsink);
2210 return ret;
2211 }
2212
2213 static GstFlowReturn
2214 gst_play_sink_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer,
2215 const gchar * sink_type,
2216 gboolean * sink_ignore_wrong_state,
2217 gboolean * sink_custom_flush_finished, gboolean * sink_pending_flush)
2218 {
2219 GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
2220 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
2221 GstFlowReturn ret;
2222
2223 GST_PLAY_SINK_LOCK (playsink);
2224
2225 if (*sink_pending_flush) {
2226 GstEvent *segment_event;
2227 GstEvent *event;
2228 GstStructure *structure;
2229
2230 *sink_pending_flush = FALSE;
2231
2232 GST_PLAY_SINK_UNLOCK (playsink);
2233
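    /* Flush the chain with locally generated flush-start/flush-stop events
     * followed by a copy of the sticky segment event. Each of them carries the
     * reset-segment marker so that the src-pad event handler (see
     * gst_play_sink_text_src_event() below) can drop them before they leave
     * the bin. */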
2234 segment_event = gst_pad_get_sticky_event (pad, GST_EVENT_SEGMENT, 0);
2235
2236 /* make the bin drop all cached data.
2237 * This event will be dropped on the src pad, if any. */
2238 event = gst_event_new_flush_start ();
2239 if (segment_event)
2240 gst_event_set_seqnum (event, gst_event_get_seqnum (segment_event));
2241 structure = gst_event_writable_structure (event);
2242 gst_structure_id_set (structure,
2243 _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL);
2244
2245 GST_DEBUG_OBJECT (pad,
2246 "Pushing %s flush-start event with reset segment marker set: %"
2247 GST_PTR_FORMAT, sink_type, event);
2248 gst_pad_send_event (pad, event);
2249
2250 /* make queue drop all cached data.
2251 * This event will be dropped on the src pad. */
2252 event = gst_event_new_flush_stop (TRUE);
2253 if (segment_event)
2254 gst_event_set_seqnum (event, gst_event_get_seqnum (segment_event));
2255 structure = gst_event_writable_structure (event);
2256 gst_structure_id_set (structure,
2257 _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL);
2258
2259 GST_DEBUG_OBJECT (pad,
2260 "Pushing %s flush-stop event with reset segment marker set: %"
2261 GST_PTR_FORMAT, sink_type, event);
2262 gst_pad_send_event (pad, event);
2263
2264 /* Re-sync queue segment info after flush-stop.
2265 * This event will be dropped on the src pad. */
2266 if (segment_event) {
2267 event = gst_event_copy (segment_event);
2268 structure = gst_event_writable_structure (event);
2269 gst_structure_id_set (structure,
2270 _playsink_reset_segment_event_marker_id, G_TYPE_BOOLEAN, TRUE, NULL);
2271
2272 GST_DEBUG_OBJECT (playsink,
2273 "Pushing segment event with reset "
2274 "segment marker set: %" GST_PTR_FORMAT, event);
2275 gst_pad_send_event (pad, event);
2276 gst_event_unref (segment_event);
2277 }
2278 } else {
2279 GST_PLAY_SINK_UNLOCK (playsink);
2280 }
2281
2282 ret = gst_proxy_pad_chain_default (pad, parent, buffer);
2283
2284 GST_PLAY_SINK_LOCK (playsink);
2285 if (ret == GST_FLOW_FLUSHING && *sink_ignore_wrong_state) {
2286 GST_DEBUG_OBJECT (pad, "Ignoring wrong state for %s during flush",
2287 sink_type);
2288 if (*sink_custom_flush_finished) {
2289 GST_DEBUG_OBJECT (pad, "Custom flush finished, stop ignoring "
2290 "wrong state for %s", sink_type);
2291 *sink_ignore_wrong_state = FALSE;
2292 }
2293
2294 ret = GST_FLOW_OK;
2295 }
2296 GST_PLAY_SINK_UNLOCK (playsink);
2297
2298 gst_object_unref (playsink);
2299 gst_object_unref (tbin);
2300 return ret;
2301 }
2302
2303 /* sending audio/video flushes break stream changes when the pipeline
2304 * is paused and played again in 0.10 */
2305 #if 0
2306 static gboolean
2307 gst_play_sink_video_sink_event (GstPad * pad, GstEvent * event)
2308 {
2309 GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
2310 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
2311 gboolean ret;
2312
2313 ret = gst_play_sink_sink_event (pad, event, "video",
2314 &playsink->video_ignore_wrong_state,
2315 &playsink->video_custom_flush_finished,
2316 &playsink->video_pending_flush, &playsink->video_segment);
2317
2318 gst_object_unref (playsink);
2319 gst_object_unref (tbin);
2320 return ret;
2321 }
2322
2323 static GstFlowReturn
2324 gst_play_sink_video_sink_chain (GstPad * pad, GstBuffer * buffer)
2325 {
2326 GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
2327 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
2328 gboolean ret;
2329
2330 ret = gst_play_sink_sink_chain (pad, buffer, "video",
2331 &playsink->video_ignore_wrong_state,
2332 &playsink->video_custom_flush_finished,
2333 &playsink->video_pending_flush, &playsink->video_segment);
2334
2335 gst_object_unref (playsink);
2336 gst_object_unref (tbin);
2337 return ret;
2338 }
2339
2340 static gboolean
2341 gst_play_sink_audio_sink_event (GstPad * pad, GstEvent * event)
2342 {
2343 GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
2344 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
2345 gboolean ret;
2346
2347 ret = gst_play_sink_sink_event (pad, event, "audio",
2348 &playsink->audio_ignore_wrong_state,
2349 &playsink->audio_custom_flush_finished,
2350 &playsink->audio_pending_flush, &playsink->audio_segment);
2351
2352 gst_object_unref (playsink);
2353 gst_object_unref (tbin);
2354 return ret;
2355 }
2356
2357 static GstFlowReturn
2358 gst_play_sink_audio_sink_chain (GstPad * pad, GstBuffer * buffer)
2359 {
2360 GstBin *tbin = GST_BIN_CAST (gst_pad_get_parent (pad));
2361 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_pad_get_parent (tbin));
2362 gboolean ret;
2363
2364 ret = gst_play_sink_sink_chain (pad, buffer, "audio",
2365 &playsink->audio_ignore_wrong_state,
2366 &playsink->audio_custom_flush_finished,
2367 &playsink->audio_pending_flush, &playsink->audio_segment);
2368
2369 gst_object_unref (playsink);
2370 gst_object_unref (tbin);
2371 return ret;
2372 }
2373 #endif
2374
2375 static gboolean
2376 gst_play_sink_text_sink_event (GstPad * pad, GstObject * parent,
2377 GstEvent * event)
2378 {
2379 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent));
2380 gboolean ret;
2381
2382 ret = gst_play_sink_sink_event (pad, parent, event, "subtitle",
2383 &playsink->text_ignore_wrong_state,
2384 &playsink->text_custom_flush_finished, &playsink->text_pending_flush);
2385
2386 gst_object_unref (playsink);
2387
2388 return ret;
2389 }
2390
2391 static GstFlowReturn
2392 gst_play_sink_text_sink_chain (GstPad * pad, GstObject * parent,
2393 GstBuffer * buffer)
2394 {
2395   GstFlowReturn ret;
2396 GstPlaySink *playsink = GST_PLAY_SINK_CAST (gst_object_get_parent (parent));
2397
2398 ret = gst_play_sink_sink_chain (pad, parent, buffer, "subtitle",
2399 &playsink->text_ignore_wrong_state,
2400 &playsink->text_custom_flush_finished, &playsink->text_pending_flush);
2401
2402 gst_object_unref (playsink);
2403 return ret;
2404 }
2405
2406 static gboolean
2407 gst_play_sink_text_src_event (GstPad * pad, GstObject * parent,
2408 GstEvent * event)
2409 {
2410 gboolean ret;
2411 const GstStructure *structure;
2412
2413 GST_DEBUG_OBJECT (pad, "Got event %" GST_PTR_FORMAT, event);
2414
2415 structure = gst_event_get_structure (event);
2416
2417 if (structure &&
2418 gst_structure_id_has_field (structure,
2419 _playsink_reset_segment_event_marker_id)) {
2420 /* the events marked with a reset segment marker
2421 * are sent internally to reset the queue and
2422 * must be dropped here */
2423 GST_DEBUG_OBJECT (pad, "Dropping event with reset "
2424 "segment marker set: %" GST_PTR_FORMAT, event);
2425 ret = TRUE;
2426 goto out;
2427 }
2428
2429 ret = gst_pad_event_default (pad, parent, gst_event_ref (event));
2430
2431 out:
2432 gst_event_unref (event);
2433 return ret;
2434 }
2435
2436 /* make an element for playback of video with subtitles embedded.
2437 * Only used for *raw* video streams.
2438 *
2439 * +--------------------------------------------+
2440 * | tbin |
2441 * | +--------+ +-----------------+ |
2442 * | | queue | | subtitleoverlay | |
2443 * video--src sink---video_sink | |
2444 * | +--------+ | src--src
2445 * text------------------text_sink | |
2446 * | +-----------------+ |
2447 * +--------------------------------------------+
2448 *
2449 */
2450 static GstPlayTextChain *
2451 gen_text_chain (GstPlaySink * playsink)
2452 {
2453 GstPlayTextChain *chain;
2454 GstBin *bin;
2455 GstElement *elem;
2456 GstPad *videosinkpad, *textsinkpad, *srcpad;
2457
2458 chain = g_new0 (GstPlayTextChain, 1);
2459 chain->chain.playsink = playsink;
2460
2461 GST_DEBUG_OBJECT (playsink, "making text chain %p", chain);
2462
2463 chain->chain.bin = gst_bin_new ("tbin");
2464 bin = GST_BIN_CAST (chain->chain.bin);
2465 gst_object_ref_sink (bin);
2466
2467 videosinkpad = textsinkpad = srcpad = NULL;
2468
2469 /* first try to hook the text pad to the custom sink */
2470 if (playsink->text_sink) {
2471 GST_DEBUG_OBJECT (playsink, "trying configured textsink");
2472 chain->sink = try_element (playsink, playsink->text_sink, FALSE);
2473 if (chain->sink) {
2474 elem =
2475 gst_play_sink_find_property_sinks (playsink, chain->sink, "async",
2476 G_TYPE_BOOLEAN);
2477 if (elem) {
2478 /* make sure the sparse subtitles don't participate in the preroll */
2479 g_object_set (elem, "async", FALSE, NULL);
2480 GST_DEBUG_OBJECT (playsink, "adding custom text sink");
2481 gst_bin_add (bin, chain->sink);
2482 /* NOTE streamsynchronizer needs streams decoupled */
2483 /* make a little queue */
2484 chain->queue = gst_element_factory_make ("queue", "subqueue");
2485 if (chain->queue == NULL) {
2486 post_missing_element_message (playsink, "queue");
2487 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
2488 (_("Missing element '%s' - check your GStreamer installation."),
2489 "queue"), ("rendering might be suboptimal"));
2490 } else {
2491 g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3,
2492 "max-size-bytes", 0, "max-size-time", (gint64) GST_SECOND,
2493 "silent", TRUE, NULL);
2494 gst_bin_add (bin, chain->queue);
2495 }
2496 /* we have a custom sink, this will be our textsinkpad */
2497 if (gst_element_link_pads_full (chain->queue, "src", chain->sink,
2498 "sink", GST_PAD_LINK_CHECK_TEMPLATE_CAPS)) {
2499 /* we're all fine now and we can add the sink to the chain */
2500 GST_DEBUG_OBJECT (playsink, "using custom text sink");
2501 textsinkpad = gst_element_get_static_pad (chain->queue, "sink");
2502 } else {
2503 GST_WARNING_OBJECT (playsink,
2504 "can't find a sink pad on custom text sink");
2505 gst_bin_remove (bin, chain->sink);
2506 gst_bin_remove (bin, chain->queue);
2507 chain->sink = NULL;
2508 chain->queue = NULL;
2509 }
2510 /* try to set sync to true but it's no biggie when we can't */
2511 if (chain->sink && (elem =
2512 gst_play_sink_find_property_sinks (playsink, chain->sink,
2513 "sync", G_TYPE_BOOLEAN)))
2514 g_object_set (elem, "sync", TRUE, NULL);
2515
2516 if (!textsinkpad)
2517 gst_bin_remove (bin, chain->sink);
2518 } else {
2519 GST_WARNING_OBJECT (playsink,
2520 "can't find async property in custom text sink");
2521 }
2522 }
2523 if (textsinkpad == NULL) {
2524 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
2525 (_("Custom text sink element is not usable.")),
2526 ("fallback to default subtitleoverlay"));
2527 }
2528 }
2529
2530 if (textsinkpad == NULL) {
2531 if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_VIDEO)) {
2532 /* make a little queue */
2533 chain->queue = gst_element_factory_make ("queue", "vqueue");
2534 if (chain->queue == NULL) {
2535 post_missing_element_message (playsink, "queue");
2536 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
2537 (_("Missing element '%s' - check your GStreamer installation."),
2538 "queue"), ("video rendering might be suboptimal"));
2539 } else {
2540 g_object_set (G_OBJECT (chain->queue), "max-size-buffers", 3,
2541 "max-size-bytes", 0, "max-size-time", (gint64) 0,
2542 "silent", TRUE, NULL);
2543 gst_bin_add (bin, chain->queue);
2544 videosinkpad = gst_element_get_static_pad (chain->queue, "sink");
2545 }
2546
2547 chain->overlay =
2548 gst_element_factory_make ("subtitleoverlay", "suboverlay");
2549 if (chain->overlay == NULL) {
2550 post_missing_element_message (playsink, "subtitleoverlay");
2551 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
2552 (_("Missing element '%s' - check your GStreamer installation."),
2553 "subtitleoverlay"), ("subtitle rendering disabled"));
2554 } else {
2555 GstElement *element;
2556
2557 gst_bin_add (bin, chain->overlay);
2558
2559 g_object_set (G_OBJECT (chain->overlay), "silent", FALSE, NULL);
2560 if (playsink->font_desc) {
2561 g_object_set (G_OBJECT (chain->overlay), "font-desc",
2562 playsink->font_desc, NULL);
2563 }
2564 if (playsink->subtitle_encoding) {
2565 g_object_set (G_OBJECT (chain->overlay), "subtitle-encoding",
2566 playsink->subtitle_encoding, NULL);
2567 }
2568
2569 gst_element_link_pads_full (chain->queue, "src", chain->overlay,
2570 "video_sink", GST_PAD_LINK_CHECK_TEMPLATE_CAPS);
2571
2572 /* make another little queue to decouple streams */
2573 element = gst_element_factory_make ("queue", "subqueue");
2574 if (element == NULL) {
2575 post_missing_element_message (playsink, "queue");
2576 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
2577 (_("Missing element '%s' - check your GStreamer installation."),
2578 "queue"), ("rendering might be suboptimal"));
2579 } else {
2580 g_object_set (G_OBJECT (element), "max-size-buffers", 3,
2581 "max-size-bytes", 0, "max-size-time", (gint64) GST_SECOND,
2582 "silent", TRUE, NULL);
2583 gst_bin_add (bin, element);
2584 if (gst_element_link_pads_full (element, "src", chain->overlay,
2585 "subtitle_sink", GST_PAD_LINK_CHECK_TEMPLATE_CAPS)) {
2586 textsinkpad = gst_element_get_static_pad (element, "sink");
2587 srcpad = gst_element_get_static_pad (chain->overlay, "src");
2588 } else {
2589 gst_bin_remove (bin, chain->sink);
2590 gst_bin_remove (bin, chain->overlay);
2591 chain->sink = NULL;
2592 chain->overlay = NULL;
2593 gst_object_unref (videosinkpad);
2594 videosinkpad = NULL;
2595 }
2596 }
2597 }
2598 }
2599 }
2600
2601 if (videosinkpad == NULL) {
2602     /* if we still don't have a video sink pad, we don't have an overlay. The
2603      * only thing we can do is insert an identity element and ghost its src
2604      * and sink pads. */
2605 chain->identity = gst_element_factory_make ("identity", "tidentity");
2606 if (chain->identity == NULL) {
2607 post_missing_element_message (playsink, "identity");
2608 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
2609 (_("Missing element '%s' - check your GStreamer installation."),
2610 "identity"), (NULL));
2611 } else {
2612 g_object_set (chain->identity, "signal-handoffs", FALSE, NULL);
2613 g_object_set (chain->identity, "silent", TRUE, NULL);
2614 gst_bin_add (bin, chain->identity);
2615 srcpad = gst_element_get_static_pad (chain->identity, "src");
2616 videosinkpad = gst_element_get_static_pad (chain->identity, "sink");
2617 }
2618 }
2619
2620 /* expose the ghostpads */
2621 if (videosinkpad) {
2622 chain->videosinkpad = gst_ghost_pad_new ("sink", videosinkpad);
2623 gst_object_unref (videosinkpad);
2624 gst_element_add_pad (chain->chain.bin, chain->videosinkpad);
2625 }
2626 if (textsinkpad) {
2627 chain->textsinkpad = gst_ghost_pad_new ("text_sink", textsinkpad);
2628 gst_object_unref (textsinkpad);
2629
2630 gst_pad_set_event_function (chain->textsinkpad,
2631 GST_DEBUG_FUNCPTR (gst_play_sink_text_sink_event));
2632 gst_pad_set_chain_function (chain->textsinkpad,
2633 GST_DEBUG_FUNCPTR (gst_play_sink_text_sink_chain));
2634
2635 gst_element_add_pad (chain->chain.bin, chain->textsinkpad);
2636 }
2637 if (srcpad) {
2638 chain->srcpad = gst_ghost_pad_new ("src", srcpad);
2639 gst_object_unref (srcpad);
2640
2641 gst_pad_set_event_function (chain->srcpad,
2642 GST_DEBUG_FUNCPTR (gst_play_sink_text_src_event));
2643
2644 gst_element_add_pad (chain->chain.bin, chain->srcpad);
2645 }
2646
2647 return chain;
2648 }
2649
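/* Proxy the volume/mute notifications of whatever element currently provides
 * the "volume" and "mute" properties back to the corresponding properties on
 * the playsink itself, so applications see a consistent value. */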
2650 static void
2651 notify_volume_cb (GObject * object, GParamSpec * pspec, GstPlaySink * playsink)
2652 {
2653 gdouble vol;
2654
2655 g_object_get (object, "volume", &vol, NULL);
2656 playsink->volume = vol;
2657
2658 g_object_notify (G_OBJECT (playsink), "volume");
2659 }
2660
2661 static void
2662 notify_mute_cb (GObject * object, GParamSpec * pspec, GstPlaySink * playsink)
2663 {
2664 gboolean mute;
2665
2666 g_object_get (object, "mute", &mute, NULL);
2667 playsink->mute = mute;
2668
2669 g_object_notify (G_OBJECT (playsink), "mute");
2670 }
2671
2672 /* make the chain that contains the elements needed to perform
2673 * audio playback.
2674 *
2675 * We add a tee as the first element so that we can link the visualisation chain
2676 * to it when requested.
2677 *
2678 * +--------------------------------------------------------------+
2679 * | abin |
2680 * | +----------+ +--------+ +---------+ +-----------+ |
2681 * | | filter | | queue | | convbin | | audiosink | |
2682 * | +-sink src-sink src-sink src-sink | |
2683 * | | +----------+ +--------+ +---------+ +-----------+ |
2684 * sink-+ |
2685 * +--------------------------------------------------------------+
2686 */
2687 static GstPlayAudioChain *
2688 gen_audio_chain (GstPlaySink * playsink, gboolean raw)
2689 {
2690 GstPlayAudioChain *chain;
2691 GstBin *bin;
2692 gboolean have_volume;
2693 GstPad *pad;
2694 GstElement *head, *prev, *elem = NULL;
2695
2696 chain = g_new0 (GstPlayAudioChain, 1);
2697 chain->chain.playsink = playsink;
2698 chain->chain.raw = raw;
2699
2700 GST_DEBUG_OBJECT (playsink, "making audio chain %p", chain);
2701
2702 if (playsink->audio_sink) {
2703 GST_DEBUG_OBJECT (playsink, "trying configured audiosink %" GST_PTR_FORMAT,
2704 playsink->audio_sink);
2705 chain->sink = try_element (playsink, playsink->audio_sink, FALSE);
2706 } else {
2707 /* only try fallback if no specific sink was chosen */
2708 if (chain->sink == NULL) {
2709 GST_DEBUG_OBJECT (playsink, "trying autoaudiosink");
2710 elem = gst_element_factory_make ("autoaudiosink", "audiosink");
2711 chain->sink = try_element (playsink, elem, TRUE);
2712 }
2713 if (chain->sink == NULL) {
2714       /* if the default sink from config.h is different, try it too */
2715 if (strcmp (DEFAULT_AUDIOSINK, "autoaudiosink")) {
2716 GST_DEBUG_OBJECT (playsink, "trying " DEFAULT_AUDIOSINK);
2717 elem = gst_element_factory_make (DEFAULT_AUDIOSINK, "audiosink");
2718 chain->sink = try_element (playsink, elem, TRUE);
2719 }
2720 }
2721 if (chain->sink)
2722 playsink->audio_sink = gst_object_ref (chain->sink);
2723 }
2724 if (chain->sink == NULL)
2725 goto no_sinks;
2726
2727 chain->chain.bin = gst_bin_new ("abin");
2728 bin = GST_BIN_CAST (chain->chain.bin);
2729 gst_object_ref_sink (bin);
2730 gst_bin_add (bin, chain->sink);
2731
2732 head = chain->sink;
2733 prev = NULL;
2734
2735 /* add the audio filter first, so everything is working with post-filter
2736 * samples */
2737 chain->filter = gst_play_sink_get_filter (playsink,
2738 GST_PLAY_SINK_TYPE_AUDIO_RAW);
2739 if (chain->filter) {
2740 if (!raw) {
2741 gst_object_unref (chain->filter);
2742 chain->filter = NULL;
2743
2744 if (playsink->flags & GST_PLAY_FLAG_FORCE_FILTERS) {
2745 goto filter_with_nonraw;
2746 } else {
2747 GST_DEBUG_OBJECT (playsink,
2748 "skipping audio filter since we're not raw");
2749 }
2750 } else {
2751 GST_DEBUG_OBJECT (playsink, "adding audio filter");
2752 chain->filter_conv =
2753 gst_element_factory_make ("audioconvert", "filter-convert");
2754 if (!chain->filter_conv) {
2755 post_missing_element_message (playsink, "audioconvert");
2756 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
2757 (_("Missing element '%s' - check your GStreamer installation."),
2758 "audioconvert"),
2759 ("audio playback and visualizations might not work"));
2760 } else {
2761 gst_bin_add (bin, chain->filter_conv);
2762 head = prev = chain->filter_conv;
2763 }
2764
2765 gst_bin_add (bin, chain->filter);
2766 /* Bin takes a new reference because we sinked any
2767 * floating reference ourselves already */
2768 gst_object_unref (chain->filter);
2769 if (prev) {
2770 if (!gst_element_link_pads_full (prev, "src", chain->filter, NULL,
2771 GST_PAD_LINK_CHECK_TEMPLATE_CAPS)) {
2772 goto link_failed;
2773 }
2774 } else {
2775 head = chain->filter;
2776 }
2777 prev = chain->filter;
2778 }
2779 }
2780
2781 /* we have to add a queue when we need to decouple for the video sink in
2782 * visualisations and for streamsynchronizer */
2783 GST_DEBUG_OBJECT (playsink, "adding audio queue");
2784 chain->queue = gst_element_factory_make ("queue", "aqueue");
2785 if (chain->queue == NULL) {
2786 post_missing_element_message (playsink, "queue");
2787 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
2788 (_("Missing element '%s' - check your GStreamer installation."),
2789 "queue"), ("audio playback and visualizations might not work"));
2790 } else {
2791 g_object_set (chain->queue, "silent", TRUE, NULL);
2792 gst_bin_add (bin, chain->queue);
2793 if (prev) {
2794 if (!gst_element_link_pads_full (prev, "src", chain->queue, "sink",
2795 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
2796 goto link_failed;
2797 } else {
2798 head = chain->queue;
2799 }
2800 prev = chain->queue;
2801 }
2802
2803 /* find ts-offset element */
2804 gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *)
2805 gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset",
2806 G_TYPE_INT64));
2807
2808 /* check if the sink, or something within the sink, implements the
2809 * streamvolume interface. If it does we don't need to add a volume element. */
2810 if (GST_IS_BIN (chain->sink))
2811 elem =
2812 gst_bin_get_by_interface (GST_BIN_CAST (chain->sink),
2813 GST_TYPE_STREAM_VOLUME);
2814 else if (GST_IS_STREAM_VOLUME (chain->sink))
2815 elem = gst_object_ref (chain->sink);
2816 else
2817 elem = NULL;
2818 chain->notify_volume_id = chain->notify_mute_id = 0;
2819 if (elem) {
2820 chain->volume = elem;
2821
2822 chain->notify_volume_id = g_signal_connect (chain->volume, "notify::volume",
2823 G_CALLBACK (notify_volume_cb), playsink);
2824
2825 GST_DEBUG_OBJECT (playsink, "the sink has a volume property");
2826 have_volume = TRUE;
2827 chain->sink_volume = TRUE;
2828 chain->notify_mute_id = g_signal_connect (chain->volume, "notify::mute",
2829 G_CALLBACK (notify_mute_cb), playsink);
2830 /* use the sink to control the volume and mute */
2831 if (playsink->volume_changed) {
2832 g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume, NULL);
2833 playsink->volume_changed = FALSE;
2834 }
2835 if (playsink->mute_changed) {
2836 g_object_set (chain->volume, "mute", playsink->mute, NULL);
2837 playsink->mute_changed = FALSE;
2838 }
2839 } else {
2840 /* no volume, we need to add a volume element when we can */
2841 GST_DEBUG_OBJECT (playsink, "the sink has no volume property");
2842 have_volume = FALSE;
2843 chain->sink_volume = FALSE;
2844 }
2845
2846 if (!(playsink->flags & GST_PLAY_FLAG_NATIVE_AUDIO) || (!have_volume
2847 && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME))) {
2848 gboolean use_converters = !(playsink->flags & GST_PLAY_FLAG_NATIVE_AUDIO);
2849 gboolean use_volume =
2850 !have_volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME);
2851 GST_DEBUG_OBJECT (playsink,
2852 "creating audioconvert with use-converters %d, use-volume %d",
2853 use_converters, use_volume);
2854 chain->conv =
2855 g_object_new (GST_TYPE_PLAY_SINK_AUDIO_CONVERT, "name", "aconv",
2856 "use-converters", use_converters, "use-volume", use_volume, NULL);
2857 gst_bin_add (bin, chain->conv);
2858 if (prev) {
2859 if (!gst_element_link_pads_full (prev, "src", chain->conv, "sink",
2860 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
2861 goto link_failed;
2862 } else {
2863 head = chain->conv;
2864 }
2865 prev = chain->conv;
2866
2867 if (!have_volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) {
2868 GstPlaySinkAudioConvert *conv =
2869 GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv);
2870
2871 if (conv->volume) {
2872 chain->volume = conv->volume;
2873 have_volume = TRUE;
2874
2875 chain->notify_volume_id =
2876 g_signal_connect (chain->volume, "notify::volume",
2877 G_CALLBACK (notify_volume_cb), playsink);
2878
2879 /* volume also has the mute property */
2880 chain->notify_mute_id = g_signal_connect (chain->volume, "notify::mute",
2881 G_CALLBACK (notify_mute_cb), playsink);
2882
2883 /* configure with the latest volume and mute */
2884 g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume,
2885 NULL);
2886 g_object_set (G_OBJECT (chain->volume), "mute", playsink->mute, NULL);
2887 }
2888 }
2889 }
2890
2891 if (prev) {
2892 /* we only have to link to the previous element if we have something in
2893 * front of the sink */
2894 GST_DEBUG_OBJECT (playsink, "linking to sink");
2895 if (!gst_element_link_pads_full (prev, "src", chain->sink, NULL,
2896 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
2897 goto link_failed;
2898 }
2899
2900 /* post a warning if we have no way to configure the volume */
2901 if (!have_volume) {
2902 GST_ELEMENT_WARNING (playsink, STREAM, NOT_IMPLEMENTED,
2903 (_("No volume control found")), ("Volume/mute is not available"));
2904 }
2905
2906 /* and ghost the sinkpad of the headmost element */
2907 GST_DEBUG_OBJECT (playsink, "ghosting sink pad");
2908 pad = gst_element_get_static_pad (head, "sink");
2909 chain->sinkpad = gst_ghost_pad_new ("sink", pad);
2910
2911 /* sending audio/video flushes break stream changes when the pipeline
2912 * is paused and played again in 0.10 */
2913 #if 0
2914 gst_pad_set_event_function (chain->sinkpad,
2915 GST_DEBUG_FUNCPTR (gst_play_sink_audio_sink_event));
2916 gst_pad_set_chain_function (chain->sinkpad,
2917 GST_DEBUG_FUNCPTR (gst_play_sink_audio_sink_chain));
2918 #endif
2919
2920 gst_object_unref (pad);
2921 gst_element_add_pad (chain->chain.bin, chain->sinkpad);
2922
2923 return chain;
2924
2925 /* ERRORS */
2926 no_sinks:
2927 {
2928 if (!elem && !playsink->audio_sink) {
2929 post_missing_element_message (playsink, "autoaudiosink");
2930 if (strcmp (DEFAULT_AUDIOSINK, "autoaudiosink")) {
2931 post_missing_element_message (playsink, DEFAULT_AUDIOSINK);
2932 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
2933 (_("Both autoaudiosink and %s elements are missing."),
2934 DEFAULT_AUDIOSINK), (NULL));
2935 } else {
2936 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
2937 (_("The autoaudiosink element is missing.")), (NULL));
2938 }
2939 } else {
2940 if (playsink->audio_sink) {
2941 GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE,
2942 (_("Configured audiosink %s is not working."),
2943 GST_ELEMENT_NAME (playsink->audio_sink)), (NULL));
2944 } else if (strcmp (DEFAULT_AUDIOSINK, "autoaudiosink")) {
2945 GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE,
2946 (_("Both autoaudiosink and %s elements are not working."),
2947 DEFAULT_AUDIOSINK), (NULL));
2948 } else {
2949 GST_ELEMENT_ERROR (playsink, CORE, STATE_CHANGE,
2950 (_("The autoaudiosink element is not working.")), (NULL));
2951 }
2952 }
2953 free_chain ((GstPlayChain *) chain);
2954 return NULL;
2955 }
2956 link_failed:
2957 {
2958 GST_ELEMENT_ERROR (playsink, CORE, PAD,
2959 (NULL), ("Failed to configure the audio sink."));
2960 goto cleanup;
2961 }
2962 filter_with_nonraw:
2963 {
2964 GST_ELEMENT_ERROR (playsink, CORE, NEGOTIATION,
2965         (NULL), ("Cannot apply audio-filter on non-raw stream"));
2966 goto cleanup;
2967 }
2968 cleanup:
2969 /* checking sink made it READY */
2970 gst_element_set_state (chain->sink, GST_STATE_NULL);
2971 /* Remove chain from the bin to allow reuse later */
2972 gst_bin_remove (bin, chain->sink);
2973 free_chain ((GstPlayChain *) chain);
2974 return NULL;
2975 }
2976
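/* Reconfigure an already existing audio chain for reuse: force a rebuild when
 * a filter is present and raw-ness changed, otherwise bring the sink back to
 * READY, re-resolve ts-offset and decide whether volume/mute are controlled by
 * the sink's own streamvolume interface or by the soft-volume element inside
 * the convert bin. */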
2977 static gboolean
2978 setup_audio_chain (GstPlaySink * playsink, gboolean raw)
2979 {
2980 GstElement *elem;
2981 GstPlayAudioChain *chain;
2982 GstStateChangeReturn ret;
2983 GstPlaySinkAudioConvert *conv;
2984
2985 chain = playsink->audiochain;
2986 conv = GST_PLAY_SINK_AUDIO_CONVERT_CAST (chain->conv);
2987
2988 /* if we have a filter, and raw-ness changed, we have to force a rebuild */
2989 if (chain->filter && chain->chain.raw != raw)
2990 return FALSE;
2991
2992 chain->chain.raw = raw;
2993
2994 /* if the chain was active we don't do anything */
2995 if (GST_PLAY_CHAIN (chain)->activated)
2996 return TRUE;
2997
2998 /* try to set the sink element to READY again */
2999 ret = gst_element_set_state (chain->sink, GST_STATE_READY);
3000 if (ret == GST_STATE_CHANGE_FAILURE)
3001 return FALSE;
3002
3003 /* find ts-offset element */
3004 gst_object_replace ((GstObject **) & chain->ts_offset, (GstObject *)
3005 gst_play_sink_find_property_sinks (playsink, chain->sink, "ts-offset",
3006 G_TYPE_INT64));
3007
3008 /* Disconnect signals */
3009 disconnect_audio_chain (chain, playsink);
3010
3011 /* check if the sink, or something within the sink, implements the
3012 * streamvolume interface. If it does we don't need to add a volume element. */
3013 if (GST_IS_BIN (chain->sink))
3014 elem =
3015 gst_bin_get_by_interface (GST_BIN_CAST (chain->sink),
3016 GST_TYPE_STREAM_VOLUME);
3017 else if (GST_IS_STREAM_VOLUME (chain->sink))
3018 elem = gst_object_ref (chain->sink);
3019 else
3020 elem = NULL;
3021 if (elem) {
3022 chain->volume = elem;
3023
3024 if (playsink->volume_changed) {
3025 GST_DEBUG_OBJECT (playsink, "the sink has a volume property, setting %f",
3026 playsink->volume);
3027 /* use the sink to control the volume */
3028 g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume, NULL);
3029 playsink->volume_changed = FALSE;
3030 }
3031
3032 chain->notify_volume_id = g_signal_connect (chain->volume, "notify::volume",
3033 G_CALLBACK (notify_volume_cb), playsink);
3034 chain->notify_mute_id = g_signal_connect (chain->volume, "notify::mute",
3035 G_CALLBACK (notify_mute_cb), playsink);
3036 g_object_set (chain->volume, "mute", playsink->mute, NULL);
3037 playsink->mute_changed = FALSE;
3038
3039 g_object_set (chain->conv, "use-volume", FALSE, NULL);
3040 } else if (conv) {
3041 /* no volume, we need to add a volume element when we can */
3042 g_object_set (chain->conv, "use-volume",
3043 ! !(playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME), NULL);
3044 GST_DEBUG_OBJECT (playsink, "the sink has no volume property");
3045
3046 if (conv->volume && (playsink->flags & GST_PLAY_FLAG_SOFT_VOLUME)) {
3047 chain->volume = conv->volume;
3048
3049 chain->notify_volume_id =
3050 g_signal_connect (chain->volume, "notify::volume",
3051 G_CALLBACK (notify_volume_cb), playsink);
3052
3053 chain->notify_mute_id = g_signal_connect (chain->volume, "notify::mute",
3054 G_CALLBACK (notify_mute_cb), playsink);
3055
3056 /* configure with the latest volume and mute */
3057 g_object_set (G_OBJECT (chain->volume), "volume", playsink->volume, NULL);
3058 g_object_set (G_OBJECT (chain->volume), "mute", playsink->mute, NULL);
3059 }
3060
3061 GST_DEBUG_OBJECT (playsink, "reusing existing volume element");
3062 }
3063 return TRUE;
3064 }
3065
3066 /*
3067 * +-------------------------------------------------------------------+
3068 * | visbin |
3069 * | +----------+ +------------+ +----------+ +-------+ |
3070 * | | visqueue | | audioconv | | audiores | | vis | |
3071 * | +-sink src-sink + samp src-sink src-sink src-+ |
3072 * | | +----------+ +------------+ +----------+ +-------+ | |
3073 * sink-+ +-src
3074 * +-------------------------------------------------------------------+
3075 *
3076 */
3077 static GstPlayVisChain *
3078 gen_vis_chain (GstPlaySink * playsink)
3079 {
3080 GstPlayVisChain *chain;
3081 GstBin *bin;
3082 gboolean res;
3083 GstPad *pad;
3084 GstElement *elem;
3085
3086 chain = g_new0 (GstPlayVisChain, 1);
3087 chain->chain.playsink = playsink;
3088
3089 GST_DEBUG_OBJECT (playsink, "making vis chain %p", chain);
3090
3091 chain->chain.bin = gst_bin_new ("visbin");
3092 bin = GST_BIN_CAST (chain->chain.bin);
3093 gst_object_ref_sink (bin);
3094
3095   /* we're queuing raw audio here; we can remove this queue once we can disable
3096    * async behaviour in the video sink. */
3097 chain->queue = gst_element_factory_make ("queue", "visqueue");
3098 if (chain->queue == NULL)
3099 goto no_queue;
3100 g_object_set (chain->queue, "silent", TRUE, NULL);
3101 gst_bin_add (bin, chain->queue);
3102
3103 chain->conv = gst_element_factory_make ("audioconvert", "aconv");
3104 if (chain->conv == NULL)
3105 goto no_audioconvert;
3106 gst_bin_add (bin, chain->conv);
3107
3108 chain->resample = gst_element_factory_make ("audioresample", "aresample");
3109 if (chain->resample == NULL)
3110 goto no_audioresample;
3111 gst_bin_add (bin, chain->resample);
3112
3113 /* this pad will be used for blocking the dataflow and switching the vis
3114 * plugin, we block right after the queue, this makes it possible for the
3115 * resample and convert to convert to a format supported by the new vis
3116 * plugin */
3117 chain->blockpad = gst_element_get_static_pad (chain->queue, "src");
3118 /* this is the pad where the vis is linked to */
3119 chain->vispeerpad = gst_element_get_static_pad (chain->resample, "src");
3120
3121 if (playsink->visualisation) {
3122 GST_DEBUG_OBJECT (playsink, "trying configure vis");
3123 chain->vis = try_element (playsink, playsink->visualisation, FALSE);
3124 }
3125 if (chain->vis == NULL) {
3126 GST_DEBUG_OBJECT (playsink, "trying goom");
3127 elem = gst_element_factory_make ("goom", "vis");
3128 chain->vis = try_element (playsink, elem, TRUE);
3129 gst_object_replace ((GstObject **) & playsink->visualisation,
3130 (GstObject *) elem);
3131 }
3132 if (chain->vis == NULL)
3133 goto no_goom;
3134
3135 gst_bin_add (bin, chain->vis);
3136
3137 res = gst_element_link_pads_full (chain->queue, "src", chain->conv, "sink",
3138 GST_PAD_LINK_CHECK_NOTHING);
3139 res &=
3140 gst_element_link_pads_full (chain->conv, "src", chain->resample, "sink",
3141 GST_PAD_LINK_CHECK_NOTHING);
3142 res &=
3143 gst_element_link_pads_full (chain->resample, "src", chain->vis, "sink",
3144 GST_PAD_LINK_CHECK_NOTHING);
3145 if (!res)
3146 goto link_failed;
3147
3148 chain->vissinkpad = gst_element_get_static_pad (chain->vis, "sink");
3149 chain->vissrcpad = gst_element_get_static_pad (chain->vis, "src");
3150
3151 pad = gst_element_get_static_pad (chain->queue, "sink");
3152 chain->sinkpad = gst_ghost_pad_new ("sink", pad);
3153 gst_object_unref (pad);
3154 gst_element_add_pad (chain->chain.bin, chain->sinkpad);
3155
3156 chain->srcpad = gst_ghost_pad_new ("src", chain->vissrcpad);
3157 gst_element_add_pad (chain->chain.bin, chain->srcpad);
3158
3159 return chain;
3160
3161 /* ERRORS */
3162 no_queue:
3163 {
3164 post_missing_element_message (playsink, "queue");
3165 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
3166 (_("Missing element '%s' - check your GStreamer installation."),
3167 "queue"), (NULL));
3168 free_chain ((GstPlayChain *) chain);
3169 return NULL;
3170 }
3171 no_audioconvert:
3172 {
3173 post_missing_element_message (playsink, "audioconvert");
3174 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
3175 (_("Missing element '%s' - check your GStreamer installation."),
3176 "audioconvert"), ("make sure audioconvert isn't blacklisted"));
3177 free_chain ((GstPlayChain *) chain);
3178 return NULL;
3179 }
3180 no_audioresample:
3181 {
3182 post_missing_element_message (playsink, "audioresample");
3183 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
3184 (_("Missing element '%s' - check your GStreamer installation."),
3185 "audioresample"), (NULL));
3186 free_chain ((GstPlayChain *) chain);
3187 return NULL;
3188 }
3189 no_goom:
3190 {
3191 post_missing_element_message (playsink, "goom");
3192 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
3193 (_("Missing element '%s' - check your GStreamer installation."),
3194 "goom"), (NULL));
3195 free_chain ((GstPlayChain *) chain);
3196 return NULL;
3197 }
3198 link_failed:
3199 {
3200 GST_ELEMENT_ERROR (playsink, CORE, PAD,
3201 (NULL), ("Failed to configure the visualisation element."));
3202 /* element made it to READY */
3203 gst_element_set_state (chain->vis, GST_STATE_NULL);
3204 free_chain ((GstPlayChain *) chain);
3205 return NULL;
3206 }
3207 }
3208
3209 /* this function is called when all the request pads are requested and when we
3210 * have to construct the final pipeline. Based on the flags we construct the
3211 * final output pipelines.
3212 */
3213 static gboolean
3214 gst_play_sink_do_reconfigure (GstPlaySink * playsink)
3215 {
3216 GstPlayFlags flags;
3217 gboolean need_audio, need_video, need_deinterlace, need_vis, need_text;
3218
3219 GST_DEBUG_OBJECT (playsink, "reconfiguring");
3220
3221 /* assume we need nothing */
3222 need_audio = need_video = need_deinterlace = need_vis = need_text = FALSE;
3223
3224 GST_PLAY_SINK_LOCK (playsink);
3225 GST_OBJECT_LOCK (playsink);
3226   /* get flags; they are protected with the object lock */
3227 flags = playsink->flags;
3228 GST_OBJECT_UNLOCK (playsink);
3229
3230 /* figure out which components we need */
3231 if (flags & GST_PLAY_FLAG_TEXT && playsink->text_pad) {
3232 /* we have subtitles and we are requested to show it */
3233 need_text = TRUE;
3234 }
3235
3236 if (((flags & GST_PLAY_FLAG_VIDEO)
3237 || (flags & GST_PLAY_FLAG_NATIVE_VIDEO)) && playsink->video_pad) {
3238 /* we have video and we are requested to show it */
3239 need_video = TRUE;
3240
3241 /* we only deinterlace if native video is not requested and
3242 * we have raw video */
3243 if ((flags & GST_PLAY_FLAG_DEINTERLACE)
3244 && !(flags & GST_PLAY_FLAG_NATIVE_VIDEO) && playsink->video_pad_raw)
3245 need_deinterlace = TRUE;
3246 }
3247
3248 if (playsink->audio_pad) {
3249 if ((flags & GST_PLAY_FLAG_AUDIO) || (flags & GST_PLAY_FLAG_NATIVE_AUDIO)) {
3250 need_audio = TRUE;
3251 }
3252 if (playsink->audio_pad_raw) {
3253       /* we can only do visualisations with raw uncompressed audio */
3254 if (flags & GST_PLAY_FLAG_VIS && !need_video) {
3255 /* also add video when we add visualisation */
3256 need_video = TRUE;
3257 need_vis = TRUE;
3258 }
3259 }
3260 }
3261
3262 /* we have a text_pad and we need text rendering, in this case we need a
3263 * video_pad to combine the video with the text or visualizations */
3264 if (need_text && !need_video && !playsink->text_sink) {
3265 if (playsink->video_pad) {
3266 need_video = TRUE;
3267 } else if (need_audio) {
3268 GST_ELEMENT_WARNING (playsink, STREAM, FORMAT,
3269 (_("Can't play a text file without video or visualizations.")),
3270 ("Have text pad but no video pad or visualizations"));
3271 need_text = FALSE;
3272 } else {
3273 GST_ELEMENT_ERROR (playsink, STREAM, FORMAT,
3274 (_("Can't play a text file without video or visualizations.")),
3275 ("Have text pad but no video pad or visualizations"));
3276 GST_PLAY_SINK_UNLOCK (playsink);
3277 return FALSE;
3278 }
3279 }
3280
3281 GST_DEBUG_OBJECT (playsink, "audio:%d, video:%d, vis:%d, text:%d", need_audio,
3282 need_video, need_vis, need_text);
3283
3284 /* set up video pipeline */
3285 if (need_video) {
3286 gboolean raw, async;
3287
3288 /* we need a raw sink when we do vis or when we have a raw pad */
3289 raw = need_vis ? TRUE : playsink->video_pad_raw;
3290 /* we try to set the sink async=FALSE when we need vis, this way we can
3291 * avoid a queue in the audio chain. */
3292 async = !need_vis;
3293
3294 GST_DEBUG_OBJECT (playsink, "adding video, raw %d",
3295 playsink->video_pad_raw);
3296
3297 if (playsink->videochain) {
3298 /* try to reactivate the chain */
3299 if ((playsink->video_sink
3300 && playsink->video_sink != playsink->videochain->sink)
3301 || (playsink->video_filter
3302 && playsink->video_filter != playsink->videochain->filter)
3303 || !setup_video_chain (playsink, raw, async)) {
3304 if (playsink->video_sinkpad_stream_synchronizer) {
3305 gst_element_release_request_pad (GST_ELEMENT_CAST
3306 (playsink->stream_synchronizer),
3307 playsink->video_sinkpad_stream_synchronizer);
3308 gst_object_unref (playsink->video_sinkpad_stream_synchronizer);
3309 playsink->video_sinkpad_stream_synchronizer = NULL;
3310 gst_object_unref (playsink->video_srcpad_stream_synchronizer);
3311 playsink->video_srcpad_stream_synchronizer = NULL;
3312 }
3313
3314 add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
3315
3316 /* Remove the sink from the bin to keep its state
3317 * and unparent it to allow reuse */
3318 if (playsink->videochain->sink) {
3319 if (playsink->videochain->sink != playsink->video_sink)
3320 gst_element_set_state (playsink->videochain->sink, GST_STATE_NULL);
3321 gst_bin_remove (GST_BIN_CAST (playsink->videochain->chain.bin),
3322 playsink->videochain->sink);
3323 }
3324
3325 /* Remove the filter from the bin to keep its state
3326 * and unparent it to allow reuse */
3327 if (playsink->videochain->filter) {
3328 if (playsink->videochain->filter != playsink->video_filter)
3329 gst_element_set_state (playsink->videochain->filter,
3330 GST_STATE_NULL);
3331 gst_bin_remove (GST_BIN_CAST (playsink->videochain->chain.bin),
3332 playsink->videochain->filter);
3333 }
3334
3335 activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
3336 free_chain ((GstPlayChain *) playsink->videochain);
3337 playsink->videochain = NULL;
3338
3339 GST_OBJECT_LOCK (playsink);
3340 if (playsink->overlay_element)
3341 gst_object_unref (playsink->overlay_element);
3342 playsink->overlay_element = NULL;
3343
3344 if (playsink->colorbalance_element) {
3345 g_signal_handler_disconnect (playsink->colorbalance_element,
3346 playsink->colorbalance_value_changed_id);
3347 playsink->colorbalance_value_changed_id = 0;
3348 gst_object_unref (playsink->colorbalance_element);
3349 }
3350 playsink->colorbalance_element = NULL;
3351 GST_OBJECT_UNLOCK (playsink);
3352 }
3353 }
3354
3355 if (!playsink->videochain)
3356 playsink->videochain = gen_video_chain (playsink, raw, async);
3357 if (!playsink->videochain)
3358 goto no_chain;
3359
3360 if (!playsink->video_sinkpad_stream_synchronizer) {
3361 GValue item = { 0, };
3362 GstIterator *it;
3363
3364 playsink->video_sinkpad_stream_synchronizer =
3365 gst_element_get_request_pad (GST_ELEMENT_CAST
3366 (playsink->stream_synchronizer), "sink_%u");
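      /* the requested sinkpad has exactly one internal link inside the
       * stream synchronizer: the matching srcpad; walk the internal-links
       * iterator to fetch it */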
3367 it = gst_pad_iterate_internal_links
3368 (playsink->video_sinkpad_stream_synchronizer);
3369 g_assert (it);
3370 gst_iterator_next (it, &item);
3371 playsink->video_srcpad_stream_synchronizer = g_value_dup_object (&item);
3372 g_value_unset (&item);
3373 g_assert (playsink->video_srcpad_stream_synchronizer);
3374 gst_iterator_free (it);
3375 }
3376
3377 if (playsink->video_pad)
3378 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad),
3379 playsink->video_sinkpad_stream_synchronizer);
3380
3381 if (need_deinterlace) {
3382 if (!playsink->videodeinterlacechain)
3383 playsink->videodeinterlacechain =
3384 gen_video_deinterlace_chain (playsink);
3385 if (!playsink->videodeinterlacechain)
3386 goto no_chain;
3387
3388 GST_DEBUG_OBJECT (playsink, "adding video deinterlace chain");
3389
3390 GST_DEBUG_OBJECT (playsink, "setting up deinterlacing chain");
3391
3392 add_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), TRUE);
3393 activate_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), TRUE);
3394
3395 gst_pad_unlink (playsink->video_srcpad_stream_synchronizer,
3396 playsink->videochain->sinkpad);
3397 gst_pad_link_full (playsink->video_srcpad_stream_synchronizer,
3398 playsink->videodeinterlacechain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
3399 } else {
3400 if (playsink->videodeinterlacechain) {
3401 add_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), FALSE);
3402 activate_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain),
3403 FALSE);
3404 }
3405 }
3406
3407 GST_DEBUG_OBJECT (playsink, "adding video chain");
3408 add_chain (GST_PLAY_CHAIN (playsink->videochain), TRUE);
3409 activate_chain (GST_PLAY_CHAIN (playsink->videochain), TRUE);
3410 /* if we are not part of vis or subtitles, set the ghostpad target */
3411 if (!need_vis && !need_text && (!playsink->textchain
3412 || !playsink->text_pad)) {
3413 GST_DEBUG_OBJECT (playsink, "ghosting video sinkpad");
3414 gst_pad_unlink (playsink->video_srcpad_stream_synchronizer,
3415 playsink->videochain->sinkpad);
3416 if (playsink->videodeinterlacechain
3417 && playsink->videodeinterlacechain->srcpad)
3418 gst_pad_unlink (playsink->videodeinterlacechain->srcpad,
3419 playsink->videochain->sinkpad);
3420 if (need_deinterlace)
3421 gst_pad_link_full (playsink->videodeinterlacechain->srcpad,
3422 playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
3423 else
3424 gst_pad_link_full (playsink->video_srcpad_stream_synchronizer,
3425 playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
3426 }
3427 } else {
3428 GST_DEBUG_OBJECT (playsink, "no video needed");
3429 if (playsink->videochain) {
3430 GST_DEBUG_OBJECT (playsink, "removing video chain");
3431 if (playsink->vischain) {
3432 GstPad *srcpad;
3433
3434 GST_DEBUG_OBJECT (playsink, "unlinking vis chain");
3435
3436 /* also had visualisation, release the tee srcpad before we then
3437 * unlink the video from it */
3438 if (playsink->audio_tee_vissrc) {
3439 gst_element_release_request_pad (playsink->audio_tee,
3440 playsink->audio_tee_vissrc);
3441 gst_object_unref (playsink->audio_tee_vissrc);
3442 playsink->audio_tee_vissrc = NULL;
3443 }
3444 srcpad =
3445 gst_element_get_static_pad (playsink->vischain->chain.bin, "src");
3446 gst_pad_unlink (srcpad, playsink->videochain->sinkpad);
3447 }
3448
3449 if (playsink->video_sinkpad_stream_synchronizer) {
3450 gst_element_release_request_pad (GST_ELEMENT_CAST
3451 (playsink->stream_synchronizer),
3452 playsink->video_sinkpad_stream_synchronizer);
3453 gst_object_unref (playsink->video_sinkpad_stream_synchronizer);
3454 playsink->video_sinkpad_stream_synchronizer = NULL;
3455 gst_object_unref (playsink->video_srcpad_stream_synchronizer);
3456 playsink->video_srcpad_stream_synchronizer = NULL;
3457 }
3458
3459 add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
3460 activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
3461 if (playsink->videochain->ts_offset)
3462 gst_object_unref (playsink->videochain->ts_offset);
3463 playsink->videochain->ts_offset = NULL;
3464 }
3465
3466 if (playsink->videodeinterlacechain) {
3467 add_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), FALSE);
3468 activate_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), FALSE);
3469 }
3470
3471 if (playsink->video_pad)
3472 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad), NULL);
3473
3474 GST_OBJECT_LOCK (playsink);
3475 if (playsink->overlay_element)
3476 gst_object_unref (playsink->overlay_element);
3477 playsink->overlay_element = NULL;
3478
3479 if (playsink->colorbalance_element) {
3480 g_signal_handler_disconnect (playsink->colorbalance_element,
3481 playsink->colorbalance_value_changed_id);
3482 playsink->colorbalance_value_changed_id = 0;
3483 gst_object_unref (playsink->colorbalance_element);
3484 }
3485 playsink->colorbalance_element = NULL;
3486 GST_OBJECT_UNLOCK (playsink);
3487
3488 if (playsink->video_sink)
3489 gst_element_set_state (playsink->video_sink, GST_STATE_NULL);
3490 if (playsink->video_filter)
3491 gst_element_set_state (playsink->video_filter, GST_STATE_NULL);
3492 }
3493
3494 if (need_audio) {
3495 gboolean raw;
3496
3497 GST_DEBUG_OBJECT (playsink, "adding audio");
3498
3499 /* get a raw sink if we are asked for a raw pad */
3500 raw = playsink->audio_pad_raw;
3501
3502 if (playsink->audiochain) {
3503 /* try to reactivate the chain */
3504 if ((playsink->audio_sink
3505 && playsink->audio_sink != playsink->audiochain->sink)
3506 || (playsink->audio_filter
3507 && playsink->audio_filter != playsink->audiochain->filter)
3508 || !setup_audio_chain (playsink, raw)) {
3509 GST_DEBUG_OBJECT (playsink, "removing current audio chain");
3510 if (playsink->audio_tee_asrc) {
3511 gst_element_release_request_pad (playsink->audio_tee,
3512 playsink->audio_tee_asrc);
3513 gst_object_unref (playsink->audio_tee_asrc);
3514 playsink->audio_tee_asrc = NULL;
3515 }
3516
3517 if (playsink->audio_sinkpad_stream_synchronizer) {
3518 gst_element_release_request_pad (GST_ELEMENT_CAST
3519 (playsink->stream_synchronizer),
3520 playsink->audio_sinkpad_stream_synchronizer);
3521 gst_object_unref (playsink->audio_sinkpad_stream_synchronizer);
3522 playsink->audio_sinkpad_stream_synchronizer = NULL;
3523 gst_object_unref (playsink->audio_srcpad_stream_synchronizer);
3524 playsink->audio_srcpad_stream_synchronizer = NULL;
3525
3526 gst_play_sink_remove_audio_ssync_queue (playsink);
3527 }
3528
3529 add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
3530
3531 /* Remove the sink from the bin to keep its state
3532 * and unparent it to allow reuse */
3533 if (playsink->audiochain->sink) {
3534 if (playsink->audiochain->sink != playsink->audio_sink)
3535 gst_element_set_state (playsink->audiochain->sink, GST_STATE_NULL);
3536 gst_bin_remove (GST_BIN_CAST (playsink->audiochain->chain.bin),
3537 playsink->audiochain->sink);
3538 }
3539
3540 /* Remove the filter from the bin to keep its state
3541 * and unparent it to allow reuse */
3542 if (playsink->audiochain->filter) {
3543 if (playsink->audiochain->filter != playsink->audio_filter)
3544 gst_element_set_state (playsink->audiochain->filter,
3545 GST_STATE_NULL);
3546 gst_bin_remove (GST_BIN_CAST (playsink->audiochain->chain.bin),
3547 playsink->audiochain->filter);
3548 }
3549
3550 activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
3551 disconnect_audio_chain (playsink->audiochain, playsink);
3552 if (playsink->audiochain->volume)
3553 gst_object_unref (playsink->audiochain->volume);
3554 playsink->audiochain->volume = NULL;
3555 if (playsink->audiochain->ts_offset)
3556 gst_object_unref (playsink->audiochain->ts_offset);
3557 playsink->audiochain->ts_offset = NULL;
3558 free_chain ((GstPlayChain *) playsink->audiochain);
3559 playsink->audiochain = NULL;
3560 playsink->volume_changed = playsink->mute_changed = FALSE;
3561 }
3562 }
3563
3564 if (!playsink->audiochain) {
3565 GST_DEBUG_OBJECT (playsink, "creating new audio chain");
3566 playsink->audiochain = gen_audio_chain (playsink, raw);
3567 }
3568
3569 if (!playsink->audiochain)
3570 goto no_chain;
3571
3572 if (!playsink->audio_sinkpad_stream_synchronizer) {
3573 GValue item = { 0, };
3574 GstIterator *it;
3575
3576 playsink->audio_sinkpad_stream_synchronizer =
3577 gst_element_get_request_pad (GST_ELEMENT_CAST
3578 (playsink->stream_synchronizer), "sink_%u");
3579 it = gst_pad_iterate_internal_links
3580 (playsink->audio_sinkpad_stream_synchronizer);
3581 g_assert (it);
3582 gst_iterator_next (it, &item);
3583 playsink->audio_srcpad_stream_synchronizer = g_value_dup_object (&item);
3584 g_value_unset (&item);
3585 g_assert (playsink->audio_srcpad_stream_synchronizer);
3586 gst_iterator_free (it);
3587 }
3588
3589 if (need_vis) {
3590 GstPad *audio_queue_srcpad;
3591
3592 if (gst_pad_is_linked (playsink->audio_sinkpad_stream_synchronizer)) {
3593 GstPad *peer_pad =
3594 gst_pad_get_peer (playsink->audio_sinkpad_stream_synchronizer);
3595 gst_pad_unlink (peer_pad, playsink->audio_sinkpad_stream_synchronizer);
3596 gst_object_unref (peer_pad);
3597 }
3598
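      /* with visualisations enabled, a single-buffer queue is placed in
       * front of the stream synchronizer's audio sinkpad */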
3599 if (!playsink->audio_ssync_queue) {
3600 GST_DEBUG_OBJECT (playsink, "adding audio stream synchronizer queue");
3601 playsink->audio_ssync_queue =
3602 gst_element_factory_make ("queue", "audiossyncqueue");
3603 if (playsink->audio_ssync_queue == NULL) {
3604 post_missing_element_message (playsink, "queue");
3605 GST_ELEMENT_WARNING (playsink, CORE, MISSING_PLUGIN,
3606 (_("Missing element '%s' - check your GStreamer installation."),
3607 "queue"),
3608 ("audio playback and visualizations might not work"));
3609 }
3610 g_object_set (playsink->audio_ssync_queue, "max-size-buffers",
3611 (guint) 1, NULL);
3612 gst_bin_add (GST_BIN_CAST (playsink), playsink->audio_ssync_queue);
3613 playsink->audio_ssync_queue_sinkpad =
3614 gst_element_get_static_pad (playsink->audio_ssync_queue, "sink");
3615 }
3616
3617 audio_queue_srcpad =
3618 gst_element_get_static_pad (playsink->audio_ssync_queue, "src");
3619 gst_pad_link_full (audio_queue_srcpad,
3620 playsink->audio_sinkpad_stream_synchronizer,
3621 GST_PAD_LINK_CHECK_NOTHING);
3622 gst_object_unref (audio_queue_srcpad);
3623 gst_element_sync_state_with_parent (playsink->audio_ssync_queue);
3624 }
3625
3626 if (playsink->audiochain) {
3627 GstPad *sinkpad;
3628
3629 GST_DEBUG_OBJECT (playsink, "adding audio chain");
3630 if (playsink->audio_tee_asrc == NULL) {
3631 playsink->audio_tee_asrc =
3632 gst_element_get_request_pad (playsink->audio_tee, "src_%u");
3633 }
3634
3635 sinkpad = playsink->audio_ssync_queue_sinkpad;
3636 if (!sinkpad)
3637 sinkpad = playsink->audio_sinkpad_stream_synchronizer;
3638
3639 add_chain (GST_PLAY_CHAIN (playsink->audiochain), TRUE);
3640 activate_chain (GST_PLAY_CHAIN (playsink->audiochain), TRUE);
3641 gst_pad_link_full (playsink->audio_tee_asrc, sinkpad,
3642 GST_PAD_LINK_CHECK_NOTHING);
3643 gst_pad_link_full (playsink->audio_srcpad_stream_synchronizer,
3644 playsink->audiochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
3645 }
3646 } else {
3647 GST_DEBUG_OBJECT (playsink, "no audio needed");
3648 /* we have no audio or we are requested to not play audio */
3649 if (playsink->audiochain) {
3650 GST_DEBUG_OBJECT (playsink, "removing audio chain");
3651 /* release the audio pad */
3652 if (playsink->audio_tee_asrc) {
3653 gst_element_release_request_pad (playsink->audio_tee,
3654 playsink->audio_tee_asrc);
3655 gst_object_unref (playsink->audio_tee_asrc);
3656 playsink->audio_tee_asrc = NULL;
3657 }
3658
3659 if (playsink->audio_sinkpad_stream_synchronizer) {
3660 gst_element_release_request_pad (GST_ELEMENT_CAST
3661 (playsink->stream_synchronizer),
3662 playsink->audio_sinkpad_stream_synchronizer);
3663 gst_object_unref (playsink->audio_sinkpad_stream_synchronizer);
3664 playsink->audio_sinkpad_stream_synchronizer = NULL;
3665 gst_object_unref (playsink->audio_srcpad_stream_synchronizer);
3666 playsink->audio_srcpad_stream_synchronizer = NULL;
3667
3668 gst_play_sink_remove_audio_ssync_queue (playsink);
3669 }
3670
3671 if (playsink->audiochain->sink_volume) {
3672 disconnect_audio_chain (playsink->audiochain, playsink);
3673 if (playsink->audiochain->volume)
3674 gst_object_unref (playsink->audiochain->volume);
3675 playsink->audiochain->volume = NULL;
3676 if (playsink->audiochain->ts_offset)
3677 gst_object_unref (playsink->audiochain->ts_offset);
3678 playsink->audiochain->ts_offset = NULL;
3679 }
3680 add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
3681 activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
3682 }
3683
3684 if (playsink->audio_sink)
3685 gst_element_set_state (playsink->audio_sink, GST_STATE_NULL);
3686 if (playsink->audio_filter)
3687 gst_element_set_state (playsink->audio_filter, GST_STATE_NULL);
3688 }
3689
3690 if (need_vis) {
3691 GstPad *srcpad;
3692
3693 if (!playsink->vischain)
3694 playsink->vischain = gen_vis_chain (playsink);
3695
3696 GST_DEBUG_OBJECT (playsink, "adding visualisation");
3697
3698 if (playsink->vischain) {
3699 GST_DEBUG_OBJECT (playsink, "setting up vis chain");
3700
3701 /* Lazily add and activate chain */
3702 if (!playsink->vischain->chain.added) {
3703 srcpad =
3704 gst_element_get_static_pad (playsink->vischain->chain.bin, "src");
3705 add_chain (GST_PLAY_CHAIN (playsink->vischain), TRUE);
3706 activate_chain (GST_PLAY_CHAIN (playsink->vischain), TRUE);
3707 if (playsink->audio_tee_vissrc == NULL) {
3708 playsink->audio_tee_vissrc =
3709 gst_element_get_request_pad (playsink->audio_tee, "src_%u");
3710 }
3711 gst_pad_link_full (playsink->audio_tee_vissrc,
3712 playsink->vischain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
3713 gst_pad_link_full (srcpad, playsink->video_sinkpad_stream_synchronizer,
3714 GST_PAD_LINK_CHECK_NOTHING);
3715 gst_pad_link_full (playsink->video_srcpad_stream_synchronizer,
3716 playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
3717 gst_object_unref (srcpad);
3718 }
3719
3720 /* Is a reconfiguration required? */
3721 if (playsink->vischain->vis != playsink->visualisation) {
3722 /* unlink the old plugin and unghost the pad */
3723 gst_pad_unlink (playsink->vischain->vispeerpad,
3724 playsink->vischain->vissinkpad);
3725 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->vischain->
3726 srcpad), NULL);
3727
3728 /* set the old plugin to NULL and remove */
3729 gst_element_set_state (playsink->vischain->vis, GST_STATE_NULL);
3730 gst_bin_remove (GST_BIN_CAST (playsink->vischain->chain.bin),
3731 playsink->vischain->vis);
3732
3733 /* add new plugin and set state to playing */
3734 playsink->vischain->vis = playsink->visualisation;
3735 gst_bin_add (GST_BIN_CAST (playsink->vischain->chain.bin),
3736 playsink->vischain->vis);
3737 gst_element_set_state (playsink->vischain->vis, GST_STATE_PLAYING);
3738
3739 /* get pads */
3740 playsink->vischain->vissinkpad =
3741 gst_element_get_static_pad (playsink->vischain->vis, "sink");
3742 playsink->vischain->vissrcpad =
3743 gst_element_get_static_pad (playsink->vischain->vis, "src");
3744
3745 /* link pads */
3746 gst_pad_link_full (playsink->vischain->vispeerpad,
3747 playsink->vischain->vissinkpad, GST_PAD_LINK_CHECK_NOTHING);
3748 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->vischain->
3749 srcpad), playsink->vischain->vissrcpad);
3750 }
3751 }
3752 } else {
3753 GST_DEBUG_OBJECT (playsink, "no vis needed");
3754 if (playsink->vischain) {
3755 if (playsink->audio_tee_vissrc) {
3756 gst_element_release_request_pad (playsink->audio_tee,
3757 playsink->audio_tee_vissrc);
3758 gst_object_unref (playsink->audio_tee_vissrc);
3759 playsink->audio_tee_vissrc = NULL;
3760 }
3761 GST_DEBUG_OBJECT (playsink, "removing vis chain");
3762 add_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE);
3763 activate_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE);
3764 }
3765 }
3766
3767 if (need_text) {
3768 GST_DEBUG_OBJECT (playsink, "adding text");
3769 if (!playsink->textchain) {
3770 GST_DEBUG_OBJECT (playsink, "creating text chain");
3771 playsink->textchain = gen_text_chain (playsink);
3772 }
3773 if (playsink->textchain) {
3774 GstIterator *it;
3775
3776 GST_DEBUG_OBJECT (playsink, "adding text chain");
3777 if (playsink->textchain->overlay)
3778 g_object_set (G_OBJECT (playsink->textchain->overlay), "silent", FALSE,
3779 NULL);
3780 add_chain (GST_PLAY_CHAIN (playsink->textchain), TRUE);
3781
3782 if (!playsink->text_sinkpad_stream_synchronizer) {
3783 GValue item = { 0, };
3784
3785 playsink->text_sinkpad_stream_synchronizer =
3786 gst_element_get_request_pad (GST_ELEMENT_CAST
3787 (playsink->stream_synchronizer), "sink_%u");
3788 it = gst_pad_iterate_internal_links
3789 (playsink->text_sinkpad_stream_synchronizer);
3790 g_assert (it);
3791 gst_iterator_next (it, &item);
3792 playsink->text_srcpad_stream_synchronizer = g_value_dup_object (&item);
3793 g_value_unset (&item);
3794 g_assert (playsink->text_srcpad_stream_synchronizer);
3795 gst_iterator_free (it);
3796 }
3797
3798 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->text_pad),
3799 playsink->text_sinkpad_stream_synchronizer);
3800 gst_pad_link_full (playsink->text_srcpad_stream_synchronizer,
3801 playsink->textchain->textsinkpad, GST_PAD_LINK_CHECK_NOTHING);
3802
3803 if (need_vis || need_video) {
3804 if (need_vis) {
3805 GstPad *srcpad;
3806
3807 srcpad =
3808 gst_element_get_static_pad (playsink->vischain->chain.bin, "src");
3809 gst_pad_unlink (srcpad, playsink->videochain->sinkpad);
3810 gst_pad_link_full (srcpad, playsink->textchain->videosinkpad,
3811 GST_PAD_LINK_CHECK_NOTHING);
3812 gst_object_unref (srcpad);
3813 } else {
3814 if (need_deinterlace) {
3815 gst_pad_unlink (playsink->videodeinterlacechain->srcpad,
3816 playsink->videochain->sinkpad);
3817 gst_pad_link_full (playsink->videodeinterlacechain->srcpad,
3818 playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING);
3819 } else {
3820 gst_pad_unlink (playsink->video_srcpad_stream_synchronizer,
3821 playsink->videochain->sinkpad);
3822 gst_pad_link_full (playsink->video_srcpad_stream_synchronizer,
3823 playsink->textchain->videosinkpad, GST_PAD_LINK_CHECK_NOTHING);
3824 }
3825 }
3826 gst_pad_link_full (playsink->textchain->srcpad,
3827 playsink->videochain->sinkpad, GST_PAD_LINK_CHECK_NOTHING);
3828 }
3829
3830 activate_chain (GST_PLAY_CHAIN (playsink->textchain), TRUE);
3831 }
3832 } else {
3833 GST_DEBUG_OBJECT (playsink, "no text needed");
3834 /* we have no subtitles/text or we are requested to not show them */
3835
3836 if (playsink->textchain) {
3837 if (playsink->text_pad == NULL) {
3838 /* no text pad, remove the chain entirely */
3839 GST_DEBUG_OBJECT (playsink, "removing text chain");
3840 add_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE);
3841 activate_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE);
3842
3843 if (playsink->text_sinkpad_stream_synchronizer) {
3844 gst_element_release_request_pad (GST_ELEMENT_CAST
3845 (playsink->stream_synchronizer),
3846 playsink->text_sinkpad_stream_synchronizer);
3847 gst_object_unref (playsink->text_sinkpad_stream_synchronizer);
3848 playsink->text_sinkpad_stream_synchronizer = NULL;
3849 gst_object_unref (playsink->text_srcpad_stream_synchronizer);
3850 playsink->text_srcpad_stream_synchronizer = NULL;
3851 }
3852
3853 if (!need_video && playsink->video_pad) {
3854 if (playsink->video_sinkpad_stream_synchronizer) {
3855 gst_element_release_request_pad (GST_ELEMENT_CAST
3856 (playsink->stream_synchronizer),
3857 playsink->video_sinkpad_stream_synchronizer);
3858 gst_object_unref (playsink->video_sinkpad_stream_synchronizer);
3859 playsink->video_sinkpad_stream_synchronizer = NULL;
3860 gst_object_unref (playsink->video_srcpad_stream_synchronizer);
3861 playsink->video_srcpad_stream_synchronizer = NULL;
3862 }
3863
3864 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->video_pad),
3865 NULL);
3866 }
3867
3868 if (playsink->text_pad && !playsink->textchain)
3869 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (playsink->text_pad),
3870 NULL);
3871
3872 if (playsink->text_sink)
3873 gst_element_set_state (playsink->text_sink, GST_STATE_NULL);
3874 } else {
3875 /* we have a chain and a textpad, turn the subtitles off */
3876 GST_DEBUG_OBJECT (playsink, "turning off the text");
3877 if (playsink->textchain->overlay)
3878 g_object_set (G_OBJECT (playsink->textchain->overlay), "silent", TRUE,
3879 NULL);
3880 }
3881 }
3882 }
3883 update_av_offset (playsink);
3884 update_text_offset (playsink);
3885 do_async_done (playsink);
3886 GST_PLAY_SINK_UNLOCK (playsink);
3887
3888 return TRUE;
3889
3890 /* ERRORS */
3891 no_chain:
3892 {
3893     /* the gen_*_chain function already posted an error */
3894 GST_DEBUG_OBJECT (playsink, "failed to setup chain");
3895 GST_PLAY_SINK_UNLOCK (playsink);
3896 return FALSE;
3897 }
3898 }
3899
3900 /**
3901 * gst_play_sink_set_flags:
3902 * @playsink: a #GstPlaySink
3903 * @flags: #GstPlayFlags
3904 *
3905 * Configure @flags on @playsink. The flags control the behaviour of @playsink
3906  * when constructing the sink pipelines.
3907 *
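 * A minimal usage sketch (assuming @playsink points to an existing playsink
 * instance):
 * |[
 *   gst_play_sink_set_flags (playsink,
 *       GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_SOFT_VOLUME);
 * ]|
 *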
3908 * Returns: TRUE if the flags could be configured.
3909 */
3910 gboolean
3911 gst_play_sink_set_flags (GstPlaySink * playsink, GstPlayFlags flags)
3912 {
3913 g_return_val_if_fail (GST_IS_PLAY_SINK (playsink), FALSE);
3914
3915 GST_OBJECT_LOCK (playsink);
3916 playsink->flags = flags;
3917 GST_OBJECT_UNLOCK (playsink);
3918
3919 return TRUE;
3920 }
3921
3922 /**
3923 * gst_play_sink_get_flags:
3924 * @playsink: a #GstPlaySink
3925 *
3926  * Get the flags of @playsink. These flags control the behaviour of the sink when
3927 * it constructs the sink pipelines.
3928 *
3929 * Returns: the currently configured #GstPlayFlags.
3930 */
3931 GstPlayFlags
3932 gst_play_sink_get_flags (GstPlaySink * playsink)
3933 {
3934 GstPlayFlags res;
3935
3936 g_return_val_if_fail (GST_IS_PLAY_SINK (playsink), 0);
3937
3938 GST_OBJECT_LOCK (playsink);
3939 res = playsink->flags;
3940 GST_OBJECT_UNLOCK (playsink);
3941
3942 return res;
3943 }
3944
3945 void
3946 gst_play_sink_set_font_desc (GstPlaySink * playsink, const gchar * desc)
3947 {
3948 GstPlayTextChain *chain;
3949
3950 GST_PLAY_SINK_LOCK (playsink);
3951 chain = (GstPlayTextChain *) playsink->textchain;
3952 g_free (playsink->font_desc);
3953 playsink->font_desc = g_strdup (desc);
3954 if (chain && chain->overlay) {
3955 g_object_set (chain->overlay, "font-desc", desc, NULL);
3956 }
3957 GST_PLAY_SINK_UNLOCK (playsink);
3958 }
3959
3960 gchar *
3961 gst_play_sink_get_font_desc (GstPlaySink * playsink)
3962 {
3963 gchar *result = NULL;
3964 GstPlayTextChain *chain;
3965
3966 GST_PLAY_SINK_LOCK (playsink);
3967 chain = (GstPlayTextChain *) playsink->textchain;
3968 if (chain && chain->overlay) {
3969 g_object_get (chain->overlay, "font-desc", &result, NULL);
3970 playsink->font_desc = g_strdup (result);
3971 } else {
3972 result = g_strdup (playsink->font_desc);
3973 }
3974 GST_PLAY_SINK_UNLOCK (playsink);
3975
3976 return result;
3977 }
3978
3979 void
3980 gst_play_sink_set_subtitle_encoding (GstPlaySink * playsink,
3981 const gchar * encoding)
3982 {
3983 GstPlayTextChain *chain;
3984
3985 GST_PLAY_SINK_LOCK (playsink);
3986 chain = (GstPlayTextChain *) playsink->textchain;
3987 g_free (playsink->subtitle_encoding);
3988 playsink->subtitle_encoding = g_strdup (encoding);
3989 if (chain && chain->overlay) {
3990 g_object_set (chain->overlay, "subtitle-encoding", encoding, NULL);
3991 }
3992 GST_PLAY_SINK_UNLOCK (playsink);
3993 }
3994
3995 gchar *
3996 gst_play_sink_get_subtitle_encoding (GstPlaySink * playsink)
3997 {
3998 gchar *result = NULL;
3999 GstPlayTextChain *chain;
4000
4001 GST_PLAY_SINK_LOCK (playsink);
4002 chain = (GstPlayTextChain *) playsink->textchain;
4003 if (chain && chain->overlay) {
4004 g_object_get (chain->overlay, "subtitle-encoding", &result, NULL);
4005 playsink->subtitle_encoding = g_strdup (result);
4006 } else {
4007 result = g_strdup (playsink->subtitle_encoding);
4008 }
4009 GST_PLAY_SINK_UNLOCK (playsink);
4010
4011 return result;
4012 }
4013
4014 static void
4015 update_av_offset (GstPlaySink * playsink)
4016 {
4017 gint64 av_offset;
4018 GstPlayAudioChain *achain;
4019 GstPlayVideoChain *vchain;
4020
4021 av_offset = playsink->av_offset;
4022 achain = (GstPlayAudioChain *) playsink->audiochain;
4023 vchain = (GstPlayVideoChain *) playsink->videochain;
4024
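  /* with the ts-offset settings below, a positive av_offset delays the
   * video stream while a negative av_offset delays the audio stream */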
4025 if (achain && vchain && achain->ts_offset && vchain->ts_offset) {
4026 g_object_set (achain->ts_offset,
4027 "ts-offset", MAX (G_GINT64_CONSTANT (0), -av_offset), NULL);
4028 g_object_set (vchain->ts_offset,
4029 "ts-offset", MAX (G_GINT64_CONSTANT (0), av_offset), NULL);
4030 } else {
4031 GST_LOG_OBJECT (playsink, "no ts_offset elements");
4032 }
4033 }
4034
4035 void
4036 gst_play_sink_set_av_offset (GstPlaySink * playsink, gint64 av_offset)
4037 {
4038 GST_PLAY_SINK_LOCK (playsink);
4039 playsink->av_offset = av_offset;
4040 update_av_offset (playsink);
4041 GST_PLAY_SINK_UNLOCK (playsink);
4042 }
4043
4044 gint64
4045 gst_play_sink_get_av_offset (GstPlaySink * playsink)
4046 {
4047 gint64 result;
4048
4049 GST_PLAY_SINK_LOCK (playsink);
4050 result = playsink->av_offset;
4051 GST_PLAY_SINK_UNLOCK (playsink);
4052
4053 return result;
4054 }
4055
4056 static void
4057 update_text_offset (GstPlaySink * playsink)
4058 {
4059 gint64 text_offset;
4060 GstPlayTextChain *tchain;
4061 GstElement *elem;
4062
4063 text_offset = playsink->text_offset;
4064 tchain = (GstPlayTextChain *) playsink->textchain;
4065
4066 if (tchain) {
4067 if (tchain->sink) {
4068 elem =
4069 gst_play_sink_find_property_sinks (playsink, tchain->sink,
4070 "ts-offset", G_TYPE_INT64);
4071 if (elem)
4072 g_object_set (elem, "ts-offset", text_offset, NULL);
4073 } else if (tchain->overlay) {
4074 g_object_set (tchain->overlay, "subtitle-ts-offset", text_offset, NULL);
4075 }
4076 } else {
4077 GST_LOG_OBJECT (playsink, "no text chain");
4078 }
4079 }
4080
4081 void
4082 gst_play_sink_set_text_offset (GstPlaySink * playsink, gint64 text_offset)
4083 {
4084 GST_PLAY_SINK_LOCK (playsink);
4085 playsink->text_offset = text_offset;
4086 update_text_offset (playsink);
4087 GST_PLAY_SINK_UNLOCK (playsink);
4088 }
4089
4090 gint64
4091 gst_play_sink_get_text_offset (GstPlaySink * playsink)
4092 {
4093 gint64 result;
4094
4095 GST_PLAY_SINK_LOCK (playsink);
4096 result = playsink->text_offset;
4097 GST_PLAY_SINK_UNLOCK (playsink);
4098
4099 return result;
4100 }
4101
4102 /**
4103 * gst_play_sink_get_last_sample:
4104 * @playsink: a #GstPlaySink
4105 *
4106 * Get the last displayed sample from @playsink. This sample is in the native
4107 * format of the sink element, the caps in the result sample contain the format
4108 * of the frame data.
4109 *
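 * A minimal usage sketch (error handling omitted; @playsink is assumed to
 * hold a valid playsink, and the caps of the returned sample describe the
 * frame format):
 * |[
 *   GstSample *sample = gst_play_sink_get_last_sample (playsink);
 *
 *   if (sample != NULL) {
 *     GstCaps *caps = gst_sample_get_caps (sample);
 *     gst_sample_unref (sample);
 *   }
 * ]|
 *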
4110 * Returns: a #GstSample with the frame data or %NULL when no video frame is
4111 * available.
4112 */
4113 GstSample *
4114 gst_play_sink_get_last_sample (GstPlaySink * playsink)
4115 {
4116 GstSample *result = NULL;
4117 GstPlayVideoChain *chain;
4118
4119 GST_PLAY_SINK_LOCK (playsink);
4120 GST_DEBUG_OBJECT (playsink, "taking last sample");
4121 /* get the video chain if we can */
4122 if ((chain = (GstPlayVideoChain *) playsink->videochain)) {
4123 GST_DEBUG_OBJECT (playsink, "found video chain");
4124 /* see if the chain is active */
4125 if (chain->chain.activated && chain->sink) {
4126 GstElement *elem;
4127
4128 GST_DEBUG_OBJECT (playsink, "video chain active and has a sink");
4129
4130       /* find and get the last-sample property now */
4131 if ((elem =
4132 gst_play_sink_find_property (playsink, chain->sink,
4133 "last-sample", GST_TYPE_SAMPLE))) {
4134 GST_DEBUG_OBJECT (playsink, "getting last-sample property");
4135 g_object_get (elem, "last-sample", &result, NULL);
4136 gst_object_unref (elem);
4137 }
4138 }
4139 }
4140 GST_PLAY_SINK_UNLOCK (playsink);
4141
4142 return result;
4143 }
4144
4145 /**
4146 * gst_play_sink_convert_sample:
4147 * @playsink: a #GstPlaySink
4148 * @caps: a #GstCaps
4149 *
4150  * Get the last displayed frame from @playsink. If @caps is %NULL, the video will
4151 * be in the native format of the sink element and the caps on the buffer
4152 * describe the format of the frame. If @caps is not %NULL, the video
4153 * frame will be converted to the format of the caps.
4154 *
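 * A minimal usage sketch that requests an RGB snapshot (assumption: the
 * conversion elements needed for RGB output are available):
 * |[
 *   GstCaps *rgb_caps = gst_caps_new_simple ("video/x-raw",
 *       "format", G_TYPE_STRING, "RGB", NULL);
 *   GstSample *snapshot = gst_play_sink_convert_sample (playsink, rgb_caps);
 *
 *   gst_caps_unref (rgb_caps);
 *   if (snapshot != NULL)
 *     gst_sample_unref (snapshot);
 * ]|
 *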
4155  * Returns: a #GstSample of the current video sample converted to @caps.
4156 * The caps in the sample will describe the final layout of the buffer data.
4157 * %NULL is returned when no current sample can be retrieved or when the
4158 * conversion failed.
4159 */
4160 GstSample *
4161 gst_play_sink_convert_sample (GstPlaySink * playsink, GstCaps * caps)
4162 {
4163 GstSample *result;
4164 GError *err = NULL;
4165
4166 result = gst_play_sink_get_last_sample (playsink);
4167 if (result != NULL && caps != NULL) {
4168 GstSample *temp;
4169
4170 temp = gst_video_convert_sample (result, caps, 25 * GST_SECOND, &err);
4171 if (temp == NULL && err)
4172 goto error;
4173
4174 gst_sample_unref (result);
4175 result = temp;
4176 }
4177 return result;
4178
4179 /* ERRORS */
4180 error:
4181 {
4182 /* I'm really uncertain whether we should make playsink post an error
4183 * on the bus or not. It's not like it's a critical issue regarding
4184 * playsink behaviour. */
4185 GST_ERROR ("Error converting frame: %s", err->message);
4186 gst_sample_unref (result);
4187 g_error_free (err);
4188 return NULL;
4189 }
4190 }
4191
4192 static gboolean
4193 is_raw_structure (GstStructure * s)
4194 {
4195 const gchar *name;
4196
4197 name = gst_structure_get_name (s);
4198
4199 if (g_str_equal (name, "video/x-raw") || g_str_equal (name, "audio/x-raw"))
4200 return TRUE;
4201 return FALSE;
4202 }
4203
4204 static gboolean
4205 is_raw_pad (GstPad * pad)
4206 {
4207 GstPad *peer = gst_pad_get_peer (pad);
4208 GstCaps *caps;
4209 gboolean raw = TRUE;
4210
4211 if (!peer)
4212 return raw;
4213
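  /* prefer the caps that were actually negotiated on the peer; fall back to
   * querying the peer when nothing has been negotiated yet */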
4214 caps = gst_pad_get_current_caps (peer);
4215 if (!caps) {
4216 guint i, n;
4217
4218 caps = gst_pad_query_caps (peer, NULL);
4219
4220 n = gst_caps_get_size (caps);
4221 for (i = 0; i < n; i++) {
4222 gboolean r = is_raw_structure (gst_caps_get_structure (caps, i));
4223
4224 if (i == 0) {
4225 raw = r;
4226 } else if (raw != r) {
4227 GST_ERROR_OBJECT (pad,
4228 "Caps contains raw and non-raw structures: %" GST_PTR_FORMAT, caps);
4229 raw = FALSE;
4230 break;
4231 }
4232 }
4233 } else {
4234 raw = is_raw_structure (gst_caps_get_structure (caps, 0));
4235 }
4236 gst_caps_unref (caps);
4237 gst_object_unref (peer);
4238
4239 return raw;
4240 }
4241
4242 static GstPadProbeReturn
4243 sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info,
4244 gpointer user_data);
4245
4246 static void
4247 video_set_blocked (GstPlaySink * playsink, gboolean blocked)
4248 {
4249 if (playsink->video_pad) {
4250 GstPad *opad =
4251 GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
4252 (playsink->video_pad)));
4253 if (blocked && playsink->video_block_id == 0) {
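      /* drop any block that is still installed on the vis chain's source
       * pad before blocking the video pad itself */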
4254 if (playsink->vis_pad_block_id)
4255 gst_pad_remove_probe (((GstPlayVisChain *) playsink->vischain)->
4256 blockpad, playsink->vis_pad_block_id);
4257 playsink->vis_pad_block_id = 0;
4258
4259 playsink->video_block_id =
4260 gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
4261 sinkpad_blocked_cb, playsink, NULL);
4262 } else if (!blocked && playsink->video_block_id) {
4263 gst_pad_remove_probe (opad, playsink->video_block_id);
4264 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_VIDEO_RAW);
4265 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_VIDEO);
4266 playsink->video_block_id = 0;
4267 playsink->video_pad_blocked = FALSE;
4268 }
4269 gst_object_unref (opad);
4270 }
4271 }
4272
4273 static void
4274 audio_set_blocked (GstPlaySink * playsink, gboolean blocked)
4275 {
4276 if (playsink->audio_pad) {
4277 GstPad *opad =
4278 GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
4279 (playsink->audio_pad)));
4280 if (blocked && playsink->audio_block_id == 0) {
4281 if (playsink->vis_pad_block_id)
4282 gst_pad_remove_probe (((GstPlayVisChain *) playsink->vischain)->
4283 blockpad, playsink->vis_pad_block_id);
4284 playsink->vis_pad_block_id = 0;
4285
4286 playsink->audio_block_id =
4287 gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
4288 sinkpad_blocked_cb, playsink, NULL);
4289 } else if (!blocked && playsink->audio_block_id) {
4290 if (playsink->vis_pad_block_id)
4291 gst_pad_remove_probe (((GstPlayVisChain *) playsink->vischain)->
4292 blockpad, playsink->vis_pad_block_id);
4293 playsink->vis_pad_block_id = 0;
4294
4295 gst_pad_remove_probe (opad, playsink->audio_block_id);
4296 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_AUDIO_RAW);
4297 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_AUDIO);
4298 playsink->audio_block_id = 0;
4299 playsink->audio_pad_blocked = FALSE;
4300 }
4301 gst_object_unref (opad);
4302 }
4303 }
4304
4305 static void
4306 text_set_blocked (GstPlaySink * playsink, gboolean blocked)
4307 {
4308 if (playsink->text_pad) {
4309 GstPad *opad =
4310 GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD
4311 (playsink->text_pad)));
4312 if (blocked && playsink->text_block_id == 0) {
4313 if (playsink->vis_pad_block_id)
4314 gst_pad_remove_probe (((GstPlayVisChain *) playsink->vischain)->
4315 blockpad, playsink->vis_pad_block_id);
4316 playsink->vis_pad_block_id = 0;
4317
4318 playsink->text_block_id =
4319 gst_pad_add_probe (opad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
4320 sinkpad_blocked_cb, playsink, NULL);
4321 } else if (!blocked && playsink->text_block_id) {
4322 gst_pad_remove_probe (opad, playsink->text_block_id);
4323 PENDING_FLAG_UNSET (playsink, GST_PLAY_SINK_TYPE_TEXT);
4324 playsink->text_block_id = 0;
4325 playsink->text_pad_blocked = FALSE;
4326 }
4327 gst_object_unref (opad);
4328 }
4329 }
4330
4331 gboolean
4332 gst_play_sink_reconfigure (GstPlaySink * playsink)
4333 {
4334 GST_LOG_OBJECT (playsink, "Triggering reconfiguration");
4335
4336 GST_PLAY_SINK_LOCK (playsink);
4337 video_set_blocked (playsink, TRUE);
4338 audio_set_blocked (playsink, TRUE);
4339 text_set_blocked (playsink, TRUE);
4340 GST_PLAY_SINK_UNLOCK (playsink);
4341
4342 return TRUE;
4343 }
4344
4345 static GstPadProbeReturn
4346 sinkpad_blocked_cb (GstPad * blockedpad, GstPadProbeInfo * info,
4347 gpointer user_data)
4348 {
4349 GstPlaySink *playsink = (GstPlaySink *) user_data;
4350 GstPad *pad;
4351
4352 if (GST_IS_EVENT (info->data) && !GST_EVENT_IS_SERIALIZED (info->data)) {
4353 GST_DEBUG_OBJECT (playsink, "Letting non-serialized event %s pass",
4354 GST_EVENT_TYPE_NAME (info->data));
4355 return GST_PAD_PROBE_PASS;
4356 }
4357
4358 GST_PLAY_SINK_LOCK (playsink);
4359
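  /* the probe fired on the internal proxy pad; map it back to the external
   * ghostpad so we can tell which stream was blocked */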
4360 pad = GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (blockedpad)));
4361 if (pad == playsink->video_pad) {
4362 playsink->video_pad_blocked = TRUE;
4363 GST_DEBUG_OBJECT (pad, "Video pad blocked");
4364 } else if (pad == playsink->audio_pad) {
4365 playsink->audio_pad_blocked = TRUE;
4366 GST_DEBUG_OBJECT (pad, "Audio pad blocked");
4367 } else if (pad == playsink->text_pad) {
4368 playsink->text_pad_blocked = TRUE;
4369 GST_DEBUG_OBJECT (pad, "Text pad blocked");
4370 }
4371
4372   /* We reconfigure when, for ALL streams:
4373 * * there isn't a pad
4374 * * OR the pad is blocked
4375 * * OR there are no pending blocks on that pad
4376 */
4377
4378 if ((!playsink->video_pad || playsink->video_pad_blocked
4379 || !PENDING_VIDEO_BLOCK (playsink)) && (!playsink->audio_pad
4380 || playsink->audio_pad_blocked || !PENDING_AUDIO_BLOCK (playsink))
4381 && (!playsink->text_pad || playsink->text_pad_blocked
4382 || !PENDING_TEXT_BLOCK (playsink))) {
4383 GST_DEBUG_OBJECT (playsink, "All pads blocked -- reconfiguring");
4384
4385 if (playsink->video_pad) {
4386 playsink->video_pad_raw = is_raw_pad (playsink->video_pad);
4387 GST_DEBUG_OBJECT (playsink, "Video pad is raw: %d",
4388 playsink->video_pad_raw);
4389 }
4390
4391 if (playsink->audio_pad) {
4392 playsink->audio_pad_raw = is_raw_pad (playsink->audio_pad);
4393 GST_DEBUG_OBJECT (playsink, "Audio pad is raw: %d",
4394 playsink->audio_pad_raw);
4395 }
4396
4397 gst_play_sink_do_reconfigure (playsink);
4398
4399 video_set_blocked (playsink, FALSE);
4400 audio_set_blocked (playsink, FALSE);
4401 text_set_blocked (playsink, FALSE);
4402 }
4403
4404 gst_object_unref (pad);
4405
4406 GST_PLAY_SINK_UNLOCK (playsink);
4407
4408 return GST_PAD_PROBE_OK;
4409 }
4410
4411 static void
4412 caps_notify_cb (GstPad * pad, GParamSpec * unused, GstPlaySink * playsink)
4413 {
4414 gboolean reconfigure = FALSE;
4415 GstCaps *caps;
4416 gboolean raw;
4417
4418 g_object_get (pad, "caps", &caps, NULL);
4419 if (!caps)
4420 return;
4421
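  /* only schedule a reconfigure when the raw-ness of the stream changed and
   * a chain for that stream already exists */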
4422 if (pad == playsink->audio_pad) {
4423 raw = is_raw_pad (pad);
4424 reconfigure = (! !playsink->audio_pad_raw != ! !raw)
4425 && playsink->audiochain;
4426 GST_DEBUG_OBJECT (pad,
4427 "Audio caps changed: raw %d reconfigure %d caps %" GST_PTR_FORMAT, raw,
4428 reconfigure, caps);
4429 } else if (pad == playsink->video_pad) {
4430 raw = is_raw_pad (pad);
4431 reconfigure = (! !playsink->video_pad_raw != ! !raw)
4432 && playsink->videochain;
4433 GST_DEBUG_OBJECT (pad,
4434 "Video caps changed: raw %d reconfigure %d caps %" GST_PTR_FORMAT, raw,
4435 reconfigure, caps);
4436 }
4437
4438 gst_caps_unref (caps);
4439
4440 if (reconfigure)
4441 gst_play_sink_reconfigure (playsink);
4442 }
4443
4444 void
4445 gst_play_sink_refresh_pad (GstPlaySink * playsink, GstPad * pad,
4446 GstPlaySinkType type)
4447 {
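  /* re-install a block probe on the pad's internal proxy pad and mark @type
   * as pending so that a reconfigure runs once the pad is actually blocked */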
4448 gulong *block_id = NULL;
4449
4450 GST_DEBUG_OBJECT (playsink, "refresh pad %" GST_PTR_FORMAT, pad);
4451
4452 GST_PLAY_SINK_LOCK (playsink);
4453 if (pad == playsink->video_pad) {
4454 if (type != GST_PLAY_SINK_TYPE_VIDEO_RAW &&
4455 type != GST_PLAY_SINK_TYPE_VIDEO)
4456 goto wrong_type;
4457 block_id = &playsink->video_block_id;
4458 } else if (pad == playsink->audio_pad) {
4459 if (type != GST_PLAY_SINK_TYPE_AUDIO_RAW &&
4460 type != GST_PLAY_SINK_TYPE_AUDIO)
4461 goto wrong_type;
4462 block_id = &playsink->audio_block_id;
4463 } else if (pad == playsink->text_pad) {
4464 if (type != GST_PLAY_SINK_TYPE_TEXT)
4465 goto wrong_type;
4466 block_id = &playsink->text_block_id;
4467 }
4468
4469 if (type != GST_PLAY_SINK_TYPE_FLUSHING && (block_id && *block_id == 0)) {
4470 GstPad *blockpad =
4471 GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (pad)));
4472
4473 if (playsink->vis_pad_block_id)
4474 gst_pad_remove_probe (((GstPlayVisChain *) playsink->vischain)->blockpad,
4475 playsink->vis_pad_block_id);
4476 playsink->vis_pad_block_id = 0;
4477
4478 *block_id =
4479 gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
4480 sinkpad_blocked_cb, playsink, NULL);
4481 PENDING_FLAG_SET (playsink, type);
4482 gst_object_unref (blockpad);
4483 }
4484 GST_PLAY_SINK_UNLOCK (playsink);
4485
4486 return;
4487
4488 /* ERRORS */
4489 wrong_type:
4490 {
4491 GST_WARNING_OBJECT (playsink, "wrong type %u for pad %" GST_PTR_FORMAT,
4492 type, pad);
4493 GST_PLAY_SINK_UNLOCK (playsink);
4494 return;
4495 }
4496 }
4497
4498 /**
4499  * gst_play_sink_request_pad:
4500 * @playsink: a #GstPlaySink
4501 * @type: a #GstPlaySinkType
4502 *
4503 * Create or return a pad of @type.
4504 *
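 * A minimal usage sketch (assuming @playsink holds a valid playsink; release
 * the pad again with gst_play_sink_release_pad() when done):
 * |[
 *   GstPad *sinkpad =
 *       gst_play_sink_request_pad (playsink, GST_PLAY_SINK_TYPE_AUDIO);
 * ]|
 *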
4505 * Returns: a #GstPad of @type or %NULL when the pad could not be created.
4506 */
4507 GstPad *
4508 gst_play_sink_request_pad (GstPlaySink * playsink, GstPlaySinkType type)
4509 {
4510 GstPad *res = NULL;
4511 gboolean created = FALSE;
4512 gboolean activate = TRUE;
4513 const gchar *pad_name = NULL;
4514 gulong *block_id = NULL;
4515
4516 GST_DEBUG_OBJECT (playsink, "request pad type %d", type);
4517
4518 GST_PLAY_SINK_LOCK (playsink);
4519 switch (type) {
4520 case GST_PLAY_SINK_TYPE_AUDIO_RAW:
4521 case GST_PLAY_SINK_TYPE_AUDIO:
4522 pad_name = "audio_sink";
4523 if (!playsink->audio_tee) {
4524 GST_LOG_OBJECT (playsink, "creating tee");
4525 /* create tee when needed. This element will feed the audio sink chain
4526 * and the vis chain. */
4527 playsink->audio_tee = gst_element_factory_make ("tee", "audiotee");
4528 if (playsink->audio_tee == NULL) {
4529 post_missing_element_message (playsink, "tee");
4530 GST_ELEMENT_ERROR (playsink, CORE, MISSING_PLUGIN,
4531 (_("Missing element '%s' - check your GStreamer installation."),
4532 "tee"), (NULL));
4533 res = NULL;
4534 break;
4535 }
4536 playsink->audio_tee_sink =
4537 gst_element_get_static_pad (playsink->audio_tee, "sink");
4538 gst_bin_add (GST_BIN_CAST (playsink), playsink->audio_tee);
4539 }
4540 gst_element_set_state (playsink->audio_tee, GST_STATE_PAUSED);
4541 if (!playsink->audio_pad) {
4542 GST_LOG_OBJECT (playsink, "ghosting tee sinkpad");
4543 playsink->audio_pad =
4544 gst_ghost_pad_new (pad_name, playsink->audio_tee_sink);
4545 playsink->audio_notify_caps_id =
4546 g_signal_connect (G_OBJECT (playsink->audio_pad), "notify::caps",
4547 G_CALLBACK (caps_notify_cb), playsink);
4548 created = TRUE;
4549 }
4550 playsink->audio_pad_raw = FALSE;
4551 res = playsink->audio_pad;
4552 block_id = &playsink->audio_block_id;
4553 break;
4554 case GST_PLAY_SINK_TYPE_VIDEO_RAW:
4555 case GST_PLAY_SINK_TYPE_VIDEO:
4556 pad_name = "video_sink";
4557 if (!playsink->video_pad) {
4558 GST_LOG_OBJECT (playsink, "ghosting videosink");
4559 playsink->video_pad =
4560 gst_ghost_pad_new_no_target (pad_name, GST_PAD_SINK);
4561 playsink->video_notify_caps_id =
4562 g_signal_connect (G_OBJECT (playsink->video_pad), "notify::caps",
4563 G_CALLBACK (caps_notify_cb), playsink);
4564 created = TRUE;
4565 }
4566 playsink->video_pad_raw = FALSE;
4567 res = playsink->video_pad;
4568 block_id = &playsink->video_block_id;
4569 break;
4570 case GST_PLAY_SINK_TYPE_TEXT:
4571 GST_LOG_OBJECT (playsink, "ghosting text");
4572 if (!playsink->text_pad) {
4573 playsink->text_pad =
4574 gst_ghost_pad_new_no_target ("text_sink", GST_PAD_SINK);
4575 created = TRUE;
4576 }
4577 res = playsink->text_pad;
4578 block_id = &playsink->text_block_id;
4579 break;
4580 case GST_PLAY_SINK_TYPE_FLUSHING:
4581 {
4582 gchar *padname;
4583
4584 /* we need a unique padname for the flushing pad. */
4585 padname = g_strdup_printf ("flushing_%u", playsink->count);
4586 res = gst_ghost_pad_new_no_target (padname, GST_PAD_SINK);
4587 g_free (padname);
4588 playsink->count++;
4589 activate = FALSE;
4590 created = TRUE;
4591 break;
4592 }
4593 default:
4594 res = NULL;
4595 break;
4596 }
4597 GST_PLAY_SINK_UNLOCK (playsink);
4598
4599 if (created && res) {
4600 /* we have to add the pad when it's active or we get an error when the
4601 * element is 'running' */
4602 gst_pad_set_active (res, TRUE);
4603 gst_element_add_pad (GST_ELEMENT_CAST (playsink), res);
4604
4605 GST_PLAY_SINK_LOCK (playsink);
4606 if (block_id && *block_id == 0) {
4607 GstPad *blockpad =
4608 GST_PAD_CAST (gst_proxy_pad_get_internal (GST_PROXY_PAD (res)));
4609
4610 if (playsink->vis_pad_block_id)
4611 gst_pad_remove_probe (((GstPlayVisChain *) playsink->vischain)->
4612 blockpad, playsink->vis_pad_block_id);
4613 playsink->vis_pad_block_id = 0;
4614
4615 *block_id =
4616 gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
4617 sinkpad_blocked_cb, playsink, NULL);
4618 PENDING_FLAG_SET (playsink, type);
4619 gst_object_unref (blockpad);
4620 }
4621 GST_PLAY_SINK_UNLOCK (playsink);
4622 if (!activate)
4623 gst_pad_set_active (res, activate);
4624 }
4625
4626 return res;
4627 }
4628
4629
4630 static GstPad *
4631 gst_play_sink_request_new_pad (GstElement * element, GstPadTemplate * templ,
4632 const gchar * name, const GstCaps * caps)
4633 {
4634 GstPlaySink *psink;
4635 GstPad *pad;
4636 GstPlaySinkType type;
4637 const gchar *tplname;
4638
4639 g_return_val_if_fail (templ != NULL, NULL);
4640
4641 GST_DEBUG_OBJECT (element, "name:%s", name);
4642
4643 psink = GST_PLAY_SINK (element);
4644 tplname = GST_PAD_TEMPLATE_NAME_TEMPLATE (templ);
4645
4646 /* Figure out the GstPlaySinkType based on the template */
4647 if (!strcmp (tplname, "audio_sink"))
4648 type = GST_PLAY_SINK_TYPE_AUDIO;
4649 else if (!strcmp (tplname, "audio_raw_sink"))
4650 type = GST_PLAY_SINK_TYPE_AUDIO_RAW;
4651 else if (!strcmp (tplname, "video_sink"))
4652 type = GST_PLAY_SINK_TYPE_VIDEO;
4653 else if (!strcmp (tplname, "video_raw_sink"))
4654 type = GST_PLAY_SINK_TYPE_VIDEO_RAW;
4655 else if (!strcmp (tplname, "text_sink"))
4656 type = GST_PLAY_SINK_TYPE_TEXT;
4657 else
4658 goto unknown_template;
4659
4660 pad = gst_play_sink_request_pad (psink, type);
4661 return pad;
4662
4663 unknown_template:
4664 GST_WARNING_OBJECT (element, "Unknown pad template");
4665 return NULL;
4666 }
4667
4668 void
4669 gst_play_sink_release_pad (GstPlaySink * playsink, GstPad * pad)
4670 {
4671 GstPad **res = NULL;
4672 gboolean untarget = TRUE;
4673
4674 GST_DEBUG_OBJECT (playsink, "release pad %" GST_PTR_FORMAT, pad);
4675
4676 GST_PLAY_SINK_LOCK (playsink);
4677 if (pad == playsink->video_pad) {
4678 res = &playsink->video_pad;
4679 g_signal_handler_disconnect (playsink->video_pad,
4680 playsink->video_notify_caps_id);
4681 video_set_blocked (playsink, FALSE);
4682 } else if (pad == playsink->audio_pad) {
4683 res = &playsink->audio_pad;
4684 g_signal_handler_disconnect (playsink->audio_pad,
4685 playsink->audio_notify_caps_id);
4686 audio_set_blocked (playsink, FALSE);
4687 } else if (pad == playsink->text_pad) {
4688 res = &playsink->text_pad;
4689 text_set_blocked (playsink, FALSE);
4690 } else {
4691 /* try to release the given pad anyway, these could be the FLUSHING pads. */
4692 res = &pad;
4693 untarget = FALSE;
4694 }
4695 GST_PLAY_SINK_UNLOCK (playsink);
4696
4697 if (*res) {
4698 GST_DEBUG_OBJECT (playsink, "deactivate pad %" GST_PTR_FORMAT, *res);
4699 gst_pad_set_active (*res, FALSE);
4700 if (untarget) {
4701 GST_DEBUG_OBJECT (playsink, "untargeting pad %" GST_PTR_FORMAT, *res);
4702 gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (*res), NULL);
4703 }
4704 GST_DEBUG_OBJECT (playsink, "remove pad %" GST_PTR_FORMAT, *res);
4705 gst_element_remove_pad (GST_ELEMENT_CAST (playsink), *res);
4706 *res = NULL;
4707 }
4708 }
4709
4710 static void
4711 gst_play_sink_release_request_pad (GstElement * element, GstPad * pad)
4712 {
4713 GstPlaySink *psink = GST_PLAY_SINK (element);
4714
4715 gst_play_sink_release_pad (psink, pad);
4716 }
4717
4718 static void
4719 gst_play_sink_handle_message (GstBin * bin, GstMessage * message)
4720 {
4721 GstPlaySink *playsink;
4722
4723 playsink = GST_PLAY_SINK_CAST (bin);
4724
4725 switch (GST_MESSAGE_TYPE (message)) {
4726 case GST_MESSAGE_STEP_DONE:
4727 {
4728 GstFormat format;
4729 guint64 amount;
4730 gdouble rate;
4731 gboolean flush, intermediate, eos;
4732 guint64 duration;
4733
4734 GST_INFO_OBJECT (playsink, "Handling step-done message");
4735 gst_message_parse_step_done (message, &format, &amount, &rate, &flush,
4736 &intermediate, &duration, &eos);
4737
4738 if (format == GST_FORMAT_BUFFERS) {
4739 /* for the buffer format, we align the other streams */
4740 if (playsink->audiochain
4741 && !gst_object_has_as_ancestor (GST_MESSAGE_SRC (message),
4742 GST_OBJECT (playsink->audiochain->chain.bin))) {
4743 GstEvent *event;
4744
4745 event =
4746 gst_event_new_step (GST_FORMAT_TIME, duration, rate, flush,
4747 intermediate);
4748
4749 if (!gst_element_send_event (playsink->audiochain->chain.bin, event)) {
4750 GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink");
4751 }
4752 }
4753 }
4754 GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message);
4755 break;
4756 }
4757 case GST_MESSAGE_ELEMENT:{
4758 if (gst_is_video_overlay_prepare_window_handle_message (message)) {
4759 GstVideoOverlay *overlay;
4760
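        /* cache the element that posted the prepare-window-handle message so
         * the stored window handle, event handling and render rectangle
         * settings can be (re)applied to it */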
4761 GST_OBJECT_LOCK (playsink);
4762 if (playsink->overlay_element
4763 && GST_OBJECT_CAST (playsink->overlay_element) !=
4764 GST_MESSAGE_SRC (message)) {
4765 gst_object_unref (playsink->overlay_element);
4766 playsink->overlay_element = NULL;
4767 }
4768
4769 if (!playsink->overlay_element)
4770 playsink->overlay_element =
4771 GST_VIDEO_OVERLAY (gst_object_ref (GST_MESSAGE_SRC (message)));
4772 overlay =
4773 GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
4774 GST_OBJECT_UNLOCK (playsink);
4775
4776 GST_DEBUG_OBJECT (playsink, "Got prepare-xwindow-id message");
4777
4778 if (playsink->overlay_handle_set)
4779 gst_video_overlay_set_window_handle (playsink->overlay_element,
4780 playsink->overlay_handle);
4781 if (playsink->overlay_handle_events_set)
4782 gst_video_overlay_handle_events (playsink->overlay_element,
4783 playsink->overlay_handle_events);
4784 if (playsink->overlay_render_rectangle_set)
4785 gst_video_overlay_set_render_rectangle (playsink->overlay_element,
4786 playsink->overlay_x, playsink->overlay_y,
4787 playsink->overlay_width, playsink->overlay_height);
4788
4789 gst_object_unref (overlay);
4790 gst_message_unref (message);
4791 gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (playsink));
4792 } else {
4793 GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin,
4794 message);
4795 }
4796 break;
4797 }
4798 default:
4799 GST_BIN_CLASS (gst_play_sink_parent_class)->handle_message (bin, message);
4800 break;
4801 }
4802 }
4803
4804 /* Send an event to our sinks until one of them works; don't then send to the
4805  * remaining sinks (unlike GstBin).
4806  * Special case: if a text sink is set we also need to send the event
4807  * to it, in case its source is different from the a/v stream's source.
4808 */
4809 static gboolean
4810 gst_play_sink_send_event_to_sink (GstPlaySink * playsink, GstEvent * event,
4811 gboolean force_video)
4812 {
4813 gboolean res = TRUE;
4814 if (playsink->send_event_mode == MODE_FIRST || force_video) {
4815 if (playsink->textchain && playsink->textchain->sink) {
4816 gst_event_ref (event);
4817 if ((res =
4818 gst_element_send_event (playsink->textchain->chain.bin, event))) {
4819 GST_DEBUG_OBJECT (playsink, "Sent event successfully to text sink");
4820 } else {
4821 GST_DEBUG_OBJECT (playsink, "Event failed when sent to text sink");
4822 }
4823 }
4824
4825 if (playsink->videochain) {
4826 gst_event_ref (event);
4827 if ((res =
4828 gst_element_send_event (playsink->videochain->chain.bin,
4829 event))) {
4830 GST_DEBUG_OBJECT (playsink, "Sent event successfully to video sink");
4831 goto done;
4832 }
4833 GST_DEBUG_OBJECT (playsink, "Event failed when sent to video sink");
4834 }
4835 if (!force_video && playsink->audiochain) {
4836 gst_event_ref (event);
4837 if ((res =
4838 gst_element_send_event (playsink->audiochain->chain.bin,
4839 event))) {
4840 GST_DEBUG_OBJECT (playsink, "Sent event successfully to audio sink");
4841 goto done;
4842 }
4843 GST_DEBUG_OBJECT (playsink, "Event failed when sent to audio sink");
4844 } else {
4845 res = FALSE;
4846 }
4847 } else {
4848 return
4849 GST_ELEMENT_CLASS (gst_play_sink_parent_class)->send_event
4850 (GST_ELEMENT_CAST (playsink), event);
4851 }
4852
4853 done:
4854 gst_event_unref (event);
4855 return res;
4856 }
4857
4858 /* We only want to send the event to a single sink (overriding GstBin's
4859 * behaviour), but we want to keep GstPipeline's behaviour - wrapping seek
4860 * events appropriately. So, this is a messy duplication of code. */
4861 static gboolean
4862 gst_play_sink_send_event (GstElement * element, GstEvent * event)
4863 {
4864 gboolean res = FALSE;
4865 GstEventType event_type = GST_EVENT_TYPE (event);
4866 GstPlaySink *playsink;
4867 playsink = GST_PLAY_SINK_CAST (element);
4868 switch (event_type) {
4869 case GST_EVENT_SEEK:
4870 GST_DEBUG_OBJECT (element, "Sending event to a sink");
4871 res = gst_play_sink_send_event_to_sink (playsink, event, FALSE);
4872 break;
4873 case GST_EVENT_STEP:
4874 {
4875 GstFormat format;
4876 guint64 amount;
4877 gdouble rate;
4878 gboolean flush, intermediate;
4879 gst_event_parse_step (event, &format, &amount, &rate, &flush,
4880 &intermediate);
4881 if (format == GST_FORMAT_BUFFERS) {
4882 /* for buffers, we will try to step video frames, for other formats we
4883 * send the step to all sinks */
4884 res = gst_play_sink_send_event_to_sink (playsink, event, TRUE);
4885 } else {
4886 res =
4887 GST_ELEMENT_CLASS (gst_play_sink_parent_class)->send_event (element,
4888 event);
4889 }
4890 break;
4891 }
4892 default:
4893 res =
4894 GST_ELEMENT_CLASS (gst_play_sink_parent_class)->send_event (element,
4895 event);
4896 break;
4897 }
4898 return res;
4899 }
4900
4901 static GstStateChangeReturn
4902 gst_play_sink_change_state (GstElement * element, GstStateChange transition)
4903 {
4904 GstStateChangeReturn ret;
4905 GstStateChangeReturn bret;
4906 GstPlaySink *playsink;
4907 playsink = GST_PLAY_SINK (element);
4908 switch (transition) {
4909 case GST_STATE_CHANGE_READY_TO_PAUSED:
4910 playsink->need_async_start = TRUE;
4911 /* we want to go async to PAUSED until we have configured and added the
4912 * sinks */
4913 do_async_start (playsink);
4914 ret = GST_STATE_CHANGE_ASYNC;
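/* returning ASYNC means the state change only completes once do_async_done()
 * is called, after the sinks have been configured and added */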
4915
4916 /* block all pads here */
4917 if (!gst_play_sink_reconfigure (playsink)) {
4918 ret = GST_STATE_CHANGE_FAILURE;
4919 goto activate_failed;
4920 }
4921 break;
4922 case GST_STATE_CHANGE_PAUSED_TO_READY:
4923 /* unblock all pads here */
4924 GST_PLAY_SINK_LOCK (playsink);
4925 video_set_blocked (playsink, FALSE);
4926 audio_set_blocked (playsink, FALSE);
4927 text_set_blocked (playsink, FALSE);
4928 if (playsink->vis_pad_block_id)
4929 gst_pad_remove_probe (((GstPlayVisChain *) playsink->vischain)->
4930 blockpad, playsink->vis_pad_block_id);
4931 playsink->vis_pad_block_id = 0;
4932
4933 GST_PLAY_SINK_UNLOCK (playsink);
4934 /* fall through */
4935 case GST_STATE_CHANGE_READY_TO_NULL:
4936 if (playsink->audiochain && playsink->audiochain->sink_volume) {
4937 /* remove our links to the volume elements when they were
4938 * provided by a sink */
4939 disconnect_audio_chain (playsink->audiochain, playsink);
4940 if (playsink->audiochain->volume)
4941 gst_object_unref (playsink->audiochain->volume);
4942 playsink->audiochain->volume = NULL;
4943 }
4944
4945 if (playsink->audiochain && playsink->audiochain->ts_offset) {
4946 gst_object_unref (playsink->audiochain->ts_offset);
4947 playsink->audiochain->ts_offset = NULL;
4948 }
4949
4950 if (playsink->videochain && playsink->videochain->ts_offset) {
4951 gst_object_unref (playsink->videochain->ts_offset);
4952 playsink->videochain->ts_offset = NULL;
4953 }
4954
4955 GST_OBJECT_LOCK (playsink);
4956 if (playsink->overlay_element)
4957 gst_object_unref (playsink->overlay_element);
4958 playsink->overlay_element = NULL;
4959
4960 if (playsink->colorbalance_element) {
4961 g_signal_handler_disconnect (playsink->colorbalance_element,
4962 playsink->colorbalance_value_changed_id);
4963 playsink->colorbalance_value_changed_id = 0;
4964 gst_object_unref (playsink->colorbalance_element);
4965 }
4966 playsink->colorbalance_element = NULL;
4967 GST_OBJECT_UNLOCK (playsink);
4968
4969 ret = GST_STATE_CHANGE_SUCCESS;
4970 break;
4971 default:
4972 /* all other state changes return SUCCESS by default; this value can be
4973 * overridden by the result of the children */
4974 ret = GST_STATE_CHANGE_SUCCESS;
4975 break;
4976 }
4977
4978 /* do the state change of the children */
4979 bret =
4980 GST_ELEMENT_CLASS (gst_play_sink_parent_class)->change_state (element,
4981 transition);
4982 /* now look at the result of our children and adjust the return value */
4983 switch (bret) {
4984 case GST_STATE_CHANGE_FAILURE:
4985 /* failure, we stop */
4986 goto activate_failed;
4987 case GST_STATE_CHANGE_NO_PREROLL:
4988 /* some child returned NO_PREROLL. This is unexpected but possible; we
4989 * commit our async state change (if any) and return NO_PREROLL */
4990 do_async_done (playsink);
4991 ret = bret;
4992 break;
4993 case GST_STATE_CHANGE_ASYNC:
4994 /* some child was async, return this */
4995 ret = bret;
4996 break;
4997 default:
4998 /* return our previously configured return value */
4999 break;
5000 }
5001
5002 switch (transition) {
5003 case GST_STATE_CHANGE_READY_TO_PAUSED:
5004 break;
5005 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
5006 /* FIXME Release audio device when we implement that */
5007 playsink->need_async_start = TRUE;
5008 break;
5009 case GST_STATE_CHANGE_PAUSED_TO_READY:{
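/* release the request pads we obtained from the stream synchronizer and drop
 * our references to both the sink and source side pads so that a later
 * reconfiguration can request fresh ones */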
5010 if (playsink->video_sinkpad_stream_synchronizer) {
5011 gst_element_release_request_pad (GST_ELEMENT_CAST
5012 (playsink->stream_synchronizer),
5013 playsink->video_sinkpad_stream_synchronizer);
5014 gst_object_unref (playsink->video_sinkpad_stream_synchronizer);
5015 playsink->video_sinkpad_stream_synchronizer = NULL;
5016 gst_object_unref (playsink->video_srcpad_stream_synchronizer);
5017 playsink->video_srcpad_stream_synchronizer = NULL;
5018 }
5019 if (playsink->audio_sinkpad_stream_synchronizer) {
5020 gst_element_release_request_pad (GST_ELEMENT_CAST
5021 (playsink->stream_synchronizer),
5022 playsink->audio_sinkpad_stream_synchronizer);
5023 gst_object_unref (playsink->audio_sinkpad_stream_synchronizer);
5024 playsink->audio_sinkpad_stream_synchronizer = NULL;
5025 gst_object_unref (playsink->audio_srcpad_stream_synchronizer);
5026 playsink->audio_srcpad_stream_synchronizer = NULL;
5027
5028 gst_play_sink_remove_audio_ssync_queue (playsink);
5029 }
5030 if (playsink->text_sinkpad_stream_synchronizer) {
5031 gst_element_release_request_pad (GST_ELEMENT_CAST
5032 (playsink->stream_synchronizer),
5033 playsink->text_sinkpad_stream_synchronizer);
5034 gst_object_unref (playsink->text_sinkpad_stream_synchronizer);
5035 playsink->text_sinkpad_stream_synchronizer = NULL;
5036 gst_object_unref (playsink->text_srcpad_stream_synchronizer);
5037 playsink->text_srcpad_stream_synchronizer = NULL;
5038 }
5039 }
5040 /* fall through */
5041 case GST_STATE_CHANGE_READY_TO_NULL:
5042 /* remove sinks we added */
5043 if (playsink->videodeinterlacechain) {
5044 activate_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain),
5045 FALSE);
5046 add_chain (GST_PLAY_CHAIN (playsink->videodeinterlacechain), FALSE);
5047 }
5048 if (playsink->videochain) {
5049 activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
5050 add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
5051 }
5052 if (playsink->audiochain) {
5053 activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
5054 add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
5055 }
5056 if (playsink->vischain) {
5057 activate_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE);
5058 add_chain (GST_PLAY_CHAIN (playsink->vischain), FALSE);
5059 }
5060 if (playsink->textchain) {
5061 activate_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE);
5062 add_chain (GST_PLAY_CHAIN (playsink->textchain), FALSE);
5063 }
5064 do_async_done (playsink);
5065 /* when going to READY, keep elements around as long as possible
5066 * so they can be re-used faster for the next URI;
5067 * when really going to NULL, clean up everything completely. */
5068 if (transition == GST_STATE_CHANGE_READY_TO_NULL) {
5069
5070 /* Unparent the sinks to allow reuse */
5071 if (playsink->videochain && playsink->videochain->sink)
5072 gst_bin_remove (GST_BIN_CAST (playsink->videochain->chain.bin),
5073 playsink->videochain->sink);
5074 if (playsink->audiochain && playsink->audiochain->sink)
5075 gst_bin_remove (GST_BIN_CAST (playsink->audiochain->chain.bin),
5076 playsink->audiochain->sink);
5077 if (playsink->textchain && playsink->textchain->sink)
5078 gst_bin_remove (GST_BIN_CAST (playsink->textchain->chain.bin),
5079 playsink->textchain->sink);
5080 if (playsink->audio_sink != NULL)
5081 gst_element_set_state (playsink->audio_sink, GST_STATE_NULL);
5082 if (playsink->video_sink != NULL)
5083 gst_element_set_state (playsink->video_sink, GST_STATE_NULL);
5084 if (playsink->visualisation != NULL)
5085 gst_element_set_state (playsink->visualisation, GST_STATE_NULL);
5086 if (playsink->text_sink != NULL)
5087 gst_element_set_state (playsink->text_sink, GST_STATE_NULL);
5088
5089 /* Unparent the filters to allow reuse */
5090 if (playsink->videochain && playsink->videochain->filter)
5091 gst_bin_remove (GST_BIN_CAST (playsink->videochain->chain.bin),
5092 playsink->videochain->filter);
5093 if (playsink->audiochain && playsink->audiochain->filter)
5094 gst_bin_remove (GST_BIN_CAST (playsink->audiochain->chain.bin),
5095 playsink->audiochain->filter);
5096 if (playsink->audio_filter != NULL)
5097 gst_element_set_state (playsink->audio_filter, GST_STATE_NULL);
5098 if (playsink->video_filter != NULL)
5099 gst_element_set_state (playsink->video_filter, GST_STATE_NULL);
5100
5101 free_chain ((GstPlayChain *) playsink->videodeinterlacechain);
5102 playsink->videodeinterlacechain = NULL;
5103 free_chain ((GstPlayChain *) playsink->videochain);
5104 playsink->videochain = NULL;
5105 free_chain ((GstPlayChain *) playsink->audiochain);
5106 playsink->audiochain = NULL;
5107 free_chain ((GstPlayChain *) playsink->vischain);
5108 playsink->vischain = NULL;
5109 free_chain ((GstPlayChain *) playsink->textchain);
5110 playsink->textchain = NULL;
5111 }
5112 break;
5113 default:
5114 break;
5115 }
5116 return ret;
5117 /* ERRORS */
5118 activate_failed:
5119 {
5120 GST_DEBUG_OBJECT (element,
5121 "element failed to change states -- activation problem?");
5122 do_async_done (playsink);
5123 return GST_STATE_CHANGE_FAILURE;
5124 }
5125 }
5126
5127 static void
5128 gst_play_sink_set_property (GObject * object, guint prop_id,
5129 const GValue * value, GParamSpec * spec)
5130 {
5131 GstPlaySink *playsink = GST_PLAY_SINK (object);
5132 switch (prop_id) {
5133 case PROP_FLAGS:
5134 gst_play_sink_set_flags (playsink, g_value_get_flags (value));
5135 break;
5136 case PROP_VOLUME:
5137 gst_play_sink_set_volume (playsink, g_value_get_double (value));
5138 break;
5139 case PROP_MUTE:
5140 gst_play_sink_set_mute (playsink, g_value_get_boolean (value));
5141 break;
5142 case PROP_FONT_DESC:
5143 gst_play_sink_set_font_desc (playsink, g_value_get_string (value));
5144 break;
5145 case PROP_SUBTITLE_ENCODING:
5146 gst_play_sink_set_subtitle_encoding (playsink,
5147 g_value_get_string (value));
5148 break;
5149 case PROP_VIS_PLUGIN:
5150 gst_play_sink_set_vis_plugin (playsink, g_value_get_object (value));
5151 break;
5152 case PROP_AV_OFFSET:
5153 gst_play_sink_set_av_offset (playsink, g_value_get_int64 (value));
5154 break;
5155 case PROP_TEXT_OFFSET:
5156 gst_play_sink_set_text_offset (playsink, g_value_get_int64 (value));
5157 break;
5158 case PROP_VIDEO_FILTER:
5159 gst_play_sink_set_filter (playsink, GST_PLAY_SINK_TYPE_VIDEO,
5160 g_value_get_object (value));
5161 break;
5162 case PROP_AUDIO_FILTER:
5163 gst_play_sink_set_filter (playsink, GST_PLAY_SINK_TYPE_AUDIO,
5164 g_value_get_object (value));
5165 break;
5166 case PROP_VIDEO_SINK:
5167 gst_play_sink_set_sink (playsink, GST_PLAY_SINK_TYPE_VIDEO,
5168 g_value_get_object (value));
5169 break;
5170 case PROP_AUDIO_SINK:
5171 gst_play_sink_set_sink (playsink, GST_PLAY_SINK_TYPE_AUDIO,
5172 g_value_get_object (value));
5173 break;
5174 case PROP_TEXT_SINK:
5175 gst_play_sink_set_sink (playsink, GST_PLAY_SINK_TYPE_TEXT,
5176 g_value_get_object (value));
5177 break;
5178 case PROP_SEND_EVENT_MODE:
5179 playsink->send_event_mode = g_value_get_enum (value);
5180 break;
5181 case PROP_FORCE_ASPECT_RATIO:{
5182 GstPlayVideoChain *chain;
5183 GstElement *elem;
5184
5185 playsink->force_aspect_ratio = g_value_get_boolean (value);
5186
5187 GST_PLAY_SINK_LOCK (playsink);
5188 if (playsink->videochain) {
5189 chain = (GstPlayVideoChain *) playsink->videochain;
5190
5191 if (chain->sink) {
5192 elem =
5193 gst_play_sink_find_property_sinks (playsink, chain->sink,
5194 "force-aspect-ratio", G_TYPE_BOOLEAN);
5195
5196 if (elem)
5197 g_object_set (elem, "force-aspect-ratio",
5198 playsink->force_aspect_ratio, NULL);
5199 }
5200 }
5201 GST_PLAY_SINK_UNLOCK (playsink);
5202 break;
5203 }
5204 default:
5205 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, spec);
5206 break;
5207 }
5208 }
5209
5210 static void
5211 gst_play_sink_get_property (GObject * object, guint prop_id,
5212 GValue * value, GParamSpec * spec)
5213 {
5214 GstPlaySink *playsink = GST_PLAY_SINK (object);
5215 switch (prop_id) {
5216 case PROP_FLAGS:
5217 g_value_set_flags (value, gst_play_sink_get_flags (playsink));
5218 break;
5219 case PROP_VOLUME:
5220 g_value_set_double (value, gst_play_sink_get_volume (playsink));
5221 break;
5222 case PROP_MUTE:
5223 g_value_set_boolean (value, gst_play_sink_get_mute (playsink));
5224 break;
5225 case PROP_FONT_DESC:
5226 g_value_take_string (value, gst_play_sink_get_font_desc (playsink));
5227 break;
5228 case PROP_SUBTITLE_ENCODING:
5229 g_value_take_string (value,
5230 gst_play_sink_get_subtitle_encoding (playsink));
5231 break;
5232 case PROP_VIS_PLUGIN:
5233 g_value_take_object (value, gst_play_sink_get_vis_plugin (playsink));
5234 break;
5235 case PROP_SAMPLE:
5236 gst_value_take_sample (value, gst_play_sink_get_last_sample (playsink));
5237 break;
5238 case PROP_AV_OFFSET:
5239 g_value_set_int64 (value, gst_play_sink_get_av_offset (playsink));
5240 break;
5241 case PROP_TEXT_OFFSET:
5242 g_value_set_int64 (value, gst_play_sink_get_text_offset (playsink));
5243 break;
5244 case PROP_VIDEO_FILTER:
5245 g_value_take_object (value, gst_play_sink_get_filter (playsink,
5246 GST_PLAY_SINK_TYPE_VIDEO));
5247 break;
5248 case PROP_AUDIO_FILTER:
5249 g_value_take_object (value, gst_play_sink_get_filter (playsink,
5250 GST_PLAY_SINK_TYPE_AUDIO));
5251 break;
5252 case PROP_VIDEO_SINK:
5253 g_value_take_object (value, gst_play_sink_get_sink (playsink,
5254 GST_PLAY_SINK_TYPE_VIDEO));
5255 break;
5256 case PROP_AUDIO_SINK:
5257 g_value_take_object (value, gst_play_sink_get_sink (playsink,
5258 GST_PLAY_SINK_TYPE_AUDIO));
5259 break;
5260 case PROP_TEXT_SINK:
5261 g_value_take_object (value, gst_play_sink_get_sink (playsink,
5262 GST_PLAY_SINK_TYPE_TEXT));
5263 break;
5264 case PROP_SEND_EVENT_MODE:
5265 g_value_set_enum (value, playsink->send_event_mode);
5266 break;
5267 case PROP_FORCE_ASPECT_RATIO:
5268 g_value_set_boolean (value, playsink->force_aspect_ratio);
5269 break;
5270 default:
5271 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, spec);
5272 break;
5273 }
5274 }
5275
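/* GstVideoOverlay interface: playsink proxies these calls to the overlay
 * element cached from the prepare-window-handle message and remembers the
 * values so they can be replayed when a new overlay element shows up. */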
5276 static void
5277 gst_play_sink_overlay_expose (GstVideoOverlay * overlay)
5278 {
5279 GstPlaySink *playsink = GST_PLAY_SINK (overlay);
5280 GstVideoOverlay *overlay_element;
5281
5282 GST_OBJECT_LOCK (playsink);
5283 if (playsink->overlay_element)
5284 overlay_element =
5285 GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
5286 else
5287 overlay_element = NULL;
5288 GST_OBJECT_UNLOCK (playsink);
5289
5290 if (overlay_element) {
5291 gst_video_overlay_expose (overlay_element);
5292 gst_object_unref (overlay_element);
5293 }
5294 }
5295
5296 static void
5297 gst_play_sink_overlay_handle_events (GstVideoOverlay * overlay,
5298 gboolean handle_events)
5299 {
5300 GstPlaySink *playsink = GST_PLAY_SINK (overlay);
5301 GstVideoOverlay *overlay_element;
5302
5303 GST_OBJECT_LOCK (playsink);
5304 if (playsink->overlay_element)
5305 overlay_element =
5306 GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
5307 else
5308 overlay_element = NULL;
5309 GST_OBJECT_UNLOCK (playsink);
5310
5311 playsink->overlay_handle_events_set = TRUE;
5312 playsink->overlay_handle_events = handle_events;
5313
5314 if (overlay_element) {
5315 gst_video_overlay_handle_events (overlay_element, handle_events);
5316 gst_object_unref (overlay_element);
5317 }
5318 }
5319
5320 static void
5321 gst_play_sink_overlay_set_render_rectangle (GstVideoOverlay * overlay, gint x,
5322 gint y, gint width, gint height)
5323 {
5324 GstPlaySink *playsink = GST_PLAY_SINK (overlay);
5325 GstVideoOverlay *overlay_element;
5326
5327 GST_OBJECT_LOCK (playsink);
5328 if (playsink->overlay_element)
5329 overlay_element =
5330 GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
5331 else
5332 overlay_element = NULL;
5333 GST_OBJECT_UNLOCK (playsink);
5334
5335 playsink->overlay_render_rectangle_set = TRUE;
5336 playsink->overlay_x = x;
5337 playsink->overlay_y = y;
5338 playsink->overlay_width = width;
5339 playsink->overlay_height = height;
5340
5341 if (overlay_element) {
5342 gst_video_overlay_set_render_rectangle (overlay_element, x, y, width,
5343 height);
5344 gst_object_unref (overlay_element);
5345 }
5346 }
5347
5348 static void
5349 gst_play_sink_overlay_set_window_handle (GstVideoOverlay * overlay,
5350 guintptr handle)
5351 {
5352 GstPlaySink *playsink = GST_PLAY_SINK (overlay);
5353 GstVideoOverlay *overlay_element;
5354
5355 GST_OBJECT_LOCK (playsink);
5356 if (playsink->overlay_element)
5357 overlay_element =
5358 GST_VIDEO_OVERLAY (gst_object_ref (playsink->overlay_element));
5359 else
5360 overlay_element = NULL;
5361 GST_OBJECT_UNLOCK (playsink);
5362
5363 playsink->overlay_handle_set = TRUE;
5364 playsink->overlay_handle = handle;
5365
5366 if (overlay_element) {
5367 gst_video_overlay_set_window_handle (overlay_element, handle);
5368 gst_object_unref (overlay_element);
5369 }
5370 }
5371
5372 static void
5373 gst_play_sink_overlay_init (gpointer g_iface, gpointer g_iface_data)
5374 {
5375 GstVideoOverlayInterface *iface = (GstVideoOverlayInterface *) g_iface;
5376 iface->expose = gst_play_sink_overlay_expose;
5377 iface->handle_events = gst_play_sink_overlay_handle_events;
5378 iface->set_render_rectangle = gst_play_sink_overlay_set_render_rectangle;
5379 iface->set_window_handle = gst_play_sink_overlay_set_window_handle;
5380 }
5381
5382 static void
5383 gst_play_sink_navigation_send_event (GstNavigation * navigation,
5384 GstStructure * structure)
5385 {
5386 GstPlaySink *playsink = GST_PLAY_SINK (navigation);
5387 GstBin *bin = NULL;
5388
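/* Prefer an element implementing GstNavigation inside the video chain; if
 * none is found, wrap the structure in a navigation event and push it into
 * the bin. Both paths take ownership of the structure, so free it ourselves
 * only if it was never consumed. */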
5389 GST_PLAY_SINK_LOCK (playsink);
5390 if (playsink->videochain && playsink->videochain->chain.bin)
5391 bin = GST_BIN (gst_object_ref (playsink->videochain->chain.bin));
5392 GST_PLAY_SINK_UNLOCK (playsink);
5393
5394 if (bin) {
5395 GstElement *nav = gst_bin_get_by_interface (bin, GST_TYPE_NAVIGATION);
5396
5397 if (nav) {
5398 gst_navigation_send_event (GST_NAVIGATION (nav), structure);
5399 structure = NULL;
5400 gst_object_unref (nav);
5401 } else {
5402 GstEvent *event = gst_event_new_navigation (structure);
5403 structure = NULL;
5404 gst_element_send_event (GST_ELEMENT (bin), event);
5405 }
5406
5407 gst_object_unref (bin);
5408 }
5409
5410 if (structure)
5411 gst_structure_free (structure);
5412 }
5413
5414 static void
5415 gst_play_sink_navigation_init (gpointer g_iface, gpointer g_iface_data)
5416 {
5417 GstNavigationInterface *iface = (GstNavigationInterface *) g_iface;
5418
5419 iface->send_event = gst_play_sink_navigation_send_event;
5420 }
5421
5422 static const GList *
5423 gst_play_sink_colorbalance_list_channels (GstColorBalance * balance)
5424 {
5425 GstPlaySink *playsink = GST_PLAY_SINK (balance);
5426
5427 return playsink->colorbalance_channels;
5428 }
5429
5430 static void
5431 gst_play_sink_colorbalance_set_value (GstColorBalance * balance,
5432 GstColorBalanceChannel * proxy, gint value)
5433 {
5434 GstPlaySink *playsink = GST_PLAY_SINK (balance);
5435 GList *l;
5436 gint i;
5437 GstColorBalance *balance_element = NULL;
5438
5439 GST_OBJECT_LOCK (playsink);
5440 if (playsink->colorbalance_element)
5441 balance_element =
5442 GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
5443 GST_OBJECT_UNLOCK (playsink);
5444
5445 for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
5446 GstColorBalanceChannel *proxy_tmp = l->data;
5447 gdouble new_val;
5448
5449 if (proxy_tmp != proxy)
5450 continue;
5451
5452 playsink->colorbalance_values[i] = value;
5453
5454 if (balance_element) {
5455 GstColorBalanceChannel *channel = NULL;
5456 const GList *channels, *k;
5457
5458 channels = gst_color_balance_list_channels (balance_element);
5459 for (k = channels; k; k = k->next) {
5460 GstColorBalanceChannel *tmp = k->data;
5461
5462 if (g_strrstr (tmp->label, proxy->label)) {
5463 channel = tmp;
5464 break;
5465 }
5466 }
5467
5468 g_assert (channel);
5469
5470 /* Convert to [0, 1] range */
5471 new_val =
5472 ((gdouble) value -
5473 (gdouble) proxy->min_value) / ((gdouble) proxy->max_value -
5474 (gdouble) proxy->min_value);
5475 /* Convert to channel range */
5476 new_val =
5477 channel->min_value + new_val * ((gdouble) channel->max_value -
5478 (gdouble) channel->min_value);
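/* e.g. (values purely illustrative): a proxy range of [-1000, 1000] and a
 * value of 250 give new_val = 0.625; mapped onto a channel range of
 * [0, 255] this becomes 159.375, which is rounded to 159 below. */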
5479
5480 gst_color_balance_set_value (balance_element, channel,
5481 (gint) (new_val + 0.5));
5482
5483 gst_object_unref (balance_element);
5484 }
5485
5486 gst_color_balance_value_changed (balance, proxy, value);
5487 break;
5488 }
5489 }
5490
5491 static gint
5492 gst_play_sink_colorbalance_get_value (GstColorBalance * balance,
5493 GstColorBalanceChannel * proxy)
5494 {
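/* values are served from playsink's proxy cache (colorbalance_values); the
 * cache is updated by _set_value() above and, presumably, by the handler
 * connected via colorbalance_value_changed_id when the real balance element
 * reports a change */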
5495 GstPlaySink *playsink = GST_PLAY_SINK (balance);
5496 GList *l;
5497 gint i;
5498
5499 for (i = 0, l = playsink->colorbalance_channels; l; l = l->next, i++) {
5500 GstColorBalanceChannel *proxy_tmp = l->data;
5501
5502 if (proxy_tmp != proxy)
5503 continue;
5504
5505 return playsink->colorbalance_values[i];
5506 }
5507
5508 g_return_val_if_reached (0);
5509 }
5510
5511 static GstColorBalanceType
5512 gst_play_sink_colorbalance_get_balance_type (GstColorBalance * balance)
5513 {
5514 GstPlaySink *playsink = GST_PLAY_SINK (balance);
5515 GstColorBalance *balance_element = NULL;
5516 GstColorBalanceType t = GST_COLOR_BALANCE_SOFTWARE;
5517
5518 GST_OBJECT_LOCK (playsink);
5519 if (playsink->colorbalance_element)
5520 balance_element =
5521 GST_COLOR_BALANCE (gst_object_ref (playsink->colorbalance_element));
5522 GST_OBJECT_UNLOCK (playsink);
5523
5524 if (balance_element) {
5525 t = gst_color_balance_get_balance_type (balance_element);
5526 gst_object_unref (balance_element);
5527 }
5528
5529 return t;
5530 }
5531
5532 static void
5533 gst_play_sink_colorbalance_init (gpointer g_iface, gpointer g_iface_data)
5534 {
5535 GstColorBalanceInterface *iface = (GstColorBalanceInterface *) g_iface;
5536
5537 iface->list_channels = gst_play_sink_colorbalance_list_channels;
5538 iface->set_value = gst_play_sink_colorbalance_set_value;
5539 iface->get_value = gst_play_sink_colorbalance_get_value;
5540 iface->get_balance_type = gst_play_sink_colorbalance_get_balance_type;
5541 }
5542
5543 gboolean
5544 gst_play_sink_plugin_init (GstPlugin * plugin)
5545 {
5546 GST_DEBUG_CATEGORY_INIT (gst_play_sink_debug, "playsink", 0, "play bin");
5547 return gst_element_register (plugin, "playsink", GST_RANK_NONE,
5548 GST_TYPE_PLAY_SINK);
5549 }
5550