1 /*
2 * GStreamer
3 * Copyright (C) 2016 Vivia Nikolaidou <vivia@toolsonair.com>
4 *
5 * Based on gstvideoframe-audiolevel.c:
6 * Copyright (C) 2015 Vivia Nikolaidou <vivia@toolsonair.com>
7 *
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Library General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
12 *
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Library General Public License for more details.
17 *
18 * You should have received a copy of the GNU Library General Public
19 * License along with this library; if not, write to the
20 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
21 * Boston, MA 02110-1301, USA.
22 */
23
24 /**
25 * SECTION:element-avwait
26 * @title: avwait
27 *
28 * This element will drop all buffers until a specific timecode or running
29 * time has been reached. It will then pass through both audio and video,
30 * starting from that specific timecode or running time, making sure that
31 * audio starts as early as possible after the video (or at the same time as
32 * the video). In the "video-first" mode, it only drops audio buffers until
33 * video has started.
34 *
35 * The "recording" property acts essentially like a valve connected before
36 * everything else. If recording is FALSE, all buffers are dropped regardless
37 * of settings. If recording is TRUE, the other settings (mode,
38 * target-timecode, target-running-time, etc.) are taken into account. Audio
39 * will always start and end together with the video, as long as the stream
40 * itself doesn't start too late or end too early.
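 *
 * Whenever passing starts or stops, avwait posts an "avwait-status" element
 * message on the bus, carrying a boolean "dropping" field and a
 * "running-time" field (see gst_avwait_send_element_message() below). The
 * following is only a rough, untested sketch of how an application might
 * react to it; the callback name and the way it is attached to the bus are
 * illustrative, but the structure name and field names match what this
 * element posts:
 * |[<!-- language="C" -->
 * static gboolean
 * on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
 * {
 *   if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
 *     const GstStructure *s = gst_message_get_structure (msg);
 *
 *     if (s && gst_structure_has_name (s, "avwait-status")) {
 *       gboolean dropping;
 *       GstClockTime running_time;
 *
 *       gst_structure_get_boolean (s, "dropping", &dropping);
 *       gst_structure_get_clock_time (s, "running-time", &running_time);
 *       g_print ("avwait is now %s at %" GST_TIME_FORMAT "\n",
 *           dropping ? "dropping" : "passing", GST_TIME_ARGS (running_time));
 *     }
 *   }
 *   return TRUE;
 * }
 * ]|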
41 *
42 * ## Example launch line
43 * |[
44 * gst-launch-1.0 filesrc location="my_file" ! decodebin name=d ! "audio/x-raw" ! avwait name=l target-timecode-string="00:00:04:00" ! autoaudiosink d. ! "video/x-raw" ! timecodestamper ! l. l. ! queue ! timeoverlay time-mode=time-code ! autovideosink
 * ]|
45 *
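 * A variant that waits on a running time instead of a timecode (an untested
 * sketch along the same lines; "mode" and "target-running-time" are the
 * properties defined below, and 4000000000 nanoseconds is simply 4 seconds):
 * |[
 * gst-launch-1.0 filesrc location="my_file" ! decodebin name=d ! "audio/x-raw" ! avwait name=l mode=running-time target-running-time=4000000000 ! autoaudiosink d. ! "video/x-raw" ! l. l. ! queue ! autovideosink
 * ]|
 *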
46 */
47
48 #ifdef HAVE_CONFIG_H
49 #include "config.h"
50 #endif
51
52 #include "gstavwait.h"
53
54 #define GST_CAT_DEFAULT gst_avwait_debug
55 GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
56
57 static GstStaticPadTemplate audio_sink_template =
58 GST_STATIC_PAD_TEMPLATE ("asink",
59 GST_PAD_SINK,
60 GST_PAD_ALWAYS,
61 GST_STATIC_CAPS ("audio/x-raw")
62 );
63
64 static GstStaticPadTemplate audio_src_template =
65 GST_STATIC_PAD_TEMPLATE ("asrc",
66 GST_PAD_SRC,
67 GST_PAD_ALWAYS,
68 GST_STATIC_CAPS ("audio/x-raw")
69 );
70
71 static GstStaticPadTemplate video_sink_template =
72 GST_STATIC_PAD_TEMPLATE ("vsink",
73 GST_PAD_SINK,
74 GST_PAD_ALWAYS,
75 GST_STATIC_CAPS ("video/x-raw")
76 );
77
78 static GstStaticPadTemplate video_src_template =
79 GST_STATIC_PAD_TEMPLATE ("vsrc",
80 GST_PAD_SRC,
81 GST_PAD_ALWAYS,
82 GST_STATIC_CAPS ("video/x-raw")
83 );
84
85 #define parent_class gst_avwait_parent_class
86 G_DEFINE_TYPE (GstAvWait, gst_avwait, GST_TYPE_ELEMENT);
87 GST_ELEMENT_REGISTER_DEFINE (avwait, "avwait", GST_RANK_NONE, GST_TYPE_AVWAIT);
88
89 enum
90 {
91 PROP_0,
92 PROP_TARGET_TIME_CODE,
93 PROP_TARGET_TIME_CODE_STRING,
94 PROP_TARGET_RUNNING_TIME,
95 PROP_END_TIME_CODE,
96 PROP_END_RUNNING_TIME,
97 PROP_RECORDING,
98 PROP_MODE
99 };
100
101 #define DEFAULT_TARGET_TIMECODE_STR "00:00:00:00"
102 #define DEFAULT_TARGET_RUNNING_TIME GST_CLOCK_TIME_NONE
103 #define DEFAULT_END_RUNNING_TIME GST_CLOCK_TIME_NONE
104 #define DEFAULT_MODE MODE_TIMECODE
105
106 /* flags for self->must_send_end_message */
107 enum
108 {
109 END_MESSAGE_NORMAL = 0,
110 END_MESSAGE_STREAM_ENDED = 1,
111 END_MESSAGE_VIDEO_PUSHED = 2,
112 END_MESSAGE_AUDIO_PUSHED = 4
113 };
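/* Rough flag lifetime, as used by the chain and EOS handlers below:
 * END_MESSAGE_STREAM_ENDED is set when the recorded region ends (end
 * timecode / end running time reached, recording switched off, or EOS while
 * recording). Whichever of the two streams then finishes pushing first ORs
 * in its *_PUSHED flag; when the other stream catches up, the final
 * "avwait-status" message is posted and the state returns to
 * END_MESSAGE_NORMAL. */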
114
115 static void gst_avwait_set_property (GObject * object,
116 guint prop_id, const GValue * value, GParamSpec * pspec);
117 static void gst_avwait_get_property (GObject * object,
118 guint prop_id, GValue * value, GParamSpec * pspec);
119
120 static GstFlowReturn gst_avwait_asink_chain (GstPad * pad,
121 GstObject * parent, GstBuffer * inbuf);
122 static GstFlowReturn gst_avwait_vsink_chain (GstPad * pad,
123 GstObject * parent, GstBuffer * inbuf);
124 static gboolean gst_avwait_asink_event (GstPad * pad,
125 GstObject * parent, GstEvent * event);
126 static gboolean gst_avwait_vsink_event (GstPad * pad,
127 GstObject * parent, GstEvent * event);
128 static GstIterator *gst_avwait_iterate_internal_links (GstPad *
129 pad, GstObject * parent);
130
131 static void gst_avwait_finalize (GObject * gobject);
132
133 static GstStateChangeReturn gst_avwait_change_state (GstElement *
134 element, GstStateChange transition);
135
136 static GType
137 gst_avwait_mode_get_type (void)
138 {
139 static GType gtype = 0;
140
141 if (gtype == 0) {
142 static const GEnumValue values[] = {
143 {MODE_TIMECODE, "time code (default)", "timecode"},
144 {MODE_RUNNING_TIME, "running time", "running-time"},
145 {MODE_VIDEO_FIRST, "video first", "video-first"},
146 {0, NULL, NULL}
147 };
148
149 gtype = g_enum_register_static ("GstAvWaitMode", values);
150 }
151 return gtype;
152 }
153
154 static void
155 gst_avwait_class_init (GstAvWaitClass * klass)
156 {
157 GstElementClass *gstelement_class;
158 GObjectClass *gobject_class = (GObjectClass *) klass;
159
160 GST_DEBUG_CATEGORY_INIT (gst_avwait_debug, "avwait", 0, "avwait");
161
162 gstelement_class = (GstElementClass *) klass;
163
164 gst_element_class_set_static_metadata (gstelement_class,
165 "Timecode Wait", "Filter/Audio/Video",
166 "Drops all audio/video until a specific timecode or running time has been reached",
167 "Vivia Nikolaidou <vivia@toolsonair.com>");
168
169 gobject_class->set_property = gst_avwait_set_property;
170 gobject_class->get_property = gst_avwait_get_property;
171
172 g_object_class_install_property (gobject_class, PROP_TARGET_TIME_CODE_STRING,
173 g_param_spec_string ("target-timecode-string", "Target timecode (string)",
174 "Timecode to wait for in timecode mode (string). Must take the form 00:00:00:00",
175 DEFAULT_TARGET_TIMECODE_STR,
176 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
177
178 g_object_class_install_property (gobject_class, PROP_TARGET_TIME_CODE,
179 g_param_spec_boxed ("target-timecode", "Target timecode (object)",
180 "Timecode to wait for in timecode mode (object)",
181 GST_TYPE_VIDEO_TIME_CODE,
182 GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
183 G_PARAM_STATIC_STRINGS));
184
185 g_object_class_install_property (gobject_class, PROP_TARGET_RUNNING_TIME,
186 g_param_spec_uint64 ("target-running-time", "Target running time",
187 "Running time to wait for in running-time mode",
188 0, G_MAXUINT64,
189 DEFAULT_TARGET_RUNNING_TIME,
190 GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
191 G_PARAM_STATIC_STRINGS));
192
193 g_object_class_install_property (gobject_class, PROP_MODE,
194 g_param_spec_enum ("mode", "Mode",
195 "Operation mode: What to wait for",
196 GST_TYPE_AVWAIT_MODE,
197 DEFAULT_MODE,
198 GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
199 G_PARAM_STATIC_STRINGS));
200
201 g_object_class_install_property (gobject_class, PROP_END_TIME_CODE,
202 g_param_spec_boxed ("end-timecode", "End timecode (object)",
203 "Timecode to end at in timecode mode (object)",
204 GST_TYPE_VIDEO_TIME_CODE,
205 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
206
207 g_object_class_install_property (gobject_class, PROP_END_RUNNING_TIME,
208 g_param_spec_uint64 ("end-running-time", "End running time",
209 "Running time to end at in running-time mode",
210 0, G_MAXUINT64,
211 DEFAULT_END_RUNNING_TIME,
212 GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
213 G_PARAM_STATIC_STRINGS));
214
215 g_object_class_install_property (gobject_class, PROP_RECORDING,
216 g_param_spec_boolean ("recording",
217 "Recording state",
218 "Whether the element is stopped or recording. "
219 "If set to FALSE, all buffers will be dropped regardless of settings.",
220 TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
221
222 gobject_class->finalize = gst_avwait_finalize;
223 gstelement_class->change_state = gst_avwait_change_state;
224
225 gst_element_class_add_static_pad_template (gstelement_class,
226 &audio_src_template);
227 gst_element_class_add_static_pad_template (gstelement_class,
228 &audio_sink_template);
229
230 gst_element_class_add_static_pad_template (gstelement_class,
231 &video_src_template);
232 gst_element_class_add_static_pad_template (gstelement_class,
233 &video_sink_template);
234
235 gst_type_mark_as_plugin_api (GST_TYPE_AVWAIT_MODE, 0);
236 }
237
238 static void
239 gst_avwait_init (GstAvWait * self)
240 {
241 self->asinkpad =
242 gst_pad_new_from_static_template (&audio_sink_template, "asink");
243 gst_pad_set_chain_function (self->asinkpad,
244 GST_DEBUG_FUNCPTR (gst_avwait_asink_chain));
245 gst_pad_set_event_function (self->asinkpad,
246 GST_DEBUG_FUNCPTR (gst_avwait_asink_event));
247 gst_pad_set_iterate_internal_links_function (self->asinkpad,
248 GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
249 gst_element_add_pad (GST_ELEMENT (self), self->asinkpad);
250
251 self->vsinkpad =
252 gst_pad_new_from_static_template (&video_sink_template, "vsink");
253 gst_pad_set_chain_function (self->vsinkpad,
254 GST_DEBUG_FUNCPTR (gst_avwait_vsink_chain));
255 gst_pad_set_event_function (self->vsinkpad,
256 GST_DEBUG_FUNCPTR (gst_avwait_vsink_event));
257 gst_pad_set_iterate_internal_links_function (self->vsinkpad,
258 GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
259 gst_element_add_pad (GST_ELEMENT (self), self->vsinkpad);
260
261 self->asrcpad =
262 gst_pad_new_from_static_template (&audio_src_template, "asrc");
263 gst_pad_set_iterate_internal_links_function (self->asrcpad,
264 GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
265 gst_element_add_pad (GST_ELEMENT (self), self->asrcpad);
266
267 self->vsrcpad =
268 gst_pad_new_from_static_template (&video_src_template, "vsrc");
269 gst_pad_set_iterate_internal_links_function (self->vsrcpad,
270 GST_DEBUG_FUNCPTR (gst_avwait_iterate_internal_links));
271 gst_element_add_pad (GST_ELEMENT (self), self->vsrcpad);
272
273 GST_PAD_SET_PROXY_CAPS (self->asinkpad);
274 GST_PAD_SET_PROXY_ALLOCATION (self->asinkpad);
275
276 GST_PAD_SET_PROXY_CAPS (self->asrcpad);
277 GST_PAD_SET_PROXY_SCHEDULING (self->asrcpad);
278
279 GST_PAD_SET_PROXY_CAPS (self->vsinkpad);
280 GST_PAD_SET_PROXY_ALLOCATION (self->vsinkpad);
281
282 GST_PAD_SET_PROXY_CAPS (self->vsrcpad);
283 GST_PAD_SET_PROXY_SCHEDULING (self->vsrcpad);
284
285 self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
286 self->last_seen_video_running_time = GST_CLOCK_TIME_NONE;
287 self->first_audio_running_time = GST_CLOCK_TIME_NONE;
288 self->last_seen_tc = NULL;
289
290 self->video_eos_flag = FALSE;
291 self->audio_eos_flag = FALSE;
292 self->video_flush_flag = FALSE;
293 self->audio_flush_flag = FALSE;
294 self->shutdown_flag = FALSE;
295 self->dropping = TRUE;
296 self->tc = gst_video_time_code_new_empty ();
297 self->end_tc = NULL;
298 self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
299 self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
300 self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
301 self->recording = TRUE;
302
303 self->target_running_time = DEFAULT_TARGET_RUNNING_TIME;
304 self->end_running_time = DEFAULT_END_RUNNING_TIME;
305 self->mode = DEFAULT_MODE;
306
307 gst_video_info_init (&self->vinfo);
308 g_mutex_init (&self->mutex);
309 g_cond_init (&self->cond);
310 g_cond_init (&self->audio_cond);
311 }
312
313 static void
314 gst_avwait_send_element_message (GstAvWait * self, gboolean dropping,
315 GstClockTime running_time)
316 {
317 if (!gst_element_post_message (GST_ELEMENT (self),
318 gst_message_new_element (GST_OBJECT (self),
319 gst_structure_new ("avwait-status",
320 "dropping", G_TYPE_BOOLEAN, dropping,
321 "running-time", GST_TYPE_CLOCK_TIME, running_time, NULL)))) {
322 GST_ERROR_OBJECT (self, "Unable to send element message!");
323 g_assert_not_reached ();
324 }
325 }
326
327 static GstStateChangeReturn
328 gst_avwait_change_state (GstElement * element, GstStateChange transition)
329 {
330 GstStateChangeReturn ret;
331 GstAvWait *self = GST_AVWAIT (element);
332
333 switch (transition) {
334 case GST_STATE_CHANGE_PAUSED_TO_READY:
335 g_mutex_lock (&self->mutex);
336 self->shutdown_flag = TRUE;
337 g_cond_signal (&self->cond);
338 g_cond_signal (&self->audio_cond);
339 g_mutex_unlock (&self->mutex);
340 break;
341 case GST_STATE_CHANGE_READY_TO_PAUSED:
342 g_mutex_lock (&self->mutex);
343 self->shutdown_flag = FALSE;
344 self->video_eos_flag = FALSE;
345 self->audio_eos_flag = FALSE;
346 self->video_flush_flag = FALSE;
347 self->audio_flush_flag = FALSE;
348 self->must_send_end_message = END_MESSAGE_NORMAL;
349 g_mutex_unlock (&self->mutex);
350 default:
351 break;
352 }
353
354 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
355
356 switch (transition) {
357 case GST_STATE_CHANGE_PAUSED_TO_READY:{
358 gboolean send_message = FALSE;
359
360 g_mutex_lock (&self->mutex);
361 if (self->mode != MODE_RUNNING_TIME) {
362 GST_DEBUG_OBJECT (self, "First time reset in paused to ready");
363 self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
364 self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
365 self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
366 self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
367 }
368 if (!self->dropping) {
369 self->dropping = TRUE;
370 send_message = TRUE;
371 }
372 gst_segment_init (&self->asegment, GST_FORMAT_UNDEFINED);
373 self->asegment.position = GST_CLOCK_TIME_NONE;
374 gst_segment_init (&self->vsegment, GST_FORMAT_UNDEFINED);
375 self->vsegment.position = GST_CLOCK_TIME_NONE;
376 gst_video_info_init (&self->vinfo);
377 self->last_seen_video_running_time = GST_CLOCK_TIME_NONE;
378 self->first_audio_running_time = GST_CLOCK_TIME_NONE;
379 if (self->last_seen_tc)
380 gst_video_time_code_free (self->last_seen_tc);
381 self->last_seen_tc = NULL;
382 g_mutex_unlock (&self->mutex);
383
384 if (send_message)
385 gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
386 break;
387 }
388 default:
389 break;
390 }
391
392 return ret;
393 }
394
395 static void
396 gst_avwait_finalize (GObject * object)
397 {
398 GstAvWait *self = GST_AVWAIT (object);
399
400 if (self->tc) {
401 gst_video_time_code_free (self->tc);
402 self->tc = NULL;
403 }
404
405 if (self->end_tc) {
406 gst_video_time_code_free (self->end_tc);
407 self->end_tc = NULL;
408 }
409
410 g_mutex_clear (&self->mutex);
411 g_cond_clear (&self->cond);
412 g_cond_clear (&self->audio_cond);
413
414 G_OBJECT_CLASS (parent_class)->finalize (object);
415 }
416
417 static void
418 gst_avwait_get_property (GObject * object, guint prop_id,
419 GValue * value, GParamSpec * pspec)
420 {
421 GstAvWait *self = GST_AVWAIT (object);
422
423 switch (prop_id) {
424 case PROP_TARGET_TIME_CODE_STRING:{
425 g_mutex_lock (&self->mutex);
426 if (self->tc)
427 g_value_take_string (value, gst_video_time_code_to_string (self->tc));
428 else
429 g_value_set_string (value, DEFAULT_TARGET_TIMECODE_STR);
430 g_mutex_unlock (&self->mutex);
431 break;
432 }
433 case PROP_TARGET_TIME_CODE:{
434 g_mutex_lock (&self->mutex);
435 g_value_set_boxed (value, self->tc);
436 g_mutex_unlock (&self->mutex);
437 break;
438 }
439 case PROP_END_TIME_CODE:{
440 g_mutex_lock (&self->mutex);
441 g_value_set_boxed (value, self->end_tc);
442 g_mutex_unlock (&self->mutex);
443 break;
444 }
445 case PROP_TARGET_RUNNING_TIME:{
446 g_mutex_lock (&self->mutex);
447 g_value_set_uint64 (value, self->target_running_time);
448 g_mutex_unlock (&self->mutex);
449 break;
450 }
451 case PROP_END_RUNNING_TIME:{
452 g_mutex_lock (&self->mutex);
453 g_value_set_uint64 (value, self->end_running_time);
454 g_mutex_unlock (&self->mutex);
455 break;
456 }
457 case PROP_RECORDING:{
458 g_mutex_lock (&self->mutex);
459 g_value_set_boolean (value, self->recording);
460 g_mutex_unlock (&self->mutex);
461 break;
462 }
463 case PROP_MODE:{
464 g_mutex_lock (&self->mutex);
465 g_value_set_enum (value, self->mode);
466 g_mutex_unlock (&self->mutex);
467 break;
468 }
469 default:
470 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
471 break;
472 }
473 }
474
475 static void
476 gst_avwait_set_property (GObject * object, guint prop_id,
477 const GValue * value, GParamSpec * pspec)
478 {
479 GstAvWait *self = GST_AVWAIT (object);
480
481 switch (prop_id) {
482 case PROP_TARGET_TIME_CODE_STRING:{
483 gchar **parts;
484 const gchar *tc_str;
485 guint hours, minutes, seconds, frames;
486
487 tc_str = g_value_get_string (value);
488 parts = g_strsplit (tc_str, ":", 4);
489 if (!parts || !parts[0] || !parts[1] || !parts[2] || parts[3] == NULL) {
490 GST_ERROR_OBJECT (self,
491 "Error: Could not parse timecode %s. Please input a timecode in the form 00:00:00:00",
492 tc_str);
493 g_strfreev (parts);
494 return;
495 }
496 hours = g_ascii_strtoll (parts[0], NULL, 10);
497 minutes = g_ascii_strtoll (parts[1], NULL, 10);
498 seconds = g_ascii_strtoll (parts[2], NULL, 10);
499 frames = g_ascii_strtoll (parts[3], NULL, 10);
500 g_mutex_lock (&self->mutex);
501 if (self->tc)
502 gst_video_time_code_free (self->tc);
503 self->tc = gst_video_time_code_new (0, 1, NULL, 0, hours, minutes,
504 seconds, frames, 0);
505 if (GST_VIDEO_INFO_FORMAT (&self->vinfo) != GST_VIDEO_FORMAT_UNKNOWN
506 && self->vinfo.fps_n != 0) {
507 self->tc->config.fps_n = self->vinfo.fps_n;
508 self->tc->config.fps_d = self->vinfo.fps_d;
509 }
510 g_mutex_unlock (&self->mutex);
511 g_strfreev (parts);
512 break;
513 }
514 case PROP_TARGET_TIME_CODE:{
515 g_mutex_lock (&self->mutex);
516 if (self->tc)
517 gst_video_time_code_free (self->tc);
518 self->tc = g_value_dup_boxed (value);
519 if (self->tc && self->tc->config.fps_n == 0
520 && GST_VIDEO_INFO_FORMAT (&self->vinfo) !=
521 GST_VIDEO_FORMAT_UNKNOWN && self->vinfo.fps_n != 0) {
522 self->tc->config.fps_n = self->vinfo.fps_n;
523 self->tc->config.fps_d = self->vinfo.fps_d;
524 }
525 g_mutex_unlock (&self->mutex);
526 break;
527 }
528 case PROP_END_TIME_CODE:{
529 g_mutex_lock (&self->mutex);
530 if (self->end_tc)
531 gst_video_time_code_free (self->end_tc);
532 self->end_tc = g_value_dup_boxed (value);
533 if (self->end_tc && self->end_tc->config.fps_n == 0
534 && GST_VIDEO_INFO_FORMAT (&self->vinfo) !=
535 GST_VIDEO_FORMAT_UNKNOWN && self->vinfo.fps_n != 0) {
536 self->end_tc->config.fps_n = self->vinfo.fps_n;
537 self->end_tc->config.fps_d = self->vinfo.fps_d;
538 }
539 g_mutex_unlock (&self->mutex);
540 break;
541 }
542 case PROP_TARGET_RUNNING_TIME:{
543 g_mutex_lock (&self->mutex);
544 self->target_running_time = g_value_get_uint64 (value);
545 if (self->mode == MODE_RUNNING_TIME) {
546 if (self->target_running_time > self->last_seen_video_running_time) {
547 self->dropping = TRUE;
548 }
549 }
550 g_mutex_unlock (&self->mutex);
551 break;
552 }
553 case PROP_END_RUNNING_TIME:{
554 g_mutex_lock (&self->mutex);
555 self->end_running_time = g_value_get_uint64 (value);
556 if (self->mode == MODE_RUNNING_TIME) {
557 if (self->end_running_time >= self->last_seen_video_running_time) {
558 self->dropping = TRUE;
559 }
560 }
561 g_mutex_unlock (&self->mutex);
562 break;
563 }
564 case PROP_MODE:{
565 GstAvWaitMode old_mode;
566
567 g_mutex_lock (&self->mutex);
568 old_mode = self->mode;
569 self->mode = g_value_get_enum (value);
570 if (self->mode != old_mode) {
571 switch (self->mode) {
572 case MODE_TIMECODE:
573 if (self->last_seen_tc && self->tc &&
574 gst_video_time_code_compare (self->last_seen_tc,
575 self->tc) < 0) {
576 self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
577 self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
578 self->dropping = TRUE;
579 }
580 break;
581 case MODE_RUNNING_TIME:
582 self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
583 self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
584 if (self->target_running_time > self->last_seen_video_running_time
585 || self->end_running_time >=
586 self->last_seen_video_running_time) {
587 self->dropping = TRUE;
588 }
589 break;
590 /* Let the chain functions handle the rest */
591 case MODE_VIDEO_FIRST:
592 /* pass-through */
593 default:
594 break;
595 }
596 }
597 g_mutex_unlock (&self->mutex);
598 break;
599 }
600 case PROP_RECORDING:{
601 g_mutex_lock (&self->mutex);
602 self->recording = g_value_get_boolean (value);
603 g_mutex_unlock (&self->mutex);
604 break;
605 }
606 default:
607 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
608 break;
609 }
610 }
611
612 static gboolean
613 gst_avwait_vsink_event (GstPad * pad, GstObject * parent, GstEvent * event)
614 {
615 GstAvWait *self = GST_AVWAIT (parent);
616 GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
617
618 switch (GST_EVENT_TYPE (event)) {
619 case GST_EVENT_SEGMENT:{
620 GstSegment segment;
621 gboolean send_message = FALSE;
622 gboolean segment_changed;
623
624 g_mutex_lock (&self->mutex);
625 gst_event_copy_segment (event, &segment);
626 segment.position = self->vsegment.position;
627 segment_changed = !gst_segment_is_equal (&segment, &self->vsegment);
628 self->vsegment = segment;
629 if (self->vsegment.format != GST_FORMAT_TIME) {
630 GST_ERROR_OBJECT (self, "Invalid segment format");
631 g_mutex_unlock (&self->mutex);
632 gst_event_unref (event);
633 return FALSE;
634 }
635 if (segment_changed) {
636 GST_DEBUG_OBJECT (self, "First time reset in video segment");
637 self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
638 self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
639 self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
640 self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
641 if (!self->dropping) {
642 self->dropping = TRUE;
643 send_message = TRUE;
644 }
645 self->vsegment.position = GST_CLOCK_TIME_NONE;
646 }
647 g_mutex_unlock (&self->mutex);
648
649 if (send_message)
650 gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
651 break;
652 }
653 case GST_EVENT_GAP:
654 gst_event_unref (event);
655 return TRUE;
656 case GST_EVENT_EOS:{
657 GstClockTime running_time;
658 gboolean send_message = FALSE;
659 GstClockTime audio_running_time_to_end_at;
660
661 g_mutex_lock (&self->mutex);
662 self->video_eos_flag = TRUE;
663
664 /* If we were recording then we'd be done with it at EOS of the video
665 * pad once the audio has caught up, if it has to */
666 running_time = self->last_seen_video_running_time;
667 if (self->was_recording) {
668 GST_INFO_OBJECT (self, "Recording stopped at EOS at %" GST_TIME_FORMAT,
669 GST_TIME_ARGS (running_time));
670
671 if (running_time > self->running_time_to_wait_for
672 && running_time <= self->running_time_to_end_at) {
673 /* We just stopped recording: synchronise the audio */
674 self->audio_running_time_to_end_at = running_time;
675 self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
676 } else if (running_time < self->running_time_to_wait_for
677 && self->running_time_to_wait_for != GST_CLOCK_TIME_NONE) {
678 self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
679 }
680 }
681
682 g_cond_signal (&self->cond);
683
684 if (self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED) {
685 self->must_send_end_message = END_MESSAGE_NORMAL;
686 send_message = TRUE;
687 audio_running_time_to_end_at = self->audio_running_time_to_end_at;
688 } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
689 self->must_send_end_message |= END_MESSAGE_VIDEO_PUSHED;
690 }
691 g_mutex_unlock (&self->mutex);
692
693 if (send_message)
694 gst_avwait_send_element_message (self, TRUE,
695 audio_running_time_to_end_at);
696 break;
697 }
698 case GST_EVENT_FLUSH_START:
699 g_mutex_lock (&self->mutex);
700 self->video_flush_flag = TRUE;
701 g_cond_signal (&self->audio_cond);
702 g_mutex_unlock (&self->mutex);
703 break;
704 case GST_EVENT_FLUSH_STOP:{
705 gboolean send_message = FALSE;
706
707 g_mutex_lock (&self->mutex);
708 self->video_flush_flag = FALSE;
709 GST_DEBUG_OBJECT (self, "First time reset in video flush");
710 self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
711 self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
712 self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
713 self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
714 if (!self->dropping) {
715 self->dropping = TRUE;
716 send_message = TRUE;
717 }
718 gst_segment_init (&self->vsegment, GST_FORMAT_UNDEFINED);
719 self->vsegment.position = GST_CLOCK_TIME_NONE;
720 g_mutex_unlock (&self->mutex);
721
722 if (send_message)
723 gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
724 break;
725 }
726 case GST_EVENT_CAPS:{
727 GstCaps *caps;
728 gst_event_parse_caps (event, &caps);
729 GST_DEBUG_OBJECT (self, "Got caps %" GST_PTR_FORMAT, caps);
730 g_mutex_lock (&self->mutex);
731 if (!gst_video_info_from_caps (&self->vinfo, caps)) {
732 gst_event_unref (event);
733 g_mutex_unlock (&self->mutex);
734 return FALSE;
735 }
736 if (self->tc && self->tc->config.fps_n == 0 && self->vinfo.fps_n != 0) {
737 self->tc->config.fps_n = self->vinfo.fps_n;
738 self->tc->config.fps_d = self->vinfo.fps_d;
739 }
740 if (self->end_tc && self->end_tc->config.fps_n == 0
741 && self->vinfo.fps_n != 0) {
742 self->end_tc->config.fps_n = self->vinfo.fps_n;
743 self->end_tc->config.fps_d = self->vinfo.fps_d;
744 }
745 g_mutex_unlock (&self->mutex);
746 break;
747 }
748 default:
749 break;
750 }
751 return gst_pad_event_default (pad, parent, event);
752 }
753
754 static gboolean
755 gst_avwait_asink_event (GstPad * pad, GstObject * parent, GstEvent * event)
756 {
757 GstAvWait *self = GST_AVWAIT (parent);
758 GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
759
760 switch (GST_EVENT_TYPE (event)) {
761 case GST_EVENT_SEGMENT:{
762 GstSegment segment;
763 gboolean segment_changed;
764
765 g_mutex_lock (&self->mutex);
766 gst_event_copy_segment (event, &segment);
767 segment.position = self->asegment.position;
768 segment_changed = !gst_segment_is_equal (&segment, &self->asegment);
769 self->asegment = segment;
770
771 if (self->asegment.format != GST_FORMAT_TIME) {
772 GST_ERROR_OBJECT (self, "Invalid segment format");
773 g_mutex_unlock (&self->mutex);
774 gst_event_unref (event);
775 return FALSE;
776 }
777
778 if (segment_changed) {
779 self->asegment.position = GST_CLOCK_TIME_NONE;
780 }
781 g_mutex_unlock (&self->mutex);
782 break;
783 }
784 case GST_EVENT_FLUSH_START:
785 g_mutex_lock (&self->mutex);
786 self->audio_flush_flag = TRUE;
787 g_cond_signal (&self->cond);
788 g_mutex_unlock (&self->mutex);
789 break;
790 case GST_EVENT_EOS:{
791 gboolean send_message = FALSE;
792 GstClockTime audio_running_time_to_end_at;
793
794 g_mutex_lock (&self->mutex);
795 self->audio_eos_flag = TRUE;
796 g_cond_signal (&self->audio_cond);
797
798 if ((self->must_send_end_message & END_MESSAGE_VIDEO_PUSHED)) {
799 self->must_send_end_message = END_MESSAGE_NORMAL;
800 audio_running_time_to_end_at = self->audio_running_time_to_end_at;
801 send_message = TRUE;
802 } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
803 self->must_send_end_message |= END_MESSAGE_AUDIO_PUSHED;
804 } else {
805 self->must_send_end_message = END_MESSAGE_NORMAL;
806 }
807 g_mutex_unlock (&self->mutex);
808
809 if (send_message)
810 gst_avwait_send_element_message (self, TRUE,
811 audio_running_time_to_end_at);
812 break;
813 }
814 case GST_EVENT_FLUSH_STOP:
815 g_mutex_lock (&self->mutex);
816 self->audio_flush_flag = FALSE;
817 gst_segment_init (&self->asegment, GST_FORMAT_UNDEFINED);
818 self->asegment.position = GST_CLOCK_TIME_NONE;
819 g_mutex_unlock (&self->mutex);
820 break;
821 case GST_EVENT_CAPS:{
822 GstCaps *caps;
823 gst_event_parse_caps (event, &caps);
824 GST_DEBUG_OBJECT (self, "Got caps %" GST_PTR_FORMAT, caps);
825 g_mutex_lock (&self->mutex);
826 if (!gst_audio_info_from_caps (&self->ainfo, caps)) {
827 g_mutex_unlock (&self->mutex);
828 gst_event_unref (event);
829 return FALSE;
830 }
831 g_mutex_unlock (&self->mutex);
832 break;
833 }
834 default:
835 break;
836 }
837
838 return gst_pad_event_default (pad, parent, event);
839 }
840
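/* Video chain: in video-first mode, wait until the audio pad has seen its
 * first buffer; then, depending on the mode, decide whether this frame is
 * before the target, inside the recorded region or past the end, update the
 * running times the audio chain synchronises against, and post
 * "avwait-status" messages when passing starts or stops. */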
841 static GstFlowReturn
842 gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
843 {
844 GstClockTime timestamp;
845 GstAvWait *self = GST_AVWAIT (parent);
846 GstClockTime running_time;
847 GstVideoTimeCode *tc = NULL;
848 GstVideoTimeCodeMeta *tc_meta;
849 gboolean retry = FALSE;
850 GstFlowReturn ret = GST_FLOW_OK;
851 gboolean send_message = FALSE;
852 GstClockTime message_running_time;
853 gboolean message_dropping;
854
855 timestamp = GST_BUFFER_TIMESTAMP (inbuf);
856 if (timestamp == GST_CLOCK_TIME_NONE) {
857 gst_buffer_unref (inbuf);
858 return GST_FLOW_ERROR;
859 }
860
861 g_mutex_lock (&self->mutex);
862 self->vsegment.position = timestamp;
863 running_time =
864 gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
865 self->vsegment.position);
866 self->last_seen_video_running_time = running_time;
867
868 tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
869 if (tc_meta) {
870 tc = gst_video_time_code_copy (&tc_meta->tc);
871 if (self->last_seen_tc) {
872 gst_video_time_code_free (self->last_seen_tc);
873 }
874 self->last_seen_tc = tc;
875 }
876
877 while (self->mode == MODE_VIDEO_FIRST
878 && self->first_audio_running_time == GST_CLOCK_TIME_NONE
879 && !self->audio_eos_flag
880 && !self->shutdown_flag && !self->video_flush_flag) {
881 GST_DEBUG_OBJECT (self, "Waiting for first audio buffer");
882 g_cond_wait (&self->audio_cond, &self->mutex);
883 }
884
885 if (self->video_flush_flag || self->shutdown_flag) {
886 GST_DEBUG_OBJECT (self, "Shutting down, ignoring buffer");
887 gst_buffer_unref (inbuf);
888 g_mutex_unlock (&self->mutex);
889 return GST_FLOW_FLUSHING;
890 }
891
892 switch (self->mode) {
893 case MODE_TIMECODE:{
894 if (self->tc && self->end_tc
895 && gst_video_time_code_compare (self->tc, self->end_tc) != -1) {
896 gchar *tc_str, *end_tc;
897
898 tc_str = gst_video_time_code_to_string (self->tc);
899 end_tc = gst_video_time_code_to_string (self->end_tc);
900 GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
901 ("End timecode %s must be after start timecode %s. Start timecode rejected",
902 end_tc, tc_str));
903 g_free (end_tc);
904 g_free (tc_str);
905 gst_buffer_unref (inbuf);
906 g_mutex_unlock (&self->mutex);
907 return GST_FLOW_ERROR;
908 }
909
910 if (self->tc != NULL && tc != NULL) {
911 gboolean emit_passthrough_signal = FALSE;
912
913 if (gst_video_time_code_compare (tc, self->tc) < 0
914 && self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
915 GST_DEBUG_OBJECT (self, "Timecode not yet reached, ignoring frame");
916 gst_buffer_unref (inbuf);
917 inbuf = NULL;
918 } else if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
919 GST_INFO_OBJECT (self, "Target timecode reached at %" GST_TIME_FORMAT,
920 GST_TIME_ARGS (self->vsegment.position));
921 /* Don't emit a signal if we weren't dropping (e.g. settings changed
922 * mid-flight) */
923 emit_passthrough_signal = self->dropping;
924 self->dropping = FALSE;
925 self->running_time_to_wait_for = running_time;
926 if (self->recording) {
927 self->audio_running_time_to_wait_for =
928 self->running_time_to_wait_for;
929 }
930 }
931
932 if (self->end_tc && gst_video_time_code_compare (tc, self->end_tc) >= 0) {
933 if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
934 GST_INFO_OBJECT (self, "End timecode reached at %" GST_TIME_FORMAT,
935 GST_TIME_ARGS (self->vsegment.position));
936 self->dropping = TRUE;
937 self->running_time_to_end_at = running_time;
938 if (self->recording) {
939 self->audio_running_time_to_end_at = self->running_time_to_end_at;
940 self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
941 }
942 }
943
944 if (inbuf) {
945 gst_buffer_unref (inbuf);
946 inbuf = NULL;
947 }
948 } else if (emit_passthrough_signal && self->recording) {
949 send_message = TRUE;
950 message_running_time = self->running_time_to_wait_for;
951 message_dropping = FALSE;
952 }
953 }
954 break;
955 }
956 case MODE_RUNNING_TIME:{
957 gboolean emit_passthrough_signal = FALSE;
958
959 if (self->target_running_time != GST_CLOCK_TIME_NONE
960 && running_time < self->target_running_time) {
961 GST_DEBUG_OBJECT (self,
962 "Have %" GST_TIME_FORMAT ", waiting for %" GST_TIME_FORMAT,
963 GST_TIME_ARGS (running_time),
964 GST_TIME_ARGS (self->target_running_time));
965 gst_buffer_unref (inbuf);
966 inbuf = NULL;
967 } else if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
968 /* Don't emit a signal if we weren't dropping (e.g. settings changed
969 * mid-flight) */
970 emit_passthrough_signal = self->dropping;
971 self->dropping = FALSE;
972 self->running_time_to_wait_for = running_time;
973 if (self->recording) {
974 self->audio_running_time_to_wait_for = running_time;
975 }
976 if (self->recording) {
977 send_message = TRUE;
978 message_running_time = running_time;
979 message_dropping = FALSE;
980 }
981 }
982
983 if (GST_CLOCK_TIME_IS_VALID (self->end_running_time)
984 && running_time >= self->end_running_time) {
985 if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
986 GST_INFO_OBJECT (self,
987 "End running time %" GST_TIME_FORMAT " reached at %"
988 GST_TIME_FORMAT, GST_TIME_ARGS (self->end_running_time),
989 GST_TIME_ARGS (self->vsegment.position));
990 self->dropping = TRUE;
991 self->running_time_to_end_at = running_time;
992 if (self->recording) {
993 self->audio_running_time_to_end_at = running_time;
994 self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
995 }
996 }
997
998 if (inbuf) {
999 gst_buffer_unref (inbuf);
1000 inbuf = NULL;
1001 }
1002 } else if (emit_passthrough_signal && self->recording) {
1003 send_message = TRUE;
1004 message_running_time = self->running_time_to_wait_for;
1005 message_dropping = FALSE;
1006 }
1007
1008 break;
1009 }
1010 case MODE_VIDEO_FIRST:{
1011 if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE) {
1012 self->running_time_to_wait_for = running_time;
1013 GST_DEBUG_OBJECT (self, "First video running time is %" GST_TIME_FORMAT,
1014 GST_TIME_ARGS (self->running_time_to_wait_for));
1015 if (self->recording) {
1016 self->audio_running_time_to_wait_for = self->running_time_to_wait_for;
1017 }
1018 if (self->dropping) {
1019 self->dropping = FALSE;
1020 if (self->recording) {
1021 send_message = TRUE;
1022 message_running_time = self->running_time_to_wait_for;
1023 message_dropping = FALSE;
1024 }
1025 }
1026 }
1027 break;
1028 }
1029 }
1030
1031 if (!self->recording) {
1032 if (self->was_recording) {
1033 GST_INFO_OBJECT (self, "Recording stopped at %" GST_TIME_FORMAT,
1034 GST_TIME_ARGS (running_time));
1035
1036 if (running_time > self->running_time_to_wait_for
1037 && (running_time <= self->running_time_to_end_at
1038 || self->running_time_to_end_at == GST_CLOCK_TIME_NONE)) {
1039 /* We just stopped recording: synchronise the audio */
1040 if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE)
1041 self->running_time_to_end_at = running_time;
1042 self->audio_running_time_to_end_at = running_time;
1043 self->must_send_end_message |= END_MESSAGE_STREAM_ENDED;
1044 } else if (running_time < self->running_time_to_wait_for
1045 && self->running_time_to_wait_for != GST_CLOCK_TIME_NONE) {
1046 self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
1047 }
1048 }
1049
1050 /* Recording is FALSE: we drop all buffers */
1051 if (inbuf) {
1052 gst_buffer_unref (inbuf);
1053 inbuf = NULL;
1054 }
1055 } else {
1056 if (!self->was_recording) {
1057 GST_INFO_OBJECT (self,
1058 "Recording started at %" GST_TIME_FORMAT " waiting for %"
1059 GST_TIME_FORMAT " inbuf %p", GST_TIME_ARGS (running_time),
1060 GST_TIME_ARGS (self->running_time_to_wait_for), inbuf);
1061
1062 if (self->mode != MODE_VIDEO_FIRST ||
1063 self->first_audio_running_time <= running_time ||
1064 self->audio_eos_flag) {
1065 if (running_time < self->running_time_to_end_at ||
1066 self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
1067 /* We are before the end of the recording. Check if we just actually
1068 * started */
1069 if (self->running_time_to_wait_for != GST_CLOCK_TIME_NONE
1070 && running_time > self->running_time_to_wait_for) {
1071 /* We just started recording: synchronise the audio */
1072 self->audio_running_time_to_wait_for = running_time;
1073 send_message = TRUE;
1074 message_running_time = running_time;
1075 message_dropping = FALSE;
1076 } else {
1077 /* We will start in the future when running_time_to_wait_for is
1078 * reached */
1079 self->audio_running_time_to_wait_for =
1080 self->running_time_to_wait_for;
1081 }
1082 self->audio_running_time_to_end_at = self->running_time_to_end_at;
1083 }
1084 } else {
1085 /* We are in video-first mode and behind the first audio timestamp. We
1086 * should drop all video buffers until the first audio timestamp, so
1087 * we can catch up with it. (In timecode mode and running-time mode, we
1088 * don't care about when the audio starts, we start as soon as the
1089 * target timecode or running time has been reached) */
1090 if (inbuf) {
1091 gst_buffer_unref (inbuf);
1092 inbuf = NULL;
1093 }
1094 retry = TRUE;
1095 }
1096 }
1097 }
1098
1099 if (!retry)
1100 self->was_recording = self->recording;
1101 g_cond_signal (&self->cond);
1102 g_mutex_unlock (&self->mutex);
1103
1104 if (send_message)
1105 gst_avwait_send_element_message (self, message_dropping,
1106 message_running_time);
1107 send_message = FALSE;
1108
1109 if (inbuf) {
1110 GST_DEBUG_OBJECT (self,
1111 "Pass video buffer %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
1112 GST_TIME_ARGS (gst_segment_to_running_time (&self->vsegment,
1113 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (inbuf))),
1114 GST_TIME_ARGS (gst_segment_to_running_time (&self->vsegment,
1115 GST_FORMAT_TIME,
1116 GST_BUFFER_TIMESTAMP (inbuf) + GST_BUFFER_DURATION (inbuf))));
1117 ret = gst_pad_push (self->vsrcpad, inbuf);
1118 }
1119
1120 g_mutex_lock (&self->mutex);
1121 if (self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED) {
1122 self->must_send_end_message = END_MESSAGE_NORMAL;
1123 send_message = TRUE;
1124 message_dropping = TRUE;
1125 message_running_time = self->audio_running_time_to_end_at;
1126 } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
1127 if (self->audio_eos_flag) {
1128 self->must_send_end_message = END_MESSAGE_NORMAL;
1129 send_message = TRUE;
1130 message_dropping = TRUE;
1131 message_running_time = self->audio_running_time_to_end_at;
1132 } else {
1133 self->must_send_end_message |= END_MESSAGE_VIDEO_PUSHED;
1134 }
1135 }
1136 g_mutex_unlock (&self->mutex);
1137
1138 if (send_message)
1139 gst_avwait_send_element_message (self, message_dropping,
1140 message_running_time);
1141
1142 return ret;
1143 }
1144
1145 /*
1146 * assumes sign1 and sign2 are either 1 or -1
1147 * returns 0 if sign1*num1 == sign2*num2
1148 * -1 if sign1*num1 < sign2*num2
1149 * 1 if sign1*num1 > sign2*num2
1150 */
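/* For example (hypothetical checks, not part of the original code):
 *   gst_avwait_compare_guint64_with_signs (-1, 5, 1, 3) == -1  (-5 < 3)
 *   gst_avwait_compare_guint64_with_signs (1, 5, 1, 3)  ==  1  ( 5 > 3)
 *   gst_avwait_compare_guint64_with_signs (1, 4, 1, 4)  ==  0
 */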
1151 static gint
1152 gst_avwait_compare_guint64_with_signs (gint sign1,
1153 guint64 num1, gint sign2, guint64 num2)
1154 {
1155 if (sign1 != sign2)
1156 return sign1;
1157 else if (num1 == num2)
1158 return 0;
1159 else
1160 return num1 > num2 ? sign1 : -sign1;
1161 }
1162
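/* Audio chain: block until the video chain has advanced at least to the end
 * of this buffer (or EOS/flush), then either drop the buffer or clip it
 * against audio_running_time_to_wait_for / audio_running_time_to_end_at with
 * gst_audio_buffer_clip() (buffers fully inside the region pass unchanged). */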
1163 static GstFlowReturn
1164 gst_avwait_asink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
1165 {
1166 GstClockTime timestamp;
1167 GstAvWait *self = GST_AVWAIT (parent);
1168 GstClockTime current_running_time;
1169 GstClockTime video_running_time = GST_CLOCK_TIME_NONE;
1170 GstClockTime duration;
1171 GstClockTime running_time_at_end = GST_CLOCK_TIME_NONE;
1172 gint asign, vsign = 1, esign = 1;
1173 GstFlowReturn ret = GST_FLOW_OK;
1174 /* Make sure the video thread doesn't send the element message before we
1175 * actually call gst_pad_push */
1176 gboolean send_element_message = FALSE;
1177
1178 timestamp = GST_BUFFER_TIMESTAMP (inbuf);
1179 if (timestamp == GST_CLOCK_TIME_NONE) {
1180 gst_buffer_unref (inbuf);
1181 return GST_FLOW_ERROR;
1182 }
1183
1184 g_mutex_lock (&self->mutex);
1185 self->asegment.position = timestamp;
1186 asign =
1187 gst_segment_to_running_time_full (&self->asegment, GST_FORMAT_TIME,
1188 self->asegment.position, &current_running_time);
1189 if (asign == 0) {
1190 g_mutex_unlock (&self->mutex);
1191 gst_buffer_unref (inbuf);
1192 GST_ERROR_OBJECT (self, "Could not get current running time");
1193 return GST_FLOW_ERROR;
1194 }
1195
1196 if (self->first_audio_running_time == GST_CLOCK_TIME_NONE) {
1197 self->first_audio_running_time = current_running_time;
1198 }
1199
1200 g_cond_signal (&self->audio_cond);
1201 if (self->vsegment.format == GST_FORMAT_TIME) {
1202 vsign =
1203 gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
1204 self->vsegment.position, &video_running_time);
1205 if (vsign == 0) {
1206 video_running_time = GST_CLOCK_TIME_NONE;
1207 }
1208 }
1209
1210 duration =
1211 gst_util_uint64_scale (gst_buffer_get_size (inbuf) / self->ainfo.bpf,
1212 GST_SECOND, self->ainfo.rate);
1213 if (duration != GST_CLOCK_TIME_NONE) {
1214 esign =
1215 gst_segment_to_running_time_full (&self->asegment, GST_FORMAT_TIME,
1216 self->asegment.position + duration, &running_time_at_end);
1217 if (esign == 0) {
1218 g_mutex_unlock (&self->mutex);
1219 GST_ERROR_OBJECT (self, "Could not get running time at end");
1220 gst_buffer_unref (inbuf);
1221 return GST_FLOW_ERROR;
1222 }
1223 }
1224
1225 while (!(self->video_eos_flag || self->audio_flush_flag
1226 || self->shutdown_flag) &&
1227 /* Start at timecode */
1228 /* Wait if we haven't received video yet */
1229 (video_running_time == GST_CLOCK_TIME_NONE
1230 /* Wait if audio is after the video: dunno what to do */
1231 || gst_avwait_compare_guint64_with_signs (asign,
1232 running_time_at_end, vsign, video_running_time) == 1)) {
1233 GST_DEBUG_OBJECT (self,
1234 "Waiting for video: audio at %s%" GST_TIME_FORMAT ", video at %s%"
1235 GST_TIME_FORMAT, asign < 0 ? "-" : "+",
1236 GST_TIME_ARGS (running_time_at_end), vsign < 0 ? "-" : "+",
1237 GST_TIME_ARGS (video_running_time));
1238 g_cond_wait (&self->cond, &self->mutex);
1239 vsign =
1240 gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
1241 self->vsegment.position, &video_running_time);
1242 if (vsign == 0) {
1243 video_running_time = GST_CLOCK_TIME_NONE;
1244 }
1245 }
1246
1247 if (self->audio_flush_flag || self->shutdown_flag) {
1248 GST_DEBUG_OBJECT (self, "Shutting down, ignoring frame");
1249 gst_buffer_unref (inbuf);
1250 g_mutex_unlock (&self->mutex);
1251 return GST_FLOW_FLUSHING;
1252 }
1253
1254 if (self->audio_running_time_to_wait_for == GST_CLOCK_TIME_NONE
1255 /* Audio ends before start : drop */
1256 || gst_avwait_compare_guint64_with_signs (esign,
1257 running_time_at_end, 1, self->audio_running_time_to_wait_for) == -1
1258 /* Audio starts after end: drop */
1259 || current_running_time >= self->audio_running_time_to_end_at) {
1260 GST_DEBUG_OBJECT (self,
1261 "Dropped an audio buf at %" GST_TIME_FORMAT " waiting for %"
1262 GST_TIME_FORMAT " video time %" GST_TIME_FORMAT,
1263 GST_TIME_ARGS (current_running_time),
1264 GST_TIME_ARGS (self->audio_running_time_to_wait_for),
1265 GST_TIME_ARGS (video_running_time));
1266 GST_DEBUG_OBJECT (self, "Would have ended at %i %" GST_TIME_FORMAT,
1267 esign, GST_TIME_ARGS (running_time_at_end));
1268 gst_buffer_unref (inbuf);
1269 inbuf = NULL;
1270 if (current_running_time >= self->audio_running_time_to_end_at &&
1271 (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) &&
1272 !(self->must_send_end_message & END_MESSAGE_AUDIO_PUSHED)) {
1273 send_element_message = TRUE;
1274 }
1275 } else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
1276 1, self->audio_running_time_to_wait_for) >= 0
1277 && gst_avwait_compare_guint64_with_signs (esign, running_time_at_end, 1,
1278 self->audio_running_time_to_end_at) == -1) {
1279 /* Audio ends after start, but before end: clip */
1280 GstSegment asegment2 = self->asegment;
1281 guint64 start;
1282 gint ssign;
1283
1284 ssign = gst_segment_position_from_running_time_full (&asegment2,
1285 GST_FORMAT_TIME, self->audio_running_time_to_wait_for, &start);
1286 if (ssign > 0) {
1287 asegment2.start = start;
1288 } else {
1289 /* Starting before the start of the audio segment?! */
1290 /* This shouldn't happen: we already know that the current audio is
1291 * inside the segment, and that the end is after the current audio
1292 * position */
1293 GST_ELEMENT_ERROR (self, CORE, FAILED,
1294 ("Failed to clip audio: it should have started before the current segment"),
1295 NULL);
1296 }
1297
1298 inbuf =
1299 gst_audio_buffer_clip (inbuf, &asegment2, self->ainfo.rate,
1300 self->ainfo.bpf);
1301 } else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
1302 1, self->audio_running_time_to_end_at) >= 0) {
1303 /* Audio starts after start, but before end: clip from the other side */
1304 GstSegment asegment2 = self->asegment;
1305 guint64 stop;
1306 gint ssign;
1307
1308 ssign =
1309 gst_segment_position_from_running_time_full (&asegment2,
1310 GST_FORMAT_TIME, self->audio_running_time_to_end_at, &stop);
1311 if (ssign > 0) {
1312 asegment2.stop = stop;
1313 } else {
1314 /* Stopping before the start of the audio segment?! */
1315 /* This shouldn't happen: we already know that the current audio is
1316 * inside the segment, and that the end is after the current audio
1317 * position */
1318 GST_ELEMENT_ERROR (self, CORE, FAILED,
1319 ("Failed to clip audio: it should have ended before the current segment"),
1320 NULL);
1321 }
1322 inbuf =
1323 gst_audio_buffer_clip (inbuf, &asegment2, self->ainfo.rate,
1324 self->ainfo.bpf);
1325 if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
1326 send_element_message = TRUE;
1327 }
1328 } else {
1329 /* Programming error? Shouldn't happen */
1330 g_assert_not_reached ();
1331 }
1332 g_mutex_unlock (&self->mutex);
1333
1334 if (inbuf) {
1335 GstClockTime new_duration =
1336 gst_util_uint64_scale (gst_buffer_get_size (inbuf) / self->ainfo.bpf,
1337 GST_SECOND, self->ainfo.rate);
1338 GstClockTime new_running_time_at_end =
1339 gst_segment_to_running_time (&self->asegment, GST_FORMAT_TIME,
1340 GST_BUFFER_TIMESTAMP (inbuf) + new_duration);
1341
1342 GST_DEBUG_OBJECT (self,
1343 "Pass audio buffer %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
1344 GST_TIME_ARGS (gst_segment_to_running_time (&self->asegment,
1345 GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (inbuf))),
1346 GST_TIME_ARGS (new_running_time_at_end));
1347 ret = gst_pad_push (self->asrcpad, inbuf);
1348 }
1349
1350 if (send_element_message) {
1351 gboolean send_message = FALSE;
1352 GstClockTime audio_running_time_to_end_at;
1353
1354 g_mutex_lock (&self->mutex);
1355 if ((self->must_send_end_message & END_MESSAGE_VIDEO_PUSHED) ||
1356 self->video_eos_flag) {
1357 self->must_send_end_message = END_MESSAGE_NORMAL;
1358 send_message = TRUE;
1359 audio_running_time_to_end_at = self->audio_running_time_to_end_at;
1360 } else if (self->must_send_end_message & END_MESSAGE_STREAM_ENDED) {
1361 self->must_send_end_message |= END_MESSAGE_AUDIO_PUSHED;
1362 } else {
1363 g_assert_not_reached ();
1364 }
1365 g_mutex_unlock (&self->mutex);
1366
1367 if (send_message)
1368 gst_avwait_send_element_message (self, TRUE,
1369 audio_running_time_to_end_at);
1370 }
1371 send_element_message = FALSE;
1372 return ret;
1373 }
1374
1375 static GstIterator *
1376 gst_avwait_iterate_internal_links (GstPad * pad, GstObject * parent)
1377 {
1378 GstIterator *it = NULL;
1379 GstPad *opad;
1380 GValue val = G_VALUE_INIT;
1381 GstAvWait *self = GST_AVWAIT (parent);
1382
1383 if (self->asinkpad == pad)
1384 opad = gst_object_ref (self->asrcpad);
1385 else if (self->asrcpad == pad)
1386 opad = gst_object_ref (self->asinkpad);
1387 else if (self->vsinkpad == pad)
1388 opad = gst_object_ref (self->vsrcpad);
1389 else if (self->vsrcpad == pad)
1390 opad = gst_object_ref (self->vsinkpad);
1391 else
1392 goto out;
1393
1394 g_value_init (&val, GST_TYPE_PAD);
1395 g_value_set_object (&val, opad);
1396 it = gst_iterator_new_single (GST_TYPE_PAD, &val);
1397 g_value_unset (&val);
1398
1399 gst_object_unref (opad);
1400
1401 out:
1402 return it;
1403 }
1404