1 /* GStreamer
2 * Copyright (C) 2011 David Schleef <ds@entropywave.com>
3 * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
4 * Copyright (C) 2015 Florian Langlois <florian.langlois@fr.thalesgroup.com>
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
15 *
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
19 * Boston, MA 02110-1335, USA.
20 */
21 /**
22 * SECTION:element-decklinkvideosrc
23 * @short_description: Inputs Video from a BlackMagic DeckLink Device
24 *
25 * Capture Video from a BlackMagic DeckLink Device.
26 *
27 * ## Sample pipeline
28 * |[
29 * gst-launch-1.0 \
30 * decklinkvideosrc device-number=0 connection=sdi mode=1080p25 ! \
31 * autovideosink
32 * ]|
33 * Capturing 1080p25 video from the SDI-In of Card 0. Devices are numbered
34 * starting with 0.
35 *
36 * # Duplex-Mode:
 * Certain DeckLink Cards like the Duo2 or the Quad2 contain two or four
38 * independent SDI units with two connectors each. These units can operate either
39 * in half- or in full-duplex mode.
40 *
41 * The Duplex-Mode of a Card can be configured using the `duplex-mode`-Property.
 * Cards that do not support Duplex-Modes are not influenced by the property.
43 *
44 * ## Half-Duplex-Mode (default):
45 * By default decklinkvideosrc will configure them into half-duplex mode, so that
46 * each connector acts as if it were an independent DeckLink Card which can either
 * be used as an Input or as an Output. In this mode the Duo2 can be used as 4 SDI
48 * In-/Outputs and the Quad2 as 8 SDI In-/Outputs.
49 *
50 * |[
51 * gst-launch-1.0 \
52 * decklinkvideosrc device-number=0 mode=1080p25 ! c. \
53 * decklinkvideosrc device-number=1 mode=1080p25 ! c. \
54 * decklinkvideosrc device-number=2 mode=1080p25 ! c. \
55 * decklinkvideosrc device-number=3 mode=1080p25 ! c. \
56 * compositor name=c \
57 * sink_0::xpos=0 sink_0::ypos=0 sink_0::width=960 sink_0::height=540 \
58 * sink_1::xpos=960 sink_1::ypos=0 sink_1::width=960 sink_1::height=540 \
59 * sink_2::xpos=0 sink_2::ypos=540 sink_2::width=960 sink_2::height=540 \
60 * sink_3::xpos=960 sink_3::ypos=540 sink_3::width=960 sink_3::height=540 ! \
61 * video/x-raw,width=1920,height=1080 ! \
62 * autovideosink
63 * ]|
64 * Capture 1080p25 from the first 4 units in the System (ie. the 4 Connectors of
65 * a Duo2 Card) and compose them into a 2x2 grid.
66 *
67 * |[
68 * gst-launch-1.0 \
69 * videotestsrc foreground-color=0x0000ff00 ! decklinkvideosink device-number=0 mode=1080p25 \
70 * decklinkvideosrc device-number=1 mode=1080p25 ! autovideosink \
71 * decklinkvideosrc device-number=2 mode=1080p25 ! autovideosink \
72 * videotestsrc foreground-color=0x00ff0000 ! decklinkvideosink device-number=3 mode=1080p25
73 * ]|
74 * Capture 1080p25 from the second and third unit in the System,
75 * Playout a Test-Screen with colored Snow on the first and fourth unit
76 * (ie. the Connectors 1-4 of a Duo2 unit).
77 *
78 * ## Device-Number-Mapping in Half-Duplex-Mode
79 * The device-number to connector-mapping in half-duplex-mode is as follows for the Duo2
80 * - `device-number=0` SDI1
81 * - `device-number=1` SDI3
82 * - `device-number=2` SDI2
83 * - `device-number=3` SDI4
84 *
85 * And for the Quad2
86 * - `device-number=0` SDI1
87 * - `device-number=1` SDI3
88 * - `device-number=2` SDI5
89 * - `device-number=3` SDI7
90 * - `device-number=4` SDI2
91 * - `device-number=5` SDI4
92 * - `device-number=6` SDI6
93 * - `device-number=7` SDI8
94 *
95 * ## Full-Duplex-Mode:
96 * When operating in full-duplex mode, two connectors of a unit are combined to
97 * a single device, performing extra processing with the second connection.
98 *
99 * This mode is most useful for Playout. See @decklinkvideosink.
100 * For Capturing the options are as follows:
101 *
102 * When capturing from a duplex-unit, the secondary port outputs the captured image
103 * unchanged.
104 * |[
105 * gst-launch-1.0 \
106 * decklinkvideosrc device-number=0 mode=1080p25 duplex-mode=full ! \
107 * autovideosink
108 * ]|
109 *
110 * When simultaneously capturing and playing out onto the same device, the
111 * secondary port outputs the played out video. Note, that this can also be
112 * achieved using half-duplex mode.
113 * |[
114 * gst-launch-1.0 \
115 * decklinkvideosrc device-number=0 mode=1080p25 duplex-mode=full ! \
116 * videoflip video-direction=vert ! \
117 * decklinkvideosink device-number=0 mode=1080p25 duplex-mode=full
118 * ]|
119 * Capturing Video on the primary port of device 0, output flipped version of the
120 * video on secondary port of the same device.
121 *
122 * ## Device-Number-Mapping in Full-Duplex-Mode
123 * The device-number to connector-mapping in full-duplex-mode is as follows for the Duo2
124 * - `device-number=0` SDI1 primary, SDI2 secondary
 * - `device-number=1` SDI3 primary, SDI4 secondary
126 *
127 * And for the Quad2
128 * - `device-number=0` SDI1 primary, SDI2 secondary
 * - `device-number=1` SDI3 primary, SDI4 secondary
130 * - `device-number=2` SDI5 primary, SDI6 secondary
131 * - `device-number=3` SDI7 primary, SDI8 secondary
132 */
133
134 #ifdef HAVE_CONFIG_H
135 #include "config.h"
136 #endif
137
138 #include "gstdecklinkvideosrc.h"
139 #include <string.h>
140
GST_DEBUG_CATEGORY_STATIC (gst_decklink_video_src_debug);
#define GST_CAT_DEFAULT gst_decklink_video_src_debug

/* Default values of the element's properties (see class_init). */
#define DEFAULT_MODE (GST_DECKLINK_MODE_AUTO)
#define DEFAULT_CONNECTION (GST_DECKLINK_CONNECTION_AUTO)
#define DEFAULT_BUFFER_SIZE (5)
#define DEFAULT_OUTPUT_STREAM_TIME (FALSE)
#define DEFAULT_SKIP_FIRST_TIME (0)
#define DEFAULT_DROP_NO_SIGNAL_FRAMES (FALSE)
#define DEFAULT_OUTPUT_CC (FALSE)
#define DEFAULT_OUTPUT_AFD_BAR (FALSE)

/* Absolute difference that also works for unsigned operands (no underflow). */
#ifndef ABSDIFF
#define ABSDIFF(x, y) ( (x) > (y) ? ((x) - (y)) : ((y) - (x)) )
#endif
156
/* GObject property identifiers. */
enum
{
  PROP_0,
  PROP_MODE,
  PROP_CONNECTION,
  PROP_DEVICE_NUMBER,
  PROP_BUFFER_SIZE,
  PROP_VIDEO_FORMAT,
  PROP_PROFILE_ID,
  PROP_TIMECODE_FORMAT,
  PROP_OUTPUT_STREAM_TIME,
  PROP_SKIP_FIRST_TIME,
  PROP_DROP_NO_SIGNAL_FRAMES,
  PROP_SIGNAL,                  /* read-only: whether an input signal is present */
  PROP_HW_SERIAL_NUMBER,        /* read-only: hardware serial of the card */
  PROP_OUTPUT_CC,
  PROP_OUTPUT_AFD_BAR,
};
175
/* One captured frame plus its timing metadata, queued from the DeckLink
 * callback thread to the streaming thread via self->current_frames.
 * An entry with frame == NULL is a "signal lost" marker pushed when
 * drop-no-signal-frames is enabled (see gst_decklink_video_src_got_frame). */
typedef struct
{
  IDeckLinkVideoInputFrame *frame;      /* owned ref; NULL for signal-loss marker */
  GstClockTime timestamp, duration;     /* translated pipeline-clock times */
  GstClockTime stream_timestamp;        /* DeckLink stream time as delivered */
  GstClockTime stream_duration;
  GstClockTime hardware_timestamp;      /* DeckLink hardware reference time */
  GstClockTime hardware_duration;
  GstDecklinkModeEnum mode;             /* mode the frame was captured in */
  BMDPixelFormat format;                /* pixel format of @frame */
  GstVideoTimeCode *tc;                 /* owned; may be NULL if no timecode */
  gboolean no_signal;                   /* frame was captured without valid input */
} CaptureFrame;
189
190 static void
capture_frame_clear(CaptureFrame * frame)191 capture_frame_clear (CaptureFrame * frame)
192 {
193 if (frame->frame)
194 frame->frame->Release ();
195 if (frame->tc)
196 gst_video_time_code_free (frame->tc);
197 memset (frame, 0, sizeof (*frame));
198 }
199
/* Pair of owned DeckLink references that must stay alive together until
 * video_frame_free() releases both (used as a GDestroyNotify payload). */
typedef struct
{
  IDeckLinkVideoInputFrame *frame;  /* owned ref to the captured frame */
  IDeckLinkInput *input;            /* owned ref to the input that produced it */
} VideoFrame;
205
206 static void
video_frame_free(void * data)207 video_frame_free (void *data)
208 {
209 VideoFrame *frame = (VideoFrame *) data;
210
211 frame->frame->Release ();
212 frame->input->Release ();
213 g_free (frame);
214 }
215
/* GObject vfuncs */
static void gst_decklink_video_src_set_property (GObject * object,
    guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_decklink_video_src_get_property (GObject * object,
    guint property_id, GValue * value, GParamSpec * pspec);
static void gst_decklink_video_src_finalize (GObject * object);

/* GstElement vfuncs */
static GstStateChangeReturn
gst_decklink_video_src_change_state (GstElement * element,
    GstStateChange transition);

/* GstBaseSrc vfuncs */
static GstCaps *gst_decklink_video_src_get_caps (GstBaseSrc * bsrc,
    GstCaps * filter);
static gboolean gst_decklink_video_src_query (GstBaseSrc * bsrc,
    GstQuery * query);
static gboolean gst_decklink_video_src_unlock (GstBaseSrc * bsrc);
static gboolean gst_decklink_video_src_unlock_stop (GstBaseSrc * bsrc);

/* GstPushSrc vfunc */
static GstFlowReturn gst_decklink_video_src_create (GstPushSrc * psrc,
    GstBuffer ** buffer);

/* Internal helpers */
static gboolean gst_decklink_video_src_open (GstDecklinkVideoSrc * self);
static gboolean gst_decklink_video_src_close (GstDecklinkVideoSrc * self);

static gboolean gst_decklink_video_src_stop (GstDecklinkVideoSrc * self);

static void gst_decklink_video_src_start_streams (GstElement * element);

#define parent_class gst_decklink_video_src_parent_class
G_DEFINE_TYPE (GstDecklinkVideoSrc, gst_decklink_video_src, GST_TYPE_PUSH_SRC);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (decklinkvideosrc, "decklinkvideosrc", GST_RANK_NONE,
    GST_TYPE_DECKLINK_VIDEO_SRC, decklink_element_init (plugin));
247
248 static void
gst_decklink_video_src_class_init(GstDecklinkVideoSrcClass * klass)249 gst_decklink_video_src_class_init (GstDecklinkVideoSrcClass * klass)
250 {
251 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
252 GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
253 GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
254 GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS (klass);
255 GstCaps *templ_caps;
256
257 gobject_class->set_property = gst_decklink_video_src_set_property;
258 gobject_class->get_property = gst_decklink_video_src_get_property;
259 gobject_class->finalize = gst_decklink_video_src_finalize;
260
261 element_class->change_state =
262 GST_DEBUG_FUNCPTR (gst_decklink_video_src_change_state);
263
264 basesrc_class->query = GST_DEBUG_FUNCPTR (gst_decklink_video_src_query);
265 basesrc_class->negotiate = NULL;
266 basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_decklink_video_src_get_caps);
267 basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_decklink_video_src_unlock);
268 basesrc_class->unlock_stop =
269 GST_DEBUG_FUNCPTR (gst_decklink_video_src_unlock_stop);
270
271 pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_decklink_video_src_create);
272
273 g_object_class_install_property (gobject_class, PROP_MODE,
274 g_param_spec_enum ("mode", "Playback Mode",
275 "Video Mode to use for playback",
276 GST_TYPE_DECKLINK_MODE, DEFAULT_MODE,
277 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
278 G_PARAM_CONSTRUCT)));
279
280 g_object_class_install_property (gobject_class, PROP_CONNECTION,
281 g_param_spec_enum ("connection", "Connection",
282 "Video input connection to use",
283 GST_TYPE_DECKLINK_CONNECTION, DEFAULT_CONNECTION,
284 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
285 G_PARAM_CONSTRUCT)));
286
287 g_object_class_install_property (gobject_class, PROP_DEVICE_NUMBER,
288 g_param_spec_int ("device-number", "Device number",
289 "Output device instance to use", 0, G_MAXINT, 0,
290 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
291 G_PARAM_CONSTRUCT)));
292
293 g_object_class_install_property (gobject_class, PROP_BUFFER_SIZE,
294 g_param_spec_uint ("buffer-size", "Buffer Size",
295 "Size of internal buffer in number of video frames", 1,
296 G_MAXINT, DEFAULT_BUFFER_SIZE,
297 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
298
299 g_object_class_install_property (gobject_class, PROP_VIDEO_FORMAT,
300 g_param_spec_enum ("video-format", "Video format",
301 "Video format type to use for input (Only use auto for mode=auto)",
302 GST_TYPE_DECKLINK_VIDEO_FORMAT, GST_DECKLINK_VIDEO_FORMAT_AUTO,
303 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
304 G_PARAM_CONSTRUCT)));
305
306 /**
307 * GstDecklinkVideoSrc:profile
308 *
309 * Specifies decklink profile to use.
310 *
311 * Since: 1.20
312 */
313 g_object_class_install_property (gobject_class, PROP_PROFILE_ID,
314 g_param_spec_enum ("profile", "Profile",
315 "Certain DeckLink devices such as the DeckLink 8K Pro, the DeckLink "
316 "Quad 2 and the DeckLink Duo 2 support multiple profiles to "
317 "configure the capture and playback behavior of its sub-devices."
318 "For the DeckLink Duo 2 and DeckLink Quad 2, a profile is shared "
319 "between any 2 sub-devices that utilize the same connectors. For the "
320 "DeckLink 8K Pro, a profile is shared between all 4 sub-devices. Any "
321 "sub-devices that share a profile are considered to be part of the "
322 "same profile group."
323 "DeckLink Duo 2 support configuration of the duplex mode of "
324 "individual sub-devices.",
325 GST_TYPE_DECKLINK_PROFILE_ID, GST_DECKLINK_PROFILE_ID_DEFAULT,
326 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
327 G_PARAM_CONSTRUCT)));
328
329 g_object_class_install_property (gobject_class, PROP_TIMECODE_FORMAT,
330 g_param_spec_enum ("timecode-format", "Timecode format",
331 "Timecode format type to use for input",
332 GST_TYPE_DECKLINK_TIMECODE_FORMAT,
333 GST_DECKLINK_TIMECODE_FORMAT_RP188ANY,
334 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
335 G_PARAM_CONSTRUCT)));
336
337 g_object_class_install_property (gobject_class, PROP_OUTPUT_STREAM_TIME,
338 g_param_spec_boolean ("output-stream-time", "Output Stream Time",
339 "Output stream time directly instead of translating to pipeline clock",
340 DEFAULT_OUTPUT_STREAM_TIME,
341 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
342
343 g_object_class_install_property (gobject_class, PROP_SKIP_FIRST_TIME,
344 g_param_spec_uint64 ("skip-first-time", "Skip First Time",
345 "Skip that much time of initial frames after starting", 0,
346 G_MAXUINT64, DEFAULT_SKIP_FIRST_TIME,
347 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
348
349 g_object_class_install_property (gobject_class, PROP_DROP_NO_SIGNAL_FRAMES,
350 g_param_spec_boolean ("drop-no-signal-frames", "Drop No Signal Frames",
351 "Drop frames that are marked as having no input signal",
352 DEFAULT_DROP_NO_SIGNAL_FRAMES,
353 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
354
355 g_object_class_install_property (gobject_class, PROP_SIGNAL,
356 g_param_spec_boolean ("signal", "Input signal available",
357 "True if there is a valid input signal available",
358 FALSE, (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
359
360 g_object_class_install_property (gobject_class, PROP_HW_SERIAL_NUMBER,
361 g_param_spec_string ("hw-serial-number", "Hardware serial number",
362 "The serial number (hardware ID) of the Decklink card",
363 NULL, (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));
364
365 g_object_class_install_property (gobject_class, PROP_OUTPUT_CC,
366 g_param_spec_boolean ("output-cc", "Output Closed Caption",
367 "Extract and output CC as GstMeta (if present)",
368 DEFAULT_OUTPUT_CC,
369 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
370
371 g_object_class_install_property (gobject_class, PROP_OUTPUT_AFD_BAR,
372 g_param_spec_boolean ("output-afd-bar", "Output AFD/Bar data",
373 "Extract and output AFD/Bar as GstMeta (if present)",
374 DEFAULT_OUTPUT_AFD_BAR,
375 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
376
377 templ_caps = gst_decklink_mode_get_template_caps (TRUE);
378 gst_element_class_add_pad_template (element_class,
379 gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, templ_caps));
380 gst_caps_unref (templ_caps);
381
382 gst_element_class_set_static_metadata (element_class, "Decklink Video Source",
383 "Video/Source/Hardware", "Decklink Source",
384 "David Schleef <ds@entropywave.com>, "
385 "Sebastian Dröge <sebastian@centricular.com>");
386
387 GST_DEBUG_CATEGORY_INIT (gst_decklink_video_src_debug, "decklinkvideosrc",
388 0, "debug category for decklinkvideosrc element");
389 }
390
/* Instance init: set all property defaults, prepare the clock-mapping
 * observation window, configure live-source behaviour and create the
 * frame queue shared with the capture callback. */
static void
gst_decklink_video_src_init (GstDecklinkVideoSrc * self)
{
  /* Property defaults (see class_init) */
  self->mode = DEFAULT_MODE;
  self->caps_mode = GST_DECKLINK_MODE_AUTO;     /* refined from caps when mode=auto */
  self->caps_format = bmdFormat8BitYUV;
  self->connection = DEFAULT_CONNECTION;
  self->device_number = 0;
  self->buffer_size = DEFAULT_BUFFER_SIZE;
  self->video_format = GST_DECKLINK_VIDEO_FORMAT_AUTO;
  self->profile_id = GST_DECKLINK_PROFILE_ID_DEFAULT;
  self->timecode_format = bmdTimecodeRP188Any;
  self->signal_state = SIGNAL_STATE_UNKNOWN;
  self->output_stream_time = DEFAULT_OUTPUT_STREAM_TIME;
  self->skip_first_time = DEFAULT_SKIP_FIRST_TIME;
  self->drop_no_signal_frames = DEFAULT_DROP_NO_SIGNAL_FRAMES;
  self->output_cc = DEFAULT_OUTPUT_CC;
  self->output_afd_bar = DEFAULT_OUTPUT_AFD_BAR;

  /* Observation window for the stream-time -> pipeline-time regression
   * (see gst_decklink_video_src_update_time_mapping): `times` holds
   * window_size (stream, capture) time pairs, `times_temp` is scratch
   * space of the same size for gst_calculate_linear_regression(). */
  self->window_size = 64;
  self->times = g_new (GstClockTime, 4 * self->window_size);
  self->times_temp = self->times + 2 * self->window_size;
  self->window_fill = 0;
  self->window_skip = 1;
  self->window_skip_count = 0;
  /* Frame-drop bookkeeping used for the "dropped frames" warnings */
  self->skipped_last = 0;
  self->skip_from_timestamp = GST_CLOCK_TIME_NONE;
  self->skip_to_timestamp = GST_CLOCK_TIME_NONE;

  gst_base_src_set_live (GST_BASE_SRC (self), TRUE);
  gst_base_src_set_format (GST_BASE_SRC (self), GST_FORMAT_TIME);

  gst_pad_use_fixed_caps (GST_BASE_SRC_PAD (self));

  /* Protect current_frames and related state shared with the capture thread */
  g_mutex_init (&self->lock);
  g_cond_init (&self->cond);

  self->current_frames =
      gst_queue_array_new_for_struct (sizeof (CaptureFrame),
      DEFAULT_BUFFER_SIZE);
}
432
/* GObject::set_property implementation. */
void
gst_decklink_video_src_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (object);

  switch (property_id) {
    case PROP_MODE:
      self->mode = (GstDecklinkModeEnum) g_value_get_enum (value);
      /* setting the default value for caps_mode here: if mode==auto then we
       * configure caps_mode from the caps, if mode!=auto we set caps_mode to
       * the same value as the mode. so self->caps_mode is essentially
       * self->mode with mode=auto filtered into whatever we got from the
       * negotiation */
      if (self->mode != GST_DECKLINK_MODE_AUTO)
        self->caps_mode = self->mode;
      break;
    case PROP_CONNECTION:
      self->connection = (GstDecklinkConnectionEnum) g_value_get_enum (value);
      break;
    case PROP_DEVICE_NUMBER:
      self->device_number = g_value_get_int (value);
      break;
    case PROP_BUFFER_SIZE:
      self->buffer_size = g_value_get_uint (value);
      break;
    case PROP_VIDEO_FORMAT:
      self->video_format = (GstDecklinkVideoFormat) g_value_get_enum (value);
      switch (self->video_format) {
        case GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV:
        case GST_DECKLINK_VIDEO_FORMAT_10BIT_YUV:
        case GST_DECKLINK_VIDEO_FORMAT_8BIT_ARGB:
        case GST_DECKLINK_VIDEO_FORMAT_8BIT_BGRA:
          self->caps_format =
              gst_decklink_pixel_format_from_type (self->video_format);
          /* fall through - explicit formats share AUTO's break */
        case GST_DECKLINK_VIDEO_FORMAT_AUTO:
          break;
        default:
          GST_ELEMENT_WARNING (GST_ELEMENT (self), CORE, NOT_IMPLEMENTED,
              ("Format %d not supported", self->video_format), (NULL));
          break;
      }
      break;
    case PROP_PROFILE_ID:
      self->profile_id = (GstDecklinkProfileId) g_value_get_enum (value);
      break;
    case PROP_TIMECODE_FORMAT:
      /* stored as the BMD constant, not the GStreamer enum value */
      self->timecode_format =
          gst_decklink_timecode_format_from_enum ((GstDecklinkTimecodeFormat)
          g_value_get_enum (value));
      break;
    case PROP_OUTPUT_STREAM_TIME:
      self->output_stream_time = g_value_get_boolean (value);
      break;
    case PROP_SKIP_FIRST_TIME:
      self->skip_first_time = g_value_get_uint64 (value);
      break;
    case PROP_DROP_NO_SIGNAL_FRAMES:
      self->drop_no_signal_frames = g_value_get_boolean (value);
      break;
    case PROP_OUTPUT_CC:
      self->output_cc = g_value_get_boolean (value);
      break;
    case PROP_OUTPUT_AFD_BAR:
      self->output_afd_bar = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
504
/* GObject::get_property implementation. */
void
gst_decklink_video_src_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (object);

  switch (property_id) {
    case PROP_MODE:
      g_value_set_enum (value, self->mode);
      break;
    case PROP_CONNECTION:
      g_value_set_enum (value, self->connection);
      break;
    case PROP_DEVICE_NUMBER:
      g_value_set_int (value, self->device_number);
      break;
    case PROP_BUFFER_SIZE:
      g_value_set_uint (value, self->buffer_size);
      break;
    case PROP_VIDEO_FORMAT:
      g_value_set_enum (value, self->video_format);
      break;
    case PROP_PROFILE_ID:
      g_value_set_enum (value, self->profile_id);
      break;
    case PROP_TIMECODE_FORMAT:
      /* stored as BMD constant; convert back to the GStreamer enum */
      g_value_set_enum (value,
          gst_decklink_timecode_format_to_enum (self->timecode_format));
      break;
    case PROP_OUTPUT_STREAM_TIME:
      g_value_set_boolean (value, self->output_stream_time);
      break;
    case PROP_SKIP_FIRST_TIME:
      g_value_set_uint64 (value, self->skip_first_time);
      break;
    case PROP_DROP_NO_SIGNAL_FRAMES:
      g_value_set_boolean (value, self->drop_no_signal_frames);
      break;
    case PROP_SIGNAL:
      g_value_set_boolean (value, self->signal_state == SIGNAL_STATE_AVAILABLE);
      break;
    case PROP_HW_SERIAL_NUMBER:
      /* only known once the device is opened */
      if (self->input)
        g_value_set_string (value, self->input->hw_serial_number);
      else
        g_value_set_string (value, NULL);
      break;
    case PROP_OUTPUT_CC:
      g_value_set_boolean (value, self->output_cc);
      break;
    case PROP_OUTPUT_AFD_BAR:
      g_value_set_boolean (value, self->output_afd_bar);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
563
564 void
gst_decklink_video_src_finalize(GObject * object)565 gst_decklink_video_src_finalize (GObject * object)
566 {
567 GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (object);
568
569 g_free (self->times);
570 self->times = NULL;
571 g_mutex_clear (&self->lock);
572 g_cond_clear (&self->cond);
573
574 if (self->current_frames) {
575 while (gst_queue_array_get_length (self->current_frames) > 0) {
576 CaptureFrame *tmp = (CaptureFrame *)
577 gst_queue_array_pop_head_struct (self->current_frames);
578 capture_frame_clear (tmp);
579 }
580 gst_queue_array_free (self->current_frames);
581 self->current_frames = NULL;
582 }
583
584 G_OBJECT_CLASS (parent_class)->finalize (object);
585 }
586
/* Configure and enable video capture on the DeckLink input.
 *
 * Selects the requested input connection (unless auto), enables input
 * format detection when mode=auto (failing if the hardware cannot do it),
 * enables video input with the configured mode and pixel format, and kicks
 * off streaming if it was already requested.  Returns TRUE on success;
 * a second call while video is already enabled is a no-op. */
static gboolean
gst_decklink_video_src_start (GstDecklinkVideoSrc * self)
{
  const GstDecklinkMode *mode;
  BMDVideoInputFlags flags;
  HRESULT ret;
  BMDPixelFormat format;

  /* Already enabled by a previous call: nothing to do */
  g_mutex_lock (&self->input->lock);
  if (self->input->video_enabled) {
    g_mutex_unlock (&self->input->lock);
    return TRUE;
  }
  g_mutex_unlock (&self->input->lock);

  /* Route the requested physical connector; AUTO leaves the card default */
  if (self->input->config && self->connection != GST_DECKLINK_CONNECTION_AUTO) {
    ret = self->input->config->SetInt (bmdDeckLinkConfigVideoInputConnection,
        gst_decklink_get_connection (self->connection));
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set configuration (input source): 0x%08lx",
          (unsigned long) ret);
      return FALSE;
    }

    /* Composite input additionally needs the 7.5 IRE setup flag */
    if (self->connection == GST_DECKLINK_CONNECTION_COMPOSITE) {
      ret = self->input->config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags,
          bmdAnalogVideoFlagCompositeSetup75);
      if (ret != S_OK) {
        GST_ERROR_OBJECT (self,
            "Failed to set configuration (composite setup): 0x%08lx",
            (unsigned long) ret);
        return FALSE;
      }
    }
  }

  flags = bmdVideoInputFlagDefault;
  /* mode=auto requires hardware input format detection; fail early if the
   * card does not support it */
  if (self->mode == GST_DECKLINK_MODE_AUTO) {
    bool autoDetection = false;

    if (self->input->attributes) {
      ret =
          self->input->
          attributes->GetFlag (BMDDeckLinkSupportsInputFormatDetection,
          &autoDetection);
      if (ret != S_OK) {
        GST_ERROR_OBJECT (self,
            "Failed to get attribute (autodetection): 0x%08lx",
            (unsigned long) ret);
        return FALSE;
      }
      if (autoDetection)
        flags |= bmdVideoInputEnableFormatDetection;
    }
    if (!autoDetection) {
      GST_ERROR_OBJECT (self, "Failed to activate auto-detection");
      return FALSE;
    }
  }

  mode = gst_decklink_get_mode (self->mode);
  g_assert (mode != NULL);

  format = self->caps_format;
  ret = self->input->input->EnableVideoInput (mode->mode, format, flags);
  if (ret != S_OK) {
    GST_WARNING_OBJECT (self, "Failed to enable video input: 0x%08lx",
        (unsigned long) ret);
    return FALSE;
  }

  /* Publish the active mode and start streaming if requested earlier */
  g_mutex_lock (&self->input->lock);
  self->input->mode = mode;
  self->input->video_enabled = TRUE;
  if (self->input->start_streams)
    self->input->start_streams (self->input->videosrc);
  g_mutex_unlock (&self->input->lock);

  /* Reset frame-drop bookkeeping for the new capture session */
  self->skipped_last = 0;
  self->skip_from_timestamp = GST_CLOCK_TIME_NONE;
  self->skip_to_timestamp = GST_CLOCK_TIME_NONE;
  self->aspect_ratio_flag = -1;

  return TRUE;
}
673
/* Feed one (capture_time, stream_time) observation into the estimator that
 * maps DeckLink stream time to pipeline clock time.
 *
 * Observations are collected into a sliding window (decimated by
 * window_skip once the window has filled up) and a linear regression over
 * the window yields a candidate mapping.  The active mapping is then moved
 * towards the candidate by at most 5% of one frame duration per call, so
 * output timestamps never jump visibly. */
static void
gst_decklink_video_src_update_time_mapping (GstDecklinkVideoSrc * self,
    GstClockTime capture_time, GstClockTime stream_time)
{
  if (self->window_skip_count == 0) {
    GstClockTime num, den, b, xbase;
    gdouble r_squared;

    /* Store the (stream, capture) pair in the observation window */
    self->times[2 * self->window_fill] = stream_time;
    self->times[2 * self->window_fill + 1] = capture_time;

    self->window_fill++;
    self->window_skip_count++;
    if (self->window_skip_count >= self->window_skip)
      self->window_skip_count = 0;

    if (self->window_fill >= self->window_size) {
      /* fps rounded up (integer ceiling computed via doubles) */
      guint fps =
          ((gdouble) self->info.fps_n + self->info.fps_d -
          1) / ((gdouble) self->info.fps_d);

      /* Start by updating first every frame, once full every second frame,
       * etc. until we update once every 4 seconds */
      if (self->window_skip < 4 * fps)
        self->window_skip *= 2;
      if (self->window_skip >= 4 * fps)
        self->window_skip = 4 * fps;

      self->window_fill = 0;
      self->window_filled = TRUE;
    }

    /* First sample ever, create some basic mapping to start */
    if (!self->window_filled && self->window_fill == 1) {
      self->current_time_mapping.xbase = stream_time;
      self->current_time_mapping.b = capture_time;
      self->current_time_mapping.num = 1;
      self->current_time_mapping.den = 1;
      self->next_time_mapping_pending = FALSE;
    }

    /* Only bother calculating anything here once we had enough measurements,
     * i.e. let's take the window size as a start */
    if (self->window_filled &&
        gst_calculate_linear_regression (self->times, self->times_temp,
            self->window_size, &num, &den, &b, &xbase, &r_squared)) {

      GST_DEBUG_OBJECT (self,
          "Calculated new time mapping: pipeline time = %lf * (stream time - %"
          G_GUINT64_FORMAT ") + %" G_GUINT64_FORMAT " (%lf)",
          ((gdouble) num) / ((gdouble) den), xbase, b, r_squared);

      self->next_time_mapping.xbase = xbase;
      self->next_time_mapping.b = b;
      self->next_time_mapping.num = num;
      self->next_time_mapping.den = den;
      self->next_time_mapping_pending = TRUE;
    }
  } else {
    /* Decimated observation: just advance the skip counter */
    self->window_skip_count++;
    if (self->window_skip_count >= self->window_skip)
      self->window_skip_count = 0;
  }

  /* Gradually blend the pending candidate mapping into the active one */
  if (self->next_time_mapping_pending) {
    GstClockTime expected, new_calculated, diff, max_diff;

    expected =
        gst_clock_adjust_with_calibration (NULL, stream_time,
        self->current_time_mapping.xbase, self->current_time_mapping.b,
        self->current_time_mapping.num, self->current_time_mapping.den);
    new_calculated =
        gst_clock_adjust_with_calibration (NULL, stream_time,
        self->next_time_mapping.xbase, self->next_time_mapping.b,
        self->next_time_mapping.num, self->next_time_mapping.den);

    if (new_calculated > expected)
      diff = new_calculated - expected;
    else
      diff = expected - new_calculated;

    /* At most 5% frame duration change per update */
    max_diff =
        gst_util_uint64_scale (GST_SECOND / 20, self->info.fps_d,
        self->info.fps_n);

    GST_DEBUG_OBJECT (self,
        "New time mapping causes difference of %" GST_TIME_FORMAT,
        GST_TIME_ARGS (diff));
    GST_DEBUG_OBJECT (self, "Maximum allowed per frame %" GST_TIME_FORMAT,
        GST_TIME_ARGS (max_diff));

    if (diff > max_diff) {
      /* adjust so that we move that much closer */
      if (new_calculated > expected) {
        self->current_time_mapping.b = expected + max_diff;
        self->current_time_mapping.xbase = stream_time;
      } else {
        self->current_time_mapping.b = expected - max_diff;
        self->current_time_mapping.xbase = stream_time;
      }
    } else {
      /* Candidate is close enough: adopt it wholesale */
      self->current_time_mapping.xbase = self->next_time_mapping.xbase;
      self->current_time_mapping.b = self->next_time_mapping.b;
      self->current_time_mapping.num = self->next_time_mapping.num;
      self->current_time_mapping.den = self->next_time_mapping.den;
      self->next_time_mapping_pending = FALSE;
    }
  }
}
784
785 static void
gst_decklink_video_src_got_frame(GstElement * element,IDeckLinkVideoInputFrame * frame,GstDecklinkModeEnum mode,GstClockTime capture_time,GstClockTime stream_time,GstClockTime stream_duration,GstClockTime hardware_time,GstClockTime hardware_duration,IDeckLinkTimecode * dtc,gboolean no_signal)786 gst_decklink_video_src_got_frame (GstElement * element,
787 IDeckLinkVideoInputFrame * frame, GstDecklinkModeEnum mode,
788 GstClockTime capture_time, GstClockTime stream_time,
789 GstClockTime stream_duration, GstClockTime hardware_time,
790 GstClockTime hardware_duration, IDeckLinkTimecode * dtc, gboolean no_signal)
791 {
792 GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
793 GstClockTime timestamp, duration;
794
795 GST_LOG_OBJECT (self,
796 "Got video frame at %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT " (%"
797 GST_TIME_FORMAT "), no signal: %d", GST_TIME_ARGS (capture_time),
798 GST_TIME_ARGS (stream_time), GST_TIME_ARGS (stream_duration), no_signal);
799
800 g_mutex_lock (&self->lock);
801 if (self->first_time == GST_CLOCK_TIME_NONE)
802 self->first_time = stream_time;
803
804 if (self->skip_first_time > 0
805 && stream_time - self->first_time < self->skip_first_time) {
806 g_mutex_unlock (&self->lock);
807 GST_DEBUG_OBJECT (self,
808 "Skipping frame as requested: %" GST_TIME_FORMAT " < %" GST_TIME_FORMAT,
809 GST_TIME_ARGS (stream_time),
810 GST_TIME_ARGS (self->skip_first_time + self->first_time));
811 return;
812 }
813
814 if (self->drop_no_signal_frames && no_signal) {
815 CaptureFrame f;
816 memset (&f, 0, sizeof (f));
817
818 /* Notify the streaming thread about the signal loss */
819 gst_queue_array_push_tail_struct (self->current_frames, &f);
820 g_cond_signal (&self->cond);
821 g_mutex_unlock (&self->lock);
822
823 return;
824 }
825
826 gst_decklink_video_src_update_time_mapping (self, capture_time, stream_time);
827 if (self->output_stream_time) {
828 timestamp = stream_time;
829 duration = stream_duration;
830 } else {
831 timestamp =
832 gst_clock_adjust_with_calibration (NULL, stream_time,
833 self->current_time_mapping.xbase, self->current_time_mapping.b,
834 self->current_time_mapping.num, self->current_time_mapping.den);
835 duration =
836 gst_util_uint64_scale (stream_duration, self->current_time_mapping.num,
837 self->current_time_mapping.den);
838 }
839
840 GST_LOG_OBJECT (self,
841 "Converted times to %" GST_TIME_FORMAT " (%"
842 GST_TIME_FORMAT ")", GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));
843
844 if (!self->flushing) {
845 CaptureFrame f;
846 const GstDecklinkMode *bmode;
847 GstVideoTimeCodeFlags flags = GST_VIDEO_TIME_CODE_FLAGS_NONE;
848 guint field_count = 0;
849 guint skipped_frames = 0;
850
851 while (gst_queue_array_get_length (self->current_frames) >=
852 self->buffer_size) {
853 CaptureFrame *tmp = (CaptureFrame *)
854 gst_queue_array_pop_head_struct (self->current_frames);
855 if (tmp->frame) {
856 if (skipped_frames == 0 && self->skipped_last == 0)
857 self->skip_from_timestamp = tmp->timestamp;
858 skipped_frames++;
859 self->skip_to_timestamp = tmp->timestamp;
860 }
861 capture_frame_clear (tmp);
862 }
863
864 if (self->skipped_last == 0 && skipped_frames > 0) {
865 GST_WARNING_OBJECT (self, "Starting to drop frames");
866 }
867
868 if (skipped_frames == 0 && self->skipped_last > 0) {
869 GST_ELEMENT_WARNING_WITH_DETAILS (self,
870 STREAM, FAILED,
871 ("Dropped %u old frames from %" GST_TIME_FORMAT " to %"
872 GST_TIME_FORMAT, self->skipped_last,
873 GST_TIME_ARGS (self->skip_from_timestamp),
874 GST_TIME_ARGS (self->skip_to_timestamp)),
875 (NULL),
876 ("dropped", G_TYPE_UINT, self->skipped_last,
877 "from", G_TYPE_UINT64, self->skip_from_timestamp,
878 "to", G_TYPE_UINT64, self->skip_to_timestamp, NULL));
879 self->skipped_last = 0;
880 }
881
882 self->skipped_last += skipped_frames;
883
884 memset (&f, 0, sizeof (f));
885 f.frame = frame;
886 f.timestamp = timestamp;
887 f.duration = duration;
888 f.stream_timestamp = stream_time;
889 f.stream_duration = stream_duration;
890 f.hardware_timestamp = hardware_time;
891 f.hardware_duration = hardware_duration;
892 f.mode = mode;
893 f.format = frame->GetPixelFormat ();
894 f.no_signal = no_signal;
895 if (dtc != NULL) {
896 uint8_t hours, minutes, seconds, frames;
897 HRESULT res;
898
899 res = dtc->GetComponents (&hours, &minutes, &seconds, &frames);
900 if (res != S_OK) {
901 GST_ERROR ("Could not get components for timecode %p: 0x%08lx", dtc,
902 (unsigned long) res);
903 f.tc = NULL;
904 } else {
905 GST_DEBUG_OBJECT (self, "Got timecode %02d:%02d:%02d:%02d",
906 hours, minutes, seconds, frames);
907 bmode = gst_decklink_get_mode (mode);
908 if (bmode->interlaced)
909 flags =
910 (GstVideoTimeCodeFlags) (flags |
911 GST_VIDEO_TIME_CODE_FLAGS_INTERLACED);
912 if (bmode->fps_d == 1001) {
913 if (bmode->fps_n == 30000 || bmode->fps_n == 60000) {
914 /* Some occurrences have been spotted where the driver mistakenly
915 * fails to set the drop-frame flag for drop-frame timecodes.
916 * Assume always drop-frame for 29.97 and 59.94 FPS */
917 flags =
918 (GstVideoTimeCodeFlags) (flags |
919 GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME);
920 } else {
921 /* Drop-frame isn't defined for any other framerates (e.g. 23.976)
922 * */
923 flags =
924 (GstVideoTimeCodeFlags) (flags &
925 ~GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME);
926 }
927 }
928 f.tc =
929 gst_video_time_code_new (bmode->fps_n, bmode->fps_d, NULL, flags,
930 hours, minutes, seconds, frames, field_count);
931 }
932 dtc->Release ();
933 } else {
934 f.tc = NULL;
935 }
936
937 frame->AddRef ();
938 gst_queue_array_push_tail_struct (self->current_frames, &f);
939 g_cond_signal (&self->cond);
940 }
941 g_mutex_unlock (&self->lock);
942 }
943
static void
/* Parse the VANC data of one vertical-blanking line and attach any
 * closed-caption (CEA-608/708) or AFD/Bar metadata found there to @buffer.
 *
 * @self:              the source element (holds the VBI parser and the
 *                     remembered per-field line numbers)
 * @buffer:            in/out buffer the metas are added to
 * @vanc_frame:        ancillary-data interface of the captured frame
 * @field2_offset:     0 for field 1; the driver's field-2 line offset
 *                     otherwise (used both for addressing the line and for
 *                     deciding which last_*_field2 cache to update)
 * @line:              field-relative line number to inspect
 * @found_cc_out:      in/out: already-found flag for captions; set to TRUE
 *                     if captions were found on this line
 * @found_afd_bar_out: in/out: same for AFD/Bar
 */
extract_vbi_line (GstDecklinkVideoSrc * self, GstBuffer ** buffer,
    IDeckLinkVideoFrameAncillary * vanc_frame, guint field2_offset, guint line,
    gboolean * found_cc_out, gboolean * found_afd_bar_out)
{
  GstVideoAncillary gstanc;
  const guint8 *vancdata;
  gboolean found_cc = FALSE, found_afd_bar = FALSE;

  /* No VANC available on this line: nothing to do. */
  if (vanc_frame->GetBufferForVerticalBlankingLine (field2_offset + line,
          (void **) &vancdata) != S_OK)
    return;

  GST_DEBUG_OBJECT (self, "Checking for VBI data on field line %u (field %u)",
      field2_offset + line, field2_offset ? 2 : 1);
  gst_video_vbi_parser_add_line (self->vbiparser, vancdata);

  /* Check if CC or AFD/Bar is on this line if we didn't find any on a
   * previous line. Remember the line where we found them */

  while (gst_video_vbi_parser_get_ancillary (self->vbiparser,
          &gstanc) == GST_VIDEO_VBI_PARSER_RESULT_OK) {
    switch (GST_VIDEO_ANCILLARY_DID16 (&gstanc)) {
      case GST_VIDEO_ANCILLARY_DID16_S334_EIA_708:
        /* Skip if captions were already taken from an earlier line, or if
         * caption extraction is disabled. */
        if (*found_cc_out || !self->output_cc)
          continue;

        GST_DEBUG_OBJECT (self,
            "Adding CEA-708 CDP meta to buffer for line %u",
            field2_offset + line);
        GST_MEMDUMP_OBJECT (self, "CDP", gstanc.data, gstanc.data_count);
        gst_buffer_add_video_caption_meta (*buffer,
            GST_VIDEO_CAPTION_TYPE_CEA708_CDP, gstanc.data, gstanc.data_count);

        found_cc = TRUE;
        /* Cache the line so the next frame can be probed there first. */
        if (field2_offset)
          self->last_cc_vbi_line_field2 = line;
        else
          self->last_cc_vbi_line = line;
        break;
      case GST_VIDEO_ANCILLARY_DID16_S334_EIA_608:
        if (*found_cc_out || !self->output_cc)
          continue;

        GST_DEBUG_OBJECT (self,
            "Adding CEA-608 meta to buffer for line %u", field2_offset + line);
        GST_MEMDUMP_OBJECT (self, "CEA608", gstanc.data, gstanc.data_count);
        gst_buffer_add_video_caption_meta (*buffer,
            GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A, gstanc.data,
            gstanc.data_count);

        found_cc = TRUE;
        if (field2_offset)
          self->last_cc_vbi_line_field2 = line;
        else
          self->last_cc_vbi_line = line;
        break;
      case GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR:{
        GstVideoAFDValue afd;
        gboolean is_letterbox;
        guint16 bar1, bar2;

        if (*found_afd_bar_out || !self->output_afd_bar)
          continue;

        GST_DEBUG_OBJECT (self,
            "Adding AFD/Bar meta to buffer for line %u", field2_offset + line);
        GST_MEMDUMP_OBJECT (self, "AFD/Bar", gstanc.data, gstanc.data_count);

        /* An ST 2016-3 AFD/Bar packet carries at least 8 payload bytes;
         * anything shorter is malformed and ignored. */
        if (gstanc.data_count < 8) {
          GST_WARNING_OBJECT (self, "AFD/Bar data too small");
          continue;
        }

        /* Bit layout per SMPTE ST 2016-3: byte 0 holds the aspect-ratio
         * flag (bit 2) and the 4-bit AFD code (bits 3-6); byte 3 holds the
         * bar-data flags; bytes 4-7 the two 16-bit bar values. */
        self->aspect_ratio_flag = (gstanc.data[0] >> 2) & 0x1;

        afd = (GstVideoAFDValue) ((gstanc.data[0] >> 3) & 0xf);
        is_letterbox = ((gstanc.data[3] >> 4) & 0x3) == 0;
        bar1 = GST_READ_UINT16_BE (&gstanc.data[4]);
        bar2 = GST_READ_UINT16_BE (&gstanc.data[6]);

        gst_buffer_add_video_afd_meta (*buffer, field2_offset ? 1 : 0,
            GST_VIDEO_AFD_SPEC_SMPTE_ST2016_1, afd);
        gst_buffer_add_video_bar_meta (*buffer, field2_offset ? 1 : 0,
            is_letterbox, bar1, bar2);

        found_afd_bar = TRUE;
        if (field2_offset)
          self->last_afd_bar_vbi_line_field2 = line;
        else
          self->last_afd_bar_vbi_line = line;
        break;
      }
      default:
        /* otherwise continue looking */
        continue;
    }
  }

  /* Only propagate to the caller's flags at the end, so that several
   * packets on this same line are all processed above. */
  if (found_cc)
    *found_cc_out = TRUE;
  if (found_afd_bar)
    *found_afd_bar_out = TRUE;
}
1048
static void
/* Extract VBI/VANC metadata (closed captions and AFD/Bar) from the captured
 * frame @vf and attach it to @buffer.
 *
 * Strategy: first re-probe the lines where data was found on the previous
 * frame (cached in self->last_*_vbi_line*); if that fails, scan the first
 * 21 lines of the field. For interlaced modes the same procedure is
 * repeated for field 2 at the format-specific driver line offset.
 */
extract_vbi (GstDecklinkVideoSrc * self, GstBuffer ** buffer, VideoFrame * vf)
{
  IDeckLinkVideoFrameAncillary *vanc_frame = NULL;
  gint line;
  GstVideoFormat videoformat;
  GstDecklinkModeEnum mode_enum;
  const GstDecklinkMode *mode;
  gboolean found_cc = FALSE, found_afd_bar = FALSE;

  /* No ancillary data on this frame: nothing to extract. */
  if (vf->frame->GetAncillaryData (&vanc_frame) != S_OK)
    return;

  videoformat =
      gst_decklink_video_format_from_type (vanc_frame->GetPixelFormat ());
  mode_enum =
      gst_decklink_get_mode_enum_from_bmd (vanc_frame->GetDisplayMode ());
  mode = gst_decklink_get_mode (mode_enum);

  if (videoformat == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_DEBUG_OBJECT (self, "Unknown video format for Ancillary data");
    vanc_frame->Release ();
    return;
  }

  /* The VBI parser is tied to a specific pixel format and width; throw it
   * away if either changed so it gets re-created below. */
  if ((videoformat != self->anc_vformat || mode->width != self->anc_width)
      && self->vbiparser) {
    gst_video_vbi_parser_free (self->vbiparser);
    self->vbiparser = NULL;
  }

  if (self->vbiparser == NULL) {
    self->vbiparser = gst_video_vbi_parser_new (videoformat, mode->width);
    self->anc_vformat = videoformat;
    self->anc_width = mode->width;
  }

  GST_DEBUG_OBJECT (self, "Checking for ancillary data in VBI");

  /* First check last known lines, if any */
  if (self->last_cc_vbi_line > 0) {
    extract_vbi_line (self, buffer, vanc_frame, 0, self->last_cc_vbi_line,
        &found_cc, &found_afd_bar);
  }
  if (self->last_afd_bar_vbi_line > 0
      && self->last_cc_vbi_line != self->last_afd_bar_vbi_line) {
    extract_vbi_line (self, buffer, vanc_frame, 0, self->last_afd_bar_vbi_line,
        &found_cc, &found_afd_bar);
  }

  /* Invalidate the caches when the remembered line no longer carries the
   * data, so the full scan below (and the next frame) starts fresh. */
  if (!found_cc)
    self->last_cc_vbi_line = -1;
  if (!found_afd_bar)
    self->last_afd_bar_vbi_line = -1;

  if ((self->output_cc && !found_cc) || (self->output_afd_bar
          && !found_afd_bar)) {
    /* Otherwise loop through the first 21 lines and hope to find the data */
    /* FIXME: For the different formats the number of lines that can contain
     * VANC are different */
    for (line = 1; line < 22; line++) {
      extract_vbi_line (self, buffer, vanc_frame, 0, line, &found_cc,
          &found_afd_bar);

      /* If we found everything we wanted to extract, stop here */
      if ((!self->output_cc || found_cc) &&
          (!self->output_afd_bar || found_afd_bar))
        break;
    }
  }

  /* Do the same for field 2 in case of interlaced content */
  if (GST_VIDEO_INFO_IS_INTERLACED (&self->info)) {
    gboolean found_cc_field2 = FALSE, found_afd_bar_field2 = FALSE;
    guint field2_offset = 0;

    /* The VANC lines for the second field are at an offset, depending on
     * the format in use
     */
    switch (self->info.height) {
      case 486:
        /* NTSC: 525 / 2 + 1 */
        field2_offset = 263;
        break;
      case 576:
        /* PAL: 625 / 2 + 1 */
        field2_offset = 313;
        break;
      case 1080:
        /* 1080i: 1125 / 2 + 1 */
        field2_offset = 563;
        break;
      default:
        /* Only the three interlaced heights above can reach this code. */
        g_assert_not_reached ();
    }

    /* First try the same lines as for field 1 if we don't know yet */
    if (self->last_cc_vbi_line_field2 <= 0)
      self->last_cc_vbi_line_field2 = self->last_cc_vbi_line;
    if (self->last_afd_bar_vbi_line_field2 <= 0)
      self->last_afd_bar_vbi_line_field2 = self->last_afd_bar_vbi_line;

    if (self->last_cc_vbi_line_field2 > 0) {
      extract_vbi_line (self, buffer, vanc_frame, field2_offset,
          self->last_cc_vbi_line_field2, &found_cc_field2,
          &found_afd_bar_field2);
    }
    if (self->last_afd_bar_vbi_line_field2 > 0
        && self->last_cc_vbi_line_field2 !=
        self->last_afd_bar_vbi_line_field2) {
      extract_vbi_line (self, buffer, vanc_frame, field2_offset,
          self->last_afd_bar_vbi_line_field2, &found_cc_field2,
          &found_afd_bar_field2);
    }

    if (!found_cc_field2)
      self->last_cc_vbi_line_field2 = -1;
    if (!found_afd_bar_field2)
      self->last_afd_bar_vbi_line_field2 = -1;

    if (((self->output_cc && !found_cc_field2) || (self->output_afd_bar
                && !found_afd_bar_field2))) {
      for (line = 1; line < 22; line++) {
        extract_vbi_line (self, buffer, vanc_frame, field2_offset, line,
            &found_cc_field2, &found_afd_bar_field2);

        /* If we found everything we wanted to extract, stop here */
        if ((!self->output_cc || found_cc_field2) &&
            (!self->output_afd_bar || found_afd_bar_field2))
          break;
      }
    }
  }

  /* Balances GetAncillaryData() above. */
  vanc_frame->Release ();
}
1185
static GstFlowReturn
/* GstPushSrc::create() implementation: block until the capture callback has
 * queued a frame, wrap it zero-copy into a GstBuffer, track signal state,
 * handle automatic mode/format changes and attach timestamps, timecode and
 * VBI metadata.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_FLUSHING when unlocked, or
 * GST_FLOW_NOT_NEGOTIATED on startup/caps failures.
 */
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;
  const GstDecklinkMode *mode;
  /* Caps used to label the reference timestamp metas added below. */
  static GstStaticCaps stream_reference =
      GST_STATIC_CAPS ("timestamp/x-decklink-stream");
  static GstStaticCaps hardware_reference =
      GST_STATIC_CAPS ("timestamp/x-decklink-hardware");

  if (!gst_decklink_video_src_start (self)) {
    return GST_FLOW_NOT_NEGOTIATED;
  }

  g_mutex_lock (&self->lock);
retry:
  /* Wait for the capture thread to queue a frame (or for a flush). */
  while (gst_queue_array_is_empty (self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  if (self->flushing) {
    GST_DEBUG_OBJECT (self, "Flushing");
    g_mutex_unlock (&self->lock);
    return GST_FLOW_FLUSHING;
  }

  /* Copy the struct out of the queue; we now own its references. */
  f = *(CaptureFrame *) gst_queue_array_pop_head_struct (self->current_frames);

  // We will have no frame if frames without signal are dropped immediately
  // but we still have to signal that it's lost here.
  if (f.no_signal || !f.frame) {
    if (self->signal_state != SIGNAL_STATE_LOST) {
      self->signal_state = SIGNAL_STATE_LOST;
      g_object_notify (G_OBJECT (self), "signal");
      GST_ELEMENT_WARNING (GST_ELEMENT (self), RESOURCE, READ, ("Signal lost"),
          ("No input source was detected - video frames invalid"));
    }
    // If we have no frame here, simply retry until we got one
    if (!f.frame) {
      capture_frame_clear (&f);
      goto retry;
    }
  } else {
    GstDecklinkSignalState previous_signal_state = self->signal_state;

    if (previous_signal_state != SIGNAL_STATE_AVAILABLE) {
      self->signal_state = SIGNAL_STATE_AVAILABLE;
      g_object_notify (G_OBJECT (self), "signal");
    }

    if (previous_signal_state == SIGNAL_STATE_LOST) {
      GST_ELEMENT_INFO (GST_ELEMENT (self), RESOURCE, READ,
          ("Signal recovered"), ("Input source detected"));
    }
  }

  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f.frame != NULL);

  // Create output buffer
  f.frame->GetBytes ((gpointer *) & data);
  data_size = f.frame->GetHeight() * f.frame->GetRowBytes();

  /* vf keeps the DeckLink frame (and its input) alive for as long as the
   * wrapped GstBuffer exists; video_frame_free drops both refs. */
  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f.frame;
  f.frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  // Reset aspect ratio flag if the mode has changed. The new mode might not
  // have AFD/Bar VANC.
  if (self->caps_mode != f.mode) {
    self->aspect_ratio_flag = -1;
  }
  // If we have a format that supports VANC and we are asked to extract CC,
  // then do it here.
  if ((self->output_cc || self->output_afd_bar)
      && self->signal_state != SIGNAL_STATE_LOST)
    extract_vbi (self, buffer, vf);

  if (!gst_pad_has_current_caps (GST_BASE_SRC_PAD (self))) {
    caps_changed = TRUE;
  }
  // If there was AFD information with the aspect ratio flag set and the mode
  // is auto then we have to switch from normal NTSC/PAL to the widescreen
  // variants
  if (self->aspect_ratio_flag != -1 && self->mode == GST_DECKLINK_MODE_AUTO) {
    switch (f.mode) {
      case GST_DECKLINK_MODE_NTSC:
        f.mode =
            self->aspect_ratio_flag ==
            1 ? GST_DECKLINK_MODE_NTSC_WIDESCREEN : GST_DECKLINK_MODE_NTSC;
        break;
      case GST_DECKLINK_MODE_NTSC_P:
        f.mode =
            self->aspect_ratio_flag ==
            1 ? GST_DECKLINK_MODE_NTSC_P_WIDESCREEN : GST_DECKLINK_MODE_NTSC_P;
        break;
      case GST_DECKLINK_MODE_NTSC2398:
        f.mode =
            self->aspect_ratio_flag ==
            1 ? GST_DECKLINK_MODE_NTSC2398_WIDESCREEN :
            GST_DECKLINK_MODE_NTSC2398;
        break;
      case GST_DECKLINK_MODE_PAL:
        f.mode =
            self->aspect_ratio_flag ==
            1 ? GST_DECKLINK_MODE_PAL_WIDESCREEN : GST_DECKLINK_MODE_PAL;
        break;
      case GST_DECKLINK_MODE_PAL_P:
        f.mode =
            self->aspect_ratio_flag ==
            1 ? GST_DECKLINK_MODE_PAL_P_WIDESCREEN : GST_DECKLINK_MODE_PAL_P;
        break;
      default:
        break;
    }
  }

  /* Mode changes are only acceptable in auto mode or before caps are set;
   * otherwise the captured frame contradicts the negotiated caps. */
  if (self->caps_mode != f.mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO
        || !gst_pad_has_current_caps (GST_BASE_SRC_PAD (self))) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f.mode);
      caps_changed = TRUE;
      self->caps_mode = f.mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f.mode));
      capture_frame_clear (&f);
      gst_clear_buffer (buffer);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  /* Same policy for pixel-format changes. */
  if (self->caps_format != f.format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO
        || !gst_pad_has_current_caps (GST_BASE_SRC_PAD (self))) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d", self->caps_format,
          f.format);
      caps_changed = TRUE;
      self->caps_format = f.format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f.format));
      capture_frame_clear (&f);
      gst_clear_buffer (buffer);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }

  /* 1 ns error can be just a rounding error, so that's OK. The Decklink
   * drivers give us a really steady stream time, so anything above 1 ns can't
   * be a rounding error and is therefore something to worry about */
  if (self->expected_stream_time != GST_CLOCK_TIME_NONE &&
      ABSDIFF (self->expected_stream_time, f.stream_timestamp) > 1) {
    GstMessage *msg;
    GstClockTime running_time;

    self->dropped += f.stream_timestamp - self->expected_stream_time;
    running_time = gst_segment_to_running_time (&GST_BASE_SRC (self)->segment,
        GST_FORMAT_TIME, f.timestamp);

    /* Post a QoS message so applications can observe capture drops. */
    msg =
        gst_message_new_qos (GST_OBJECT (self), TRUE, running_time,
        f.stream_timestamp, f.timestamp, f.duration);
    gst_message_set_qos_stats (msg, GST_FORMAT_TIME, self->processed,
        self->dropped);
    gst_element_post_message (GST_ELEMENT (self), msg);
  }
  if (self->first_stream_time == GST_CLOCK_TIME_NONE)
    self->first_stream_time = f.stream_timestamp;
  /* processed/dropped are tracked in stream-time units. */
  self->processed =
      f.stream_timestamp - self->dropped - self->first_stream_time;
  self->expected_stream_time = f.stream_timestamp + f.stream_duration;

  g_mutex_unlock (&self->lock);
  if (caps_changed) {
    /* New caps invalidate the cached VBI line positions and parser. */
    self->last_cc_vbi_line = -1;
    self->last_afd_bar_vbi_line = -1;
    self->last_cc_vbi_line_field2 = -1;
    self->last_afd_bar_vbi_line_field2 = -1;
    caps = gst_decklink_mode_get_caps (f.mode, f.format, TRUE);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
    if (self->vbiparser) {
      gst_video_vbi_parser_free (self->vbiparser);
      self->vbiparser = NULL;
      self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
      self->anc_width = 0;
    }
  }

  if (f.no_signal)
    GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_GAP);
  GST_BUFFER_TIMESTAMP (*buffer) = f.timestamp;
  GST_BUFFER_DURATION (*buffer) = f.duration;
  if (f.tc != NULL)
    gst_buffer_add_video_time_code_meta (*buffer, f.tc);
  /* Expose raw DeckLink stream and hardware clock times as reference
   * timestamp metas for downstream consumers. */
  gst_buffer_add_reference_timestamp_meta (*buffer,
      gst_static_caps_get (&stream_reference), f.stream_timestamp,
      f.stream_duration);
  gst_buffer_add_reference_timestamp_meta (*buffer,
      gst_static_caps_get (&hardware_reference), f.hardware_timestamp,
      f.hardware_duration);

  mode = gst_decklink_get_mode (self->caps_mode);
  if (mode->interlaced && mode->tff)
    GST_BUFFER_FLAG_SET (*buffer,
        GST_VIDEO_BUFFER_FLAG_TFF | GST_VIDEO_BUFFER_FLAG_INTERLACED);
  else if (mode->interlaced)
    GST_BUFFER_FLAG_SET (*buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  /* Drops the queue's references; the buffer holds its own via vf. */
  capture_frame_clear (&f);

  return flow_ret;
}
1427
1428 static GstCaps *
gst_decklink_video_src_get_caps(GstBaseSrc * bsrc,GstCaps * filter)1429 gst_decklink_video_src_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
1430 {
1431 GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
1432 GstCaps *caps;
1433
1434 if (self->mode != GST_DECKLINK_MODE_AUTO) {
1435 caps = gst_decklink_mode_get_caps (self->mode, self->caps_format, TRUE);
1436 } else if (self->caps_mode != GST_DECKLINK_MODE_AUTO) {
1437 caps =
1438 gst_decklink_mode_get_caps (self->caps_mode, self->caps_format, TRUE);
1439 } else {
1440 caps = gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (bsrc));
1441 }
1442
1443 if (filter) {
1444 GstCaps *tmp =
1445 gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
1446 gst_caps_unref (caps);
1447 caps = tmp;
1448 }
1449
1450 return caps;
1451 }
1452
1453 static gboolean
gst_decklink_video_src_query(GstBaseSrc * bsrc,GstQuery * query)1454 gst_decklink_video_src_query (GstBaseSrc * bsrc, GstQuery * query)
1455 {
1456 GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
1457 gboolean ret = TRUE;
1458
1459 switch (GST_QUERY_TYPE (query)) {
1460 case GST_QUERY_LATENCY:{
1461 if (self->input) {
1462 GstClockTime min, max;
1463 const GstDecklinkMode *mode;
1464
1465 g_mutex_lock (&self->lock);
1466 mode = gst_decklink_get_mode (self->caps_mode);
1467 g_mutex_unlock (&self->lock);
1468
1469 min = gst_util_uint64_scale_ceil (GST_SECOND, mode->fps_d, mode->fps_n);
1470 max = self->buffer_size * min;
1471
1472 gst_query_set_latency (query, TRUE, min, max);
1473 ret = TRUE;
1474 } else {
1475 ret = FALSE;
1476 }
1477
1478 break;
1479 }
1480 default:
1481 ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
1482 break;
1483 }
1484
1485 return ret;
1486 }
1487
1488 static gboolean
gst_decklink_video_src_unlock(GstBaseSrc * bsrc)1489 gst_decklink_video_src_unlock (GstBaseSrc * bsrc)
1490 {
1491 GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
1492
1493 g_mutex_lock (&self->lock);
1494 self->flushing = TRUE;
1495 g_cond_signal (&self->cond);
1496 g_mutex_unlock (&self->lock);
1497
1498 return TRUE;
1499 }
1500
1501 static gboolean
gst_decklink_video_src_unlock_stop(GstBaseSrc * bsrc)1502 gst_decklink_video_src_unlock_stop (GstBaseSrc * bsrc)
1503 {
1504 GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
1505
1506 g_mutex_lock (&self->lock);
1507 self->flushing = FALSE;
1508 while (gst_queue_array_get_length (self->current_frames) > 0) {
1509 CaptureFrame *tmp =
1510 (CaptureFrame *) gst_queue_array_pop_head_struct (self->current_frames);
1511 capture_frame_clear (tmp);
1512 }
1513 g_mutex_unlock (&self->lock);
1514
1515 return TRUE;
1516 }
1517
1518 static gboolean
gst_decklink_video_src_open(GstDecklinkVideoSrc * self)1519 gst_decklink_video_src_open (GstDecklinkVideoSrc * self)
1520 {
1521 const GstDecklinkMode *mode;
1522
1523 GST_DEBUG_OBJECT (self, "Opening");
1524
1525 self->input =
1526 gst_decklink_acquire_nth_input (self->device_number,
1527 GST_ELEMENT_CAST (self), FALSE);
1528 if (!self->input) {
1529 GST_ERROR_OBJECT (self, "Failed to acquire input");
1530 return FALSE;
1531 }
1532
1533 g_object_notify (G_OBJECT (self), "hw-serial-number");
1534
1535 mode = gst_decklink_get_mode (self->mode);
1536 g_assert (mode != NULL);
1537 g_mutex_lock (&self->input->lock);
1538 self->input->mode = mode;
1539 self->input->format = self->caps_format;
1540 self->input->got_video_frame = gst_decklink_video_src_got_frame;
1541 self->input->start_streams = gst_decklink_video_src_start_streams;
1542 g_mutex_unlock (&self->input->lock);
1543
1544 return TRUE;
1545 }
1546
1547 static gboolean
gst_decklink_video_src_close(GstDecklinkVideoSrc * self)1548 gst_decklink_video_src_close (GstDecklinkVideoSrc * self)
1549 {
1550
1551 GST_DEBUG_OBJECT (self, "Closing");
1552
1553 if (self->input) {
1554 g_mutex_lock (&self->input->lock);
1555 self->input->got_video_frame = NULL;
1556 self->input->mode = NULL;
1557 self->input->video_enabled = FALSE;
1558 self->input->start_streams = NULL;
1559 g_mutex_unlock (&self->input->lock);
1560
1561 gst_decklink_release_nth_input (self->device_number,
1562 GST_ELEMENT_CAST (self), FALSE);
1563 self->input = NULL;
1564 }
1565
1566 return TRUE;
1567 }
1568
1569 static gboolean
gst_decklink_video_src_stop(GstDecklinkVideoSrc * self)1570 gst_decklink_video_src_stop (GstDecklinkVideoSrc * self)
1571 {
1572 GST_DEBUG_OBJECT (self, "Stopping");
1573
1574 while (gst_queue_array_get_length (self->current_frames) > 0) {
1575 CaptureFrame *tmp =
1576 (CaptureFrame *) gst_queue_array_pop_head_struct (self->current_frames);
1577 capture_frame_clear (tmp);
1578 }
1579 self->caps_mode = GST_DECKLINK_MODE_AUTO;
1580
1581 if (self->input && self->input->video_enabled) {
1582 g_mutex_lock (&self->input->lock);
1583 self->input->video_enabled = FALSE;
1584 g_mutex_unlock (&self->input->lock);
1585
1586 self->input->input->DisableVideoInput ();
1587 }
1588
1589 if (self->vbiparser) {
1590 gst_video_vbi_parser_free (self->vbiparser);
1591 self->vbiparser = NULL;
1592 self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
1593 self->anc_width = 0;
1594 }
1595
1596 return TRUE;
1597 }
1598
1599 static void
gst_decklink_video_src_start_streams(GstElement * element)1600 gst_decklink_video_src_start_streams (GstElement * element)
1601 {
1602 GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
1603 HRESULT res;
1604
1605 if (self->input->video_enabled && (!self->input->audiosrc
1606 || self->input->audio_enabled)
1607 && (GST_STATE (self) == GST_STATE_PLAYING
1608 || GST_STATE_PENDING (self) == GST_STATE_PLAYING)) {
1609 GST_DEBUG_OBJECT (self, "Starting streams");
1610
1611 g_mutex_lock (&self->lock);
1612 self->first_time = GST_CLOCK_TIME_NONE;
1613 self->window_fill = 0;
1614 self->window_filled = FALSE;
1615 self->window_skip = 1;
1616 self->window_skip_count = 0;
1617 self->current_time_mapping.xbase = 0;
1618 self->current_time_mapping.b = 0;
1619 self->current_time_mapping.num = 1;
1620 self->current_time_mapping.den = 1;
1621 self->next_time_mapping.xbase = 0;
1622 self->next_time_mapping.b = 0;
1623 self->next_time_mapping.num = 1;
1624 self->next_time_mapping.den = 1;
1625 g_mutex_unlock (&self->lock);
1626 res = self->input->input->StartStreams ();
1627 if (res != S_OK) {
1628 GST_ELEMENT_ERROR (self, STREAM, FAILED,
1629 (NULL), ("Failed to start streams: 0x%08lx", (unsigned long) res));
1630 return;
1631 }
1632 } else {
1633 GST_DEBUG_OBJECT (self, "Not starting streams yet");
1634 }
1635 }
1636
static GstStateChangeReturn
/* GstElement::change_state() implementation: open/close the device on
 * NULL<->READY, reset bookkeeping, and stop/start the DeckLink streams
 * around the PAUSED<->PLAYING transitions. */
gst_decklink_video_src_change_state (GstElement * element,
    GstStateChange transition)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  /* Upward transitions are handled before chaining up to the parent. */
  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* Reset QoS statistics and stream-time tracking. */
      self->processed = 0;
      self->dropped = 0;
      self->expected_stream_time = GST_CLOCK_TIME_NONE;
      self->first_stream_time = GST_CLOCK_TIME_NONE;
      if (!gst_decklink_video_src_open (self)) {
        ret = GST_STATE_CHANGE_FAILURE;
        goto out;
      }
      /* A fixed pixel format combined with automatic mode detection can
       * confuse the detection; warn but proceed. */
      if (self->mode == GST_DECKLINK_MODE_AUTO &&
          self->video_format != GST_DECKLINK_VIDEO_FORMAT_AUTO) {
        GST_WARNING_OBJECT (self, "Warning: mode=auto and format!=auto may \
                not work");
      }
      self->vbiparser = NULL;
      self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
      self->anc_width = 0;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      self->flushing = FALSE;
      break;
    default:
      break;
  }

  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  /* Downward transitions are handled after the parent has changed state. */
  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      self->signal_state = SIGNAL_STATE_UNKNOWN;

      gst_decklink_video_src_stop (self);
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:{
      HRESULT res;

      GST_DEBUG_OBJECT (self, "Stopping streams");

      res = self->input->input->StopStreams ();
      if (res != S_OK) {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Failed to stop streams: 0x%08lx", (unsigned long) res));
        ret = GST_STATE_CHANGE_FAILURE;
      }
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
      /* Ask the shared input to start capture; it will only actually start
       * once audio (if present) is ready too. */
      g_mutex_lock (&self->input->lock);
      if (self->input->start_streams)
        self->input->start_streams (self->input->videosrc);
      g_mutex_unlock (&self->input->lock);

      break;
    }
    case GST_STATE_CHANGE_READY_TO_NULL:
      gst_decklink_video_src_close (self);
      break;
    default:
      break;
  }
out:

  return ret;
}
1713