1 /* GStreamer
2 * Copyright (C) 2011 David Schleef <ds@entropywave.com>
3 * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
18 * Boston, MA 02110-1335, USA.
19 */
20 /**
21 * SECTION:element-decklinkvideosink
22 * @short_description: Outputs Video to a BlackMagic DeckLink Device
23 *
24 * Playout Video to a BlackMagic DeckLink Device.
25 *
26 * ## Sample pipeline
27 * |[
28 * gst-launch-1.0 \
29 * videotestsrc ! \
30 * decklinkvideosink device-number=0 mode=1080p25
31 * ]|
32 * Playout a 1080p25 test-video to the SDI-Out of Card 0. Devices are numbered
33 * starting with 0.
34 *
35 * # Duplex-Mode:
 * Certain DeckLink Cards like the Duo2 or the Quad2 contain two or four
37 * independent SDI units with two connectors each. These units can operate either
38 * in half- or in full-duplex mode.
39 *
40 * The Duplex-Mode of a Card can be configured using the `duplex-mode`-Property.
 * Cards that do not support Duplex-Modes are not influenced by the property.
42 *
43 * ## Half-Duplex-Mode (default):
44 * By default decklinkvideosink will configure them into half-duplex mode, so that
45 * each connector acts as if it were an independent DeckLink Card which can either
 * be used as an Input or as an Output. In this mode the Duo2 can be used as 4 SDI
47 * In-/Outputs and the Quad2 as 8 SDI In-/Outputs.
48 *
49 * |[
50 * gst-launch-1.0 \
51 * videotestsrc foreground-color=0x00ff0000 ! decklinkvideosink device-number=0 mode=1080p25 \
52 * videotestsrc foreground-color=0x0000ff00 ! decklinkvideosink device-number=1 mode=1080p25 \
53 * videotestsrc foreground-color=0x000000ff ! decklinkvideosink device-number=2 mode=1080p25 \
54 * videotestsrc foreground-color=0x00ffffff ! decklinkvideosink device-number=3 mode=1080p25
55 * ]|
 * Playout four Test-Screens with colored Snow on the first four units in the System
57 * (ie. the Connectors 1-4 of a Duo2 unit).
58 *
59 * |[
60 * gst-launch-1.0 \
61 * videotestsrc is-live=true foreground-color=0x0000ff00 ! decklinkvideosink device-number=0 mode=1080p25 \
62 * decklinkvideosrc device-number=1 mode=1080p25 ! autovideosink \
63 * decklinkvideosrc device-number=2 mode=1080p25 ! autovideosink \
64 * videotestsrc is-live=true foreground-color=0x00ff0000 ! decklinkvideosink device-number=3 mode=1080p25
65 * ]|
66 * Capture 1080p25 from the second and third unit in the System,
67 * Playout a Test-Screen with colored Snow on the first and fourth unit
68 * (ie. the Connectors 1-4 of a Duo2 unit).
69 *
70 * ## Device-Number-Mapping in Half-Duplex-Mode
71 * The device-number to connector-mapping is as follows for the Duo2
72 * - `device-number=0` SDI1
73 * - `device-number=1` SDI3
74 * - `device-number=2` SDI2
75 * - `device-number=3` SDI4
76 *
77 * And for the Quad2
78 * - `device-number=0` SDI1
79 * - `device-number=1` SDI3
80 * - `device-number=2` SDI5
81 * - `device-number=3` SDI7
82 * - `device-number=4` SDI2
83 * - `device-number=5` SDI4
84 * - `device-number=6` SDI6
85 * - `device-number=7` SDI8
86 *
87 * ## Full-Duplex-Mode:
88 * When operating in full-duplex mode, two connectors of a unit are combined to
89 * a single device, performing keying with the second connection.
90 *
91 * ## Device-Number-Mapping in Full-Duplex-Mode
92 * The device-number to connector-mapping in full-duplex-mode is as follows for the Duo2
93 * - `device-number=0` SDI1 primary, SDI2 secondary
 * - `device-number=1` SDI3 primary, SDI4 secondary
95 *
96 * And for the Quad2
97 * - `device-number=0` SDI1 primary, SDI2 secondary
 * - `device-number=1` SDI3 primary, SDI4 secondary
99 * - `device-number=2` SDI5 primary, SDI6 secondary
100 * - `device-number=3` SDI7 primary, SDI8 secondary
101 *
102 * # Keying
 * Keying is the process of overlaying Video with an Alpha-Channel on top of an
104 * existing Video-Stream. The Duo2 and Quad2-Cards can perform two different
105 * Keying-Modes when operated in full-duplex mode. Both modes expect Video with
106 * an Alpha-Channel.
107 *
108 * ## Internal Keyer:
109 * In internal Keying-Mode the primary port becomes an Input and the secondary port
110 * an Output. The unit overlays Video played back from the Computer onto the Input
111 * and outputs the combined Video-Stream to the Output.
112 *
113 * |[
114 * gst-launch-1.0 \
115 * videotestsrc foreground-color=0x00000000 background-color=0x00000000 ! \
116 * video/x-raw,format=BGRA,width=1920,height=1080 ! \
117 * decklinkvideosink device-number=0 duplex-mode=full keyer-mode=internal video-format=8bit-bgra mode=1080p25
118 * ]|
119 *
120 * ## External Keyer:
 * In external Keying-Mode the primary port outputs the alpha-channel as the
 * luma-value (key-channel). Transparent pixels are black, opaque pixels are white.
 * The RGB-Components of the Video are output on the secondary channel.
124 *
125 * |[
126 * gst-launch-1.0 \
127 * videotestsrc foreground-color=0x00000000 background-color=0x00000000 ! \
128 * video/x-raw,format=BGRA,width=1920,height=1080 ! \
129 * decklinkvideosink device-number=0 duplex-mode=full keyer-mode=external video-format=8bit-bgra mode=1080p25
130 * ]|
131 */
132
133 #ifdef HAVE_CONFIG_H
134 #include "config.h"
135 #endif
136
137 #include "gstdecklinkvideosink.h"
138 #include <string.h>
139
140 GST_DEBUG_CATEGORY_STATIC (gst_decklink_video_sink_debug);
141 #define GST_CAT_DEFAULT gst_decklink_video_sink_debug
142
/* COM-style callback object handed to the DeckLink driver so it can notify
 * the sink about frame completion and playback shutdown. It keeps a strong
 * reference on the sink element for its whole lifetime and implements manual
 * reference counting, deleting itself when the count drops to zero. */
class GStreamerVideoOutputCallback:public IDeckLinkVideoOutputCallback
{
public:
  /* Takes a strong ref on @sink; the callback starts with refcount 1,
   * owned by whoever constructed it. */
  GStreamerVideoOutputCallback (GstDecklinkVideoSink * sink)
  :IDeckLinkVideoOutputCallback (), m_refcount (1)
  {
    m_sink = GST_DECKLINK_VIDEO_SINK_CAST (gst_object_ref (sink));
    g_mutex_init (&m_mutex);
  }

  /* No additional interfaces are exposed beyond the one the driver
   * already holds. */
  virtual HRESULT WINAPI QueryInterface (REFIID, LPVOID *)
  {
    return E_NOINTERFACE;
  }

  /* Mutex-protected refcount increment; returns the new count. */
  virtual ULONG WINAPI AddRef (void)
  {
    ULONG ret;

    g_mutex_lock (&m_mutex);
    m_refcount++;
    ret = m_refcount;
    g_mutex_unlock (&m_mutex);

    return ret;
  }

  /* Mutex-protected refcount decrement; deletes this object once the
   * count reaches zero. Note the mutex is only released before the
   * delete, so the destructor runs unlocked. */
  virtual ULONG WINAPI Release (void)
  {
    ULONG ret;

    g_mutex_lock (&m_mutex);
    m_refcount--;
    ret = m_refcount;
    g_mutex_unlock (&m_mutex);

    if (ret == 0) {
      delete this;
    }

    return ret;
  }

  /* Invoked by the driver for every scheduled frame once its fate is
   * known; only logs the outcome at a severity matching how bad it is.
   * NOTE(review): presumably called from a driver thread — confirm
   * before adding any non-thread-safe state here. */
  virtual HRESULT WINAPI ScheduledFrameCompleted (IDeckLinkVideoFrame *
      completedFrame, BMDOutputFrameCompletionResult result)
  {
    switch (result) {
      case bmdOutputFrameCompleted:
        GST_LOG_OBJECT (m_sink, "Completed frame %p", completedFrame);
        break;
      case bmdOutputFrameDisplayedLate:
        GST_INFO_OBJECT (m_sink, "Late Frame %p", completedFrame);
        break;
      case bmdOutputFrameDropped:
        GST_INFO_OBJECT (m_sink, "Dropped Frame %p", completedFrame);
        break;
      case bmdOutputFrameFlushed:
        GST_DEBUG_OBJECT (m_sink, "Flushed Frame %p", completedFrame);
        break;
      default:
        GST_INFO_OBJECT (m_sink, "Unknown Frame %p: %d", completedFrame,
            (gint) result);
        break;
    }

    return S_OK;
  }

  /* Wakes up anyone waiting on the output's condition variable (e.g. a
   * thread blocked until scheduled playback has fully stopped). */
  virtual HRESULT WINAPI ScheduledPlaybackHasStopped (void)
  {
    GST_LOG_OBJECT (m_sink, "Scheduled playback stopped");

    if (m_sink->output) {
      g_mutex_lock (&m_sink->output->lock);
      g_cond_signal (&m_sink->output->cond);
      g_mutex_unlock (&m_sink->output->lock);
    }

    return S_OK;
  }

  /* Drops the strong ref on the sink taken in the constructor. */
  virtual ~ GStreamerVideoOutputCallback () {
    gst_object_unref (m_sink);
    g_mutex_clear (&m_mutex);
  }

private:
  GstDecklinkVideoSink * m_sink;  /* strong ref, released in destructor */
  GMutex m_mutex;                 /* guards m_refcount */
  gint m_refcount;
};
234
/* GObject property IDs; PROP_0 is the conventional unused sentinel. */
enum
{
  PROP_0,
  PROP_MODE,
  PROP_DEVICE_NUMBER,
  PROP_VIDEO_FORMAT,
  PROP_PROFILE_ID,
  PROP_TIMECODE_FORMAT,
  PROP_KEYER_MODE,
  PROP_KEYER_LEVEL,
  PROP_HW_SERIAL_NUMBER,
  PROP_CC_LINE,
  PROP_AFD_BAR_LINE,
};
249
/* Forward declarations for the GObject/GstElement/GstBaseSink vfuncs
 * wired up in class_init below. */
static void gst_decklink_video_sink_set_property (GObject * object,
    guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_decklink_video_sink_get_property (GObject * object,
    guint property_id, GValue * value, GParamSpec * pspec);
static void gst_decklink_video_sink_finalize (GObject * object);

static GstStateChangeReturn
gst_decklink_video_sink_change_state (GstElement * element,
    GstStateChange transition);
static GstClock *gst_decklink_video_sink_provide_clock (GstElement * element);

static GstCaps *gst_decklink_video_sink_get_caps (GstBaseSink * bsink,
    GstCaps * filter);
static gboolean gst_decklink_video_sink_set_caps (GstBaseSink * bsink,
    GstCaps * caps);
static GstFlowReturn gst_decklink_video_sink_prepare (GstBaseSink * bsink,
    GstBuffer * buffer);
static GstFlowReturn gst_decklink_video_sink_render (GstBaseSink * bsink,
    GstBuffer * buffer);
static gboolean gst_decklink_video_sink_open (GstBaseSink * bsink);
static gboolean gst_decklink_video_sink_close (GstBaseSink * bsink);
static gboolean gst_decklink_video_sink_stop (GstDecklinkVideoSink * self);
static gboolean gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
    GstQuery * query);
static gboolean gst_decklink_video_sink_event (GstBaseSink * bsink,
    GstEvent * event);

static void
gst_decklink_video_sink_start_scheduled_playback (GstElement * element);

/* Standard GObject type boilerplate plus plugin-level element registration. */
#define parent_class gst_decklink_video_sink_parent_class
G_DEFINE_TYPE (GstDecklinkVideoSink, gst_decklink_video_sink,
    GST_TYPE_BASE_SINK);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (decklinkvideosink, "decklinkvideosink", GST_RANK_NONE,
    GST_TYPE_DECKLINK_VIDEO_SINK, decklink_element_init (plugin));
285
/* Caps-map callback: widens the "framerate" field of @structure to the
 * full fraction range [0/1, G_MAXINT/1]. Used on the sink's caps because
 * for output any framerate is accepted — only buffer timestamps matter.
 * Always returns TRUE so mapping continues over all structures. */
static gboolean
reset_framerate (GstCapsFeatures * features, GstStructure * structure,
    gpointer user_data)
{
  gst_structure_set (structure, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
      G_MAXINT, 1, NULL);

  return TRUE;
}
295
/* Class initializer: wires up GObject property handling, element state
 * handling, the basesink vfuncs, installs all properties, and registers
 * the sink pad template and element metadata. */
static void
gst_decklink_video_sink_class_init (GstDecklinkVideoSinkClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
  GstCaps *templ_caps;

  gobject_class->set_property = gst_decklink_video_sink_set_property;
  gobject_class->get_property = gst_decklink_video_sink_get_property;
  gobject_class->finalize = gst_decklink_video_sink_finalize;

  element_class->change_state =
      GST_DEBUG_FUNCPTR (gst_decklink_video_sink_change_state);
  element_class->provide_clock =
      GST_DEBUG_FUNCPTR (gst_decklink_video_sink_provide_clock);

  basesink_class->get_caps =
      GST_DEBUG_FUNCPTR (gst_decklink_video_sink_get_caps);
  basesink_class->set_caps =
      GST_DEBUG_FUNCPTR (gst_decklink_video_sink_set_caps);
  basesink_class->prepare = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_prepare);
  basesink_class->render = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_render);
  // FIXME: These are misnamed in basesink!
  basesink_class->start = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_open);
  basesink_class->stop = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_close);
  basesink_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_decklink_video_sink_propose_allocation);
  basesink_class->event = GST_DEBUG_FUNCPTR (gst_decklink_video_sink_event);

  /* "mode": the DeckLink output video mode; NTSC by default. G_PARAM_CONSTRUCT
   * makes the default get set through set_property at construction time. */
  g_object_class_install_property (gobject_class, PROP_MODE,
      g_param_spec_enum ("mode", "Playback Mode",
          "Video Mode to use for playback",
          GST_TYPE_DECKLINK_MODE, GST_DECKLINK_MODE_NTSC,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /* "device-number": which DeckLink sub-device to output on. */
  g_object_class_install_property (gobject_class, PROP_DEVICE_NUMBER,
      g_param_spec_int ("device-number", "Device number",
          "Output device instance to use", 0, G_MAXINT, 0,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /* "video-format": pixel format fed to the card; 8-bit YUV by default. */
  g_object_class_install_property (gobject_class, PROP_VIDEO_FORMAT,
      g_param_spec_enum ("video-format", "Video format",
          "Video format type to use for playback",
          GST_TYPE_DECKLINK_VIDEO_FORMAT, GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /**
   * GstDecklinkVideoSink:profile
   *
   * Specifies decklink profile to use.
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_PROFILE_ID,
      g_param_spec_enum ("profile", "Profile",
          "Certain DeckLink devices such as the DeckLink 8K Pro, the DeckLink "
          "Quad 2 and the DeckLink Duo 2 support multiple profiles to "
          "configure the capture and playback behavior of its sub-devices."
          "For the DeckLink Duo 2 and DeckLink Quad 2, a profile is shared "
          "between any 2 sub-devices that utilize the same connectors. For the "
          "DeckLink 8K Pro, a profile is shared between all 4 sub-devices. Any "
          "sub-devices that share a profile are considered to be part of the "
          "same profile group."
          "DeckLink Duo 2 support configuration of the duplex mode of "
          "individual sub-devices.",
          GST_TYPE_DECKLINK_PROFILE_ID, GST_DECKLINK_PROFILE_ID_DEFAULT,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /* "timecode-format": which timecode standard to embed; RP188-any default. */
  g_object_class_install_property (gobject_class, PROP_TIMECODE_FORMAT,
      g_param_spec_enum ("timecode-format", "Timecode format",
          "Timecode format type to use for playback",
          GST_TYPE_DECKLINK_TIMECODE_FORMAT,
          GST_DECKLINK_TIMECODE_FORMAT_RP188ANY,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /* "keyer-mode": off/internal/external keying (see section docs above). */
  g_object_class_install_property (gobject_class, PROP_KEYER_MODE,
      g_param_spec_enum ("keyer-mode", "Keyer mode",
          "Keyer mode to be enabled",
          GST_TYPE_DECKLINK_KEYER_MODE,
          GST_DECKLINK_KEYER_MODE_OFF,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /* "keyer-level": keyer opacity level, 0-255, fully opaque by default. */
  g_object_class_install_property (gobject_class, PROP_KEYER_LEVEL,
      g_param_spec_int ("keyer-level", "Keyer level",
          "Keyer level", 0, 255, 255,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /* "hw-serial-number": read-only; only available while the output is open. */
  g_object_class_install_property (gobject_class, PROP_HW_SERIAL_NUMBER,
      g_param_spec_string ("hw-serial-number", "Hardware serial number",
          "The serial number (hardware ID) of the Decklink card",
          NULL, (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)));

  /* "cc-line": VANC line for closed captions; 0 disables CC insertion. */
  g_object_class_install_property (gobject_class, PROP_CC_LINE,
      g_param_spec_int ("cc-line", "CC Line",
          "Line number to use for inserting closed captions (0 = disabled)", 0,
          22, 0,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  /* "afd-bar-line": VANC line for AFD/Bar data; 0 disables insertion. */
  g_object_class_install_property (gobject_class, PROP_AFD_BAR_LINE,
      g_param_spec_int ("afd-bar-line", "AFD/Bar Line",
          "Line number to use for inserting AFD/Bar data (0 = disabled)", 0,
          10000, 0,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
              G_PARAM_CONSTRUCT)));

  templ_caps = gst_decklink_mode_get_template_caps (FALSE);
  templ_caps = gst_caps_make_writable (templ_caps);
  /* For output we support any framerate and only really care about timestamps */
  gst_caps_map_in_place (templ_caps, reset_framerate, NULL);
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, templ_caps));
  gst_caps_unref (templ_caps);

  gst_element_class_set_static_metadata (element_class, "Decklink Video Sink",
      "Video/Sink/Hardware", "Decklink Sink",
      "David Schleef <ds@entropywave.com>, "
      "Sebastian Dröge <sebastian@centricular.com>");

  GST_DEBUG_CATEGORY_INIT (gst_decklink_video_sink_debug, "decklinkvideosink",
      0, "debug category for decklinkvideosink element");
}
426
/* Instance initializer: sets the property defaults (mirroring the param
 * specs in class_init) and tunes basesink QoS behavior. */
static void
gst_decklink_video_sink_init (GstDecklinkVideoSink * self)
{
  self->mode = GST_DECKLINK_MODE_NTSC;
  self->device_number = 0;
  self->video_format = GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV;
  self->profile_id = GST_DECKLINK_PROFILE_ID_DEFAULT;
  /* VITC is legacy, we should expect RP188 in modern use cases */
  self->timecode_format = bmdTimecodeRP188Any;
  self->caption_line = 0;
  self->afd_bar_line = 0;

  /* Allow frames up to 20ms late before QoS drops them. */
  gst_base_sink_set_max_lateness (GST_BASE_SINK_CAST (self), 20 * GST_MSECOND);
  gst_base_sink_set_qos_enabled (GST_BASE_SINK_CAST (self), TRUE);
}
442
/* GObject set_property implementation. Note that timecode-format and
 * keyer-mode are translated from the public enum to the DeckLink SDK
 * value at set time, while the others are stored as-is. */
void
gst_decklink_video_sink_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);

  switch (property_id) {
    case PROP_MODE:
      self->mode = (GstDecklinkModeEnum) g_value_get_enum (value);
      break;
    case PROP_DEVICE_NUMBER:
      self->device_number = g_value_get_int (value);
      break;
    case PROP_VIDEO_FORMAT:
      self->video_format = (GstDecklinkVideoFormat) g_value_get_enum (value);
      /* Only a subset of the enum is implemented for output; warn (but
       * still store the value) for anything else. */
      switch (self->video_format) {
        case GST_DECKLINK_VIDEO_FORMAT_AUTO:
        case GST_DECKLINK_VIDEO_FORMAT_8BIT_YUV:
        case GST_DECKLINK_VIDEO_FORMAT_10BIT_YUV:
        case GST_DECKLINK_VIDEO_FORMAT_8BIT_ARGB:
        case GST_DECKLINK_VIDEO_FORMAT_8BIT_BGRA:
          break;
        default:
          GST_ELEMENT_WARNING (GST_ELEMENT (self), CORE, NOT_IMPLEMENTED,
              ("Format %d not supported", self->video_format), (NULL));
          break;
      }
      break;
    case PROP_PROFILE_ID:
      self->profile_id = (GstDecklinkProfileId) g_value_get_enum (value);
      break;
    case PROP_TIMECODE_FORMAT:
      self->timecode_format =
          gst_decklink_timecode_format_from_enum ((GstDecklinkTimecodeFormat)
          g_value_get_enum (value));
      break;
    case PROP_KEYER_MODE:
      self->keyer_mode =
          gst_decklink_keyer_mode_from_enum ((GstDecklinkKeyerMode)
          g_value_get_enum (value));
      break;
    case PROP_KEYER_LEVEL:
      self->keyer_level = g_value_get_int (value);
      break;
    case PROP_CC_LINE:
      self->caption_line = g_value_get_int (value);
      break;
    case PROP_AFD_BAR_LINE:
      self->afd_bar_line = g_value_get_int (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
498
/* GObject get_property implementation; mirrors set_property, converting
 * the stored SDK values back to the public enums where needed. */
void
gst_decklink_video_sink_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);

  switch (property_id) {
    case PROP_MODE:
      g_value_set_enum (value, self->mode);
      break;
    case PROP_DEVICE_NUMBER:
      g_value_set_int (value, self->device_number);
      break;
    case PROP_VIDEO_FORMAT:
      g_value_set_enum (value, self->video_format);
      break;
    case PROP_PROFILE_ID:
      g_value_set_enum (value, self->profile_id);
      break;
    case PROP_TIMECODE_FORMAT:
      g_value_set_enum (value,
          gst_decklink_timecode_format_to_enum (self->timecode_format));
      break;
    case PROP_KEYER_MODE:
      g_value_set_enum (value,
          gst_decklink_keyer_mode_to_enum (self->keyer_mode));
      break;
    case PROP_KEYER_LEVEL:
      g_value_set_int (value, self->keyer_level);
      break;
    case PROP_HW_SERIAL_NUMBER:
      /* Only known while the output device is open; NULL otherwise. */
      if (self->output)
        g_value_set_string (value, self->output->hw_serial_number);
      else
        g_value_set_string (value, NULL);
      break;
    case PROP_CC_LINE:
      g_value_set_int (value, self->caption_line);
      break;
    case PROP_AFD_BAR_LINE:
      g_value_set_int (value, self->afd_bar_line);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}
546
/* GObject finalize: nothing instance-specific to free here; just chains
 * up to the parent class. */
void
gst_decklink_video_sink_finalize (GObject * object)
{
  //GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (object);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
554
/* basesink set_caps vfunc: configures the DeckLink output for the
 * negotiated caps — picks/validates the video mode, programs the keyer,
 * selects VITC vs RP188 timecode output, enables VANC when CC or AFD/Bar
 * insertion is requested, and finally enables video output and (if
 * possible) kicks off scheduled playback. Returns FALSE if the caps are
 * unusable or the device refuses them. Reconfiguration while video is
 * already enabled is only allowed when format/size are unchanged. */
static gboolean
gst_decklink_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  const GstDecklinkMode *mode;
  HRESULT ret;
  BMDVideoOutputFlags flags;
  GstVideoInfo info;

  GST_DEBUG_OBJECT (self, "Setting caps %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;


  g_mutex_lock (&self->output->lock);
  if (self->output->video_enabled) {
    /* Already running: accept only caps that keep format and size. */
    if (self->info.finfo->format == info.finfo->format &&
        self->info.width == info.width && self->info.height == info.height) {
      // FIXME: We should also consider the framerate as it is used
      // for mode selection below in auto mode
      GST_DEBUG_OBJECT (self, "Nothing relevant has changed");
      self->info = info;
      g_mutex_unlock (&self->output->lock);
      return TRUE;
    } else {
      GST_DEBUG_OBJECT (self, "Reconfiguration not supported at this point");
      g_mutex_unlock (&self->output->lock);
      return FALSE;
    }
  }
  g_mutex_unlock (&self->output->lock);

  /* Hand our completion callback to the driver. NOTE(review): the newly
   * created callback (refcount 1) appears not to be Release()d here, and
   * the early FALSE returns below leave it installed — verify ownership
   * semantics of SetScheduledFrameCompletionCallback against the SDK. */
  self->output->output->SetScheduledFrameCompletionCallback (new
      GStreamerVideoOutputCallback (self));

  if (self->mode == GST_DECKLINK_MODE_AUTO) {
    /* Auto mode: derive both the display mode and the pixel format from
     * the caps; the configured video-format (if not AUTO) must agree. */
    BMDPixelFormat f;
    mode = gst_decklink_find_mode_and_format_for_caps (caps, &f);
    if (mode == NULL) {
      GST_WARNING_OBJECT (self,
          "Failed to find compatible mode for caps %" GST_PTR_FORMAT, caps);
      return FALSE;
    }
    if (self->video_format != GST_DECKLINK_VIDEO_FORMAT_AUTO &&
        gst_decklink_pixel_format_from_type (self->video_format) != f) {
      GST_WARNING_OBJECT (self, "Failed to set pixel format to %d",
          self->video_format);
      return FALSE;
    }
  } else {
    /* We don't have to give the format in EnableVideoOutput. Therefore,
     * even if it's AUTO, we have it stored in self->info and set it in
     * gst_decklink_video_sink_prepare */
    mode = gst_decklink_get_mode (self->mode);
    g_assert (mode != NULL);
  };

  /* enable or disable keyer */
  if (self->output->keyer != NULL) {
    if (self->keyer_mode == bmdKeyerModeOff) {
      self->output->keyer->Disable ();
    } else if (self->keyer_mode == bmdKeyerModeInternal) {
      /* Enable(false) = internal keying, Enable(true) = external. */
      self->output->keyer->Enable (false);
      self->output->keyer->SetLevel (self->keyer_level);
    } else if (self->keyer_mode == bmdKeyerModeExternal) {
      self->output->keyer->Enable (true);
      self->output->keyer->SetLevel (self->keyer_level);
    } else {
      g_assert_not_reached ();
    }
  } else if (self->keyer_mode != bmdKeyerModeOff) {
    GST_WARNING_OBJECT (self, "Failed to set keyer to mode %d",
        self->keyer_mode);
  }

  /* The timecode_format itself is used when we embed the actual timecode data
   * into the frame. Now we only need to know which of the two standards the
   * timecode format will adhere to: VITC or RP188, and send the appropriate
   * flag to EnableVideoOutput. The exact format is specified later.
   *
   * Note that this flag will have no effect in practice if the video stream
   * does not contain timecode metadata.
   */
  if ((gint64) self->timecode_format ==
      (gint64) GST_DECKLINK_TIMECODE_FORMAT_VITC
      || (gint64) self->timecode_format ==
      (gint64) GST_DECKLINK_TIMECODE_FORMAT_VITCFIELD2)
    flags = bmdVideoOutputVITC;
  else
    flags = bmdVideoOutputRP188;

  /* VANC output is needed to carry closed captions and/or AFD/Bar data. */
  if (self->caption_line > 0 || self->afd_bar_line > 0)
    flags = (BMDVideoOutputFlags) (flags | bmdVideoOutputVANC);

  ret = self->output->output->EnableVideoOutput (mode->mode, flags);
  if (ret != S_OK) {
    GST_WARNING_OBJECT (self, "Failed to enable video output: 0x%08lx",
        (unsigned long) ret);
    return FALSE;
  }

  self->info = info;
  g_mutex_lock (&self->output->lock);
  self->output->mode = mode;
  self->output->video_enabled = TRUE;
  /* Start playback now if the shared output object already has its
   * start hook installed (i.e. audio side may already be waiting). */
  if (self->output->start_scheduled_playback)
    self->output->start_scheduled_playback (self->output->videosink);
  g_mutex_unlock (&self->output->lock);

  /* Drop any cached VBI encoder; it is rebuilt for the new format. */
  if (self->vbiencoder) {
    gst_video_vbi_encoder_free (self->vbiencoder);
    self->vbiencoder = NULL;
    self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
  }

  return TRUE;
}
673
674 static GstCaps *
gst_decklink_video_sink_get_caps(GstBaseSink * bsink,GstCaps * filter)675 gst_decklink_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
676 {
677 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
678 GstCaps *mode_caps, *caps;
679
680 if (self->mode == GST_DECKLINK_MODE_AUTO
681 && self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO)
682 mode_caps = gst_decklink_mode_get_template_caps (FALSE);
683 else if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO)
684 mode_caps = gst_decklink_mode_get_caps_all_formats (self->mode, FALSE);
685 else if (self->mode == GST_DECKLINK_MODE_AUTO)
686 mode_caps =
687 gst_decklink_pixel_format_get_caps (gst_decklink_pixel_format_from_type
688 (self->video_format), FALSE);
689 else
690 mode_caps =
691 gst_decklink_mode_get_caps (self->mode,
692 gst_decklink_pixel_format_from_type (self->video_format), FALSE);
693 mode_caps = gst_caps_make_writable (mode_caps);
694 /* For output we support any framerate and only really care about timestamps */
695 gst_caps_map_in_place (mode_caps, reset_framerate, NULL);
696
697 if (filter) {
698 caps =
699 gst_caps_intersect_full (filter, mode_caps, GST_CAPS_INTERSECT_FIRST);
700 gst_caps_unref (mode_caps);
701 } else {
702 caps = mode_caps;
703 }
704
705 return caps;
706 }
707
/* basesink render vfunc: intentionally a no-op — all scheduling work is
 * done in the prepare vfunc, so rendering just reports success. */
static GstFlowReturn
gst_decklink_video_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  return GST_FLOW_OK;
}
713
/* Translates a pipeline running time (@timestamp, and optionally
 * @duration) into the time domain of the DeckLink output's internal
 * hardware clock, so frames can be scheduled against the card.
 *
 * If the pipeline clock is not our own output clock, the clock's
 * calibration (internal/external anchors plus rate) is used to map the
 * external running time onto the card's internal time; the duration is
 * scaled by the same rate. If both base times are known, the stored
 * internal offset is applied; otherwise the current internal clock time
 * is used as a fallback output timestamp. */
void
gst_decklink_video_sink_convert_to_internal_clock (GstDecklinkVideoSink * self,
    GstClockTime * timestamp, GstClockTime * duration)
{
  GstClock *clock;
  GstClockTime internal_base, external_base, internal_offset;

  g_assert (timestamp != NULL);

  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  /* Snapshot the base times/offset under the object lock; they are
   * updated elsewhere when scheduled playback (re)starts. */
  GST_OBJECT_LOCK (self);
  internal_base = self->internal_base_time;
  external_base = self->external_base_time;
  internal_offset = self->internal_time_offset;
  GST_OBJECT_UNLOCK (self);

  if (!clock || clock != self->output->clock) {
    GstClockTime internal, external, rate_n, rate_d;
    GstClockTime external_timestamp = *timestamp;
    GstClockTime base_time;

    gst_clock_get_calibration (self->output->clock, &internal, &external,
        &rate_n, &rate_d);

    // Convert to the running time corresponding to both clock times
    if (!GST_CLOCK_TIME_IS_VALID (internal_base) || internal < internal_base)
      internal = 0;
    else
      internal -= internal_base;

    if (!GST_CLOCK_TIME_IS_VALID (external_base) || external < external_base)
      external = 0;
    else
      external -= external_base;

    // Convert timestamp to the "running time" since we started scheduled
    // playback, that is the difference between the pipeline's base time
    // and our own base time.
    base_time = gst_element_get_base_time (GST_ELEMENT_CAST (self));
    if (base_time > external_base)
      base_time = 0;
    else
      base_time = external_base - base_time;

    if (external_timestamp < base_time)
      external_timestamp = 0;
    else
      external_timestamp = external_timestamp - base_time;

    // Get the difference in the external time, note
    // that the running time is external time.
    // Then scale this difference and offset it to
    // our internal time. Now we have the running time
    // according to our internal clock.
    //
    // For the duration we just scale
    *timestamp =
        gst_clock_unadjust_with_calibration (NULL, external_timestamp,
        internal, external, rate_n, rate_d);

    GST_LOG_OBJECT (self,
        "Converted %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT " (internal: %"
        GST_TIME_FORMAT " external %" GST_TIME_FORMAT " rate: %lf)",
        GST_TIME_ARGS (external_timestamp), GST_TIME_ARGS (*timestamp),
        GST_TIME_ARGS (internal), GST_TIME_ARGS (external),
        ((gdouble) rate_n) / ((gdouble) rate_d));

    if (duration) {
      GstClockTime external_duration = *duration;

      /* Durations are pure spans, so only the rate applies — no offset. */
      *duration = gst_util_uint64_scale (external_duration, rate_d, rate_n);

      GST_LOG_OBJECT (self,
          "Converted duration %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " (internal: %" GST_TIME_FORMAT " external %" GST_TIME_FORMAT
          " rate: %lf)", GST_TIME_ARGS (external_duration),
          GST_TIME_ARGS (*duration), GST_TIME_ARGS (internal),
          GST_TIME_ARGS (external), ((gdouble) rate_n) / ((gdouble) rate_d));
    }
  } else {
    GST_LOG_OBJECT (self, "No clock conversion needed, same clocks: %"
        GST_TIME_FORMAT, GST_TIME_ARGS (*timestamp));
  }

  /* Anchor the converted running time into the card's absolute internal
   * timeline; without valid base times fall back to "now". */
  if (external_base != GST_CLOCK_TIME_NONE &&
      internal_base != GST_CLOCK_TIME_NONE)
    *timestamp += internal_offset;
  else
    *timestamp = gst_clock_get_internal_time (self->output->clock);

  GST_DEBUG_OBJECT (self, "Output timestamp %" GST_TIME_FORMAT
      " using clock epoch %" GST_TIME_FORMAT,
      GST_TIME_ARGS (*timestamp), GST_TIME_ARGS (self->output->clock_epoch));

  if (clock)
    gst_object_unref (clock);
}
811
812 /* Copied from ext/closedcaption/gstccconverter.c */
813 /* Converts raw CEA708 cc_data and an optional timecode into CDP */
814 static guint
convert_cea708_cc_data_cea708_cdp_internal(GstDecklinkVideoSink * self,const guint8 * cc_data,guint cc_data_len,guint8 * cdp,guint cdp_len,const GstVideoTimeCodeMeta * tc_meta)815 convert_cea708_cc_data_cea708_cdp_internal (GstDecklinkVideoSink * self,
816 const guint8 * cc_data, guint cc_data_len, guint8 * cdp, guint cdp_len,
817 const GstVideoTimeCodeMeta * tc_meta)
818 {
819 GstByteWriter bw;
820 guint8 flags, checksum;
821 guint i, len;
822 const GstDecklinkMode *mode = gst_decklink_get_mode (self->mode);
823
824 gst_byte_writer_init_with_data (&bw, cdp, cdp_len, FALSE);
825 gst_byte_writer_put_uint16_be_unchecked (&bw, 0x9669);
826 /* Write a length of 0 for now */
827 gst_byte_writer_put_uint8_unchecked (&bw, 0);
828 if (mode->fps_n == 24000 && mode->fps_d == 1001) {
829 gst_byte_writer_put_uint8_unchecked (&bw, 0x1f);
830 } else if (mode->fps_n == 24 && mode->fps_d == 1) {
831 gst_byte_writer_put_uint8_unchecked (&bw, 0x2f);
832 } else if (mode->fps_n == 25 && mode->fps_d == 1) {
833 gst_byte_writer_put_uint8_unchecked (&bw, 0x3f);
834 } else if (mode->fps_n == 30000 && mode->fps_d == 1001) {
835 gst_byte_writer_put_uint8_unchecked (&bw, 0x4f);
836 } else if (mode->fps_n == 30 && mode->fps_d == 1) {
837 gst_byte_writer_put_uint8_unchecked (&bw, 0x5f);
838 } else if (mode->fps_n == 50 && mode->fps_d == 1) {
839 gst_byte_writer_put_uint8_unchecked (&bw, 0x6f);
840 } else if (mode->fps_n == 60000 && mode->fps_d == 1001) {
841 gst_byte_writer_put_uint8_unchecked (&bw, 0x7f);
842 } else if (mode->fps_n == 60 && mode->fps_d == 1) {
843 gst_byte_writer_put_uint8_unchecked (&bw, 0x8f);
844 } else {
845 g_assert_not_reached ();
846 }
847
848 /* ccdata_present | caption_service_active */
849 flags = 0x42;
850
851 /* time_code_present */
852 if (tc_meta)
853 flags |= 0x80;
854
855 /* reserved */
856 flags |= 0x01;
857
858 gst_byte_writer_put_uint8_unchecked (&bw, flags);
859
860 gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);
861
862 if (tc_meta) {
863 const GstVideoTimeCode *tc = &tc_meta->tc;
864 guint8 u8;
865
866 gst_byte_writer_put_uint8_unchecked (&bw, 0x71);
867 /* reserved 11 - 2 bits */
868 u8 = 0xc0;
869 /* tens of hours - 2 bits */
870 u8 |= ((tc->hours / 10) & 0x3) << 4;
871 /* units of hours - 4 bits */
872 u8 |= (tc->hours % 10) & 0xf;
873 gst_byte_writer_put_uint8_unchecked (&bw, u8);
874
875 /* reserved 1 - 1 bit */
876 u8 = 0x80;
877 /* tens of minutes - 3 bits */
878 u8 |= ((tc->minutes / 10) & 0x7) << 4;
879 /* units of minutes - 4 bits */
880 u8 |= (tc->minutes % 10) & 0xf;
881 gst_byte_writer_put_uint8_unchecked (&bw, u8);
882
883 /* field flag - 1 bit */
884 u8 = tc->field_count < 2 ? 0x00 : 0x80;
885 /* tens of seconds - 3 bits */
886 u8 |= ((tc->seconds / 10) & 0x7) << 4;
887 /* units of seconds - 4 bits */
888 u8 |= (tc->seconds % 10) & 0xf;
889 gst_byte_writer_put_uint8_unchecked (&bw, u8);
890
891 /* drop frame flag - 1 bit */
892 u8 = (tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) ? 0x80 :
893 0x00;
894 /* reserved0 - 1 bit */
895 /* tens of frames - 2 bits */
896 u8 |= ((tc->frames / 10) & 0x3) << 4;
897 /* units of frames 4 bits */
898 u8 |= (tc->frames % 10) & 0xf;
899 gst_byte_writer_put_uint8_unchecked (&bw, u8);
900 }
901
902 gst_byte_writer_put_uint8_unchecked (&bw, 0x72);
903 gst_byte_writer_put_uint8_unchecked (&bw, 0xe0 | cc_data_len / 3);
904 gst_byte_writer_put_data_unchecked (&bw, cc_data, cc_data_len);
905
906 gst_byte_writer_put_uint8_unchecked (&bw, 0x74);
907 gst_byte_writer_put_uint16_be_unchecked (&bw, self->cdp_hdr_sequence_cntr);
908 self->cdp_hdr_sequence_cntr++;
909 /* We calculate the checksum afterwards */
910 gst_byte_writer_put_uint8_unchecked (&bw, 0);
911
912 len = gst_byte_writer_get_pos (&bw);
913 gst_byte_writer_set_pos (&bw, 2);
914 gst_byte_writer_put_uint8_unchecked (&bw, len);
915
916 checksum = 0;
917 for (i = 0; i < len; i++) {
918 checksum += cdp[i];
919 }
920 checksum &= 0xff;
921 checksum = 256 - checksum;
922 cdp[len - 1] = checksum;
923
924 return len;
925 }
926
/* Writes VANC (vertical ancillary) data into @frame: closed captions taken
 * from GstVideoCaptionMeta on @buffer, and AFD/Bar information taken from
 * GstVideoAFDMeta/GstVideoBarMeta, placed on the configured caption and
 * AFD/Bar lines. Does nothing if neither line is configured.
 *
 * NOTE(review): the @format parameter is unused; the VBI encoder and the
 * ancillary buffer are always created for v210 / 10-bit YUV — confirm this
 * is intended for all output pixel formats. */
static void
write_vbi (GstDecklinkVideoSink * self, GstBuffer * buffer,
    BMDPixelFormat format, IDeckLinkMutableVideoFrame * frame,
    GstVideoTimeCodeMeta * tc_meta)
{
  IDeckLinkVideoFrameAncillary *vanc_frame = NULL;
  gpointer iter = NULL;
  GstVideoCaptionMeta *cc_meta;
  guint8 *vancdata;
  gboolean got_captions = FALSE;

  if (self->caption_line == 0 && self->afd_bar_line == 0)
    return;

  /* Lazily create the VBI encoder on first use */
  if (self->vbiencoder == NULL) {
    self->vbiencoder =
        gst_video_vbi_encoder_new (GST_VIDEO_FORMAT_v210, self->info.width);
    self->anc_vformat = GST_VIDEO_FORMAT_v210;
  }

  /* Put any closed captions into the configured line */
  while ((cc_meta =
          (GstVideoCaptionMeta *) gst_buffer_iterate_meta_filtered (buffer,
              &iter, GST_VIDEO_CAPTION_META_API_TYPE))) {
    switch (cc_meta->caption_type) {
      case GST_VIDEO_CAPTION_TYPE_CEA608_RAW:{
        guint8 data[138];
        guint i, n;

        n = cc_meta->size / 2;
        if (cc_meta->size > 46) {
          GST_WARNING_OBJECT (self, "Too big raw CEA608 buffer");
          break;
        }

        /* This is the offset from line 9 for 525-line fields and from line
         * 5 for 625-line fields.
         *
         * The highest bit is set for field 1 but not for field 0, but we
         * have no way of knowning the field here
         */
        for (i = 0; i < n; i++) {
          data[3 * i] = 0x80 | (self->info.height ==
              525 ? self->caption_line - 9 : self->caption_line - 5);
          data[3 * i + 1] = cc_meta->data[2 * i];
          data[3 * i + 2] = cc_meta->data[2 * i + 1];
        }

        /* NOTE(review): only 3 bytes are passed here although 3 * n bytes
         * were filled in above — for n > 1 this drops caption pairs; this
         * looks like it should be 3 * n. Confirm against upstream intent. */
        if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
                FALSE,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, data, 3))
          GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");

        got_captions = TRUE;

        break;
      }
      case GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A:{
        /* S334-1A data is already in wire format, pass it through as-is */
        if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
                FALSE,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 >> 8,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_608 & 0xff, cc_meta->data,
                cc_meta->size))
          GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");

        got_captions = TRUE;

        break;
      }
      case GST_VIDEO_CAPTION_TYPE_CEA708_RAW:{
        guint8 data[256];
        guint n;

        /* NOTE(review): this initial value of n is never read — it is
         * overwritten by the conversion below. */
        n = cc_meta->size / 3;
        if (cc_meta->size > 46) {
          GST_WARNING_OBJECT (self, "Too big raw CEA708 buffer");
          break;
        }

        /* Wrap the raw cc_data into a CDP before inserting it */
        n = convert_cea708_cc_data_cea708_cdp_internal (self, cc_meta->data,
            cc_meta->size, data, sizeof (data), tc_meta);
        if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder, FALSE,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, data, n))
          GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");

        got_captions = TRUE;

        break;
      }
      case GST_VIDEO_CAPTION_TYPE_CEA708_CDP:{
        /* Already a complete CDP, pass it through as-is */
        if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
                FALSE,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8,
                GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff, cc_meta->data,
                cc_meta->size))
          GST_WARNING_OBJECT (self, "Couldn't add meta to ancillary data");

        got_captions = TRUE;

        break;
      }
      default:{
        GST_FIXME_OBJECT (self, "Caption type %d not supported",
            cc_meta->caption_type);
        break;
      }
    }
  }

  if ((got_captions || self->afd_bar_line != 0)
      && self->output->output->CreateAncillaryData (bmdFormat10BitYUV,
          &vanc_frame) == S_OK) {
    GstVideoAFDMeta *afd_meta = NULL, *afd_meta2 = NULL;
    GstVideoBarMeta *bar_meta = NULL, *bar_meta2 = NULL;
    GstMeta *meta;
    gpointer meta_iter;
    guint8 afd_bar_data[8] = { 0, };
    guint8 afd_bar_data2[8] = { 0, };
    guint8 afd = 0;
    gboolean is_letterbox = 0;
    guint16 bar1 = 0, bar2 = 0;
    guint i;

    // Get any reasonable AFD/Bar metas for both fields
    /* NOTE(review): the third clause below tests afd_meta->field rather
     * than afd_meta2->field (and likewise bar_meta->field further down) —
     * possibly a typo in the field-selection logic; confirm intent. */
    meta_iter = NULL;
    while ((meta =
            gst_buffer_iterate_meta_filtered (buffer, &meta_iter,
                GST_VIDEO_AFD_META_API_TYPE))) {
      GstVideoAFDMeta *tmp_meta = (GstVideoAFDMeta *) meta;

      if (tmp_meta->field == 0 || !afd_meta || (afd_meta && afd_meta->field != 0
              && tmp_meta->field == 0))
        afd_meta = tmp_meta;
      if (tmp_meta->field == 1 || !afd_meta2 || (afd_meta2
              && afd_meta->field != 1 && tmp_meta->field == 1))
        afd_meta2 = tmp_meta;
    }

    meta_iter = NULL;
    while ((meta =
            gst_buffer_iterate_meta_filtered (buffer, &meta_iter,
                GST_VIDEO_BAR_META_API_TYPE))) {
      GstVideoBarMeta *tmp_meta = (GstVideoBarMeta *) meta;

      if (tmp_meta->field == 0 || !bar_meta || (bar_meta && bar_meta->field != 0
              && tmp_meta->field == 0))
        bar_meta = tmp_meta;
      if (tmp_meta->field == 1 || !bar_meta2 || (bar_meta2
              && bar_meta->field != 1 && tmp_meta->field == 1))
        bar_meta2 = tmp_meta;
    }

    /* Build one 8-byte SMPTE 2016-3 payload per field */
    for (i = 0; i < 2; i++) {
      guint8 *afd_bar_data_ptr;

      if (i == 0) {
        afd_bar_data_ptr = afd_bar_data;
        afd = afd_meta ? afd_meta->afd : 0;
        is_letterbox = bar_meta ? bar_meta->is_letterbox : FALSE;
        bar1 = bar_meta ? bar_meta->bar_data1 : 0;
        bar2 = bar_meta ? bar_meta->bar_data2 : 0;
      } else {
        afd_bar_data_ptr = afd_bar_data2;
        afd = afd_meta2 ? afd_meta2->afd : 0;
        is_letterbox = bar_meta2 ? bar_meta2->is_letterbox : FALSE;
        bar1 = bar_meta2 ? bar_meta2->bar_data1 : 0;
        bar2 = bar_meta2 ? bar_meta2->bar_data2 : 0;
      }

      /* See SMPTE 2016-3 Section 4 */
      /* AFD and AR */
      if (self->mode <= (gint) GST_DECKLINK_MODE_PAL_P) {
        afd_bar_data_ptr[0] = (afd << 3) | 0x0;
      } else {
        afd_bar_data_ptr[0] = (afd << 3) | 0x4;
      }

      /* Bar flags */
      afd_bar_data_ptr[3] = is_letterbox ? 0xc0 : 0x30;

      /* Bar value 1 and 2 */
      GST_WRITE_UINT16_BE (&afd_bar_data_ptr[4], bar1);
      GST_WRITE_UINT16_BE (&afd_bar_data_ptr[6], bar2);
    }

    /* AFD on the same line as the captions */
    if (self->caption_line == self->afd_bar_line) {
      if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
              FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
              GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data,
              sizeof (afd_bar_data)))
        GST_WARNING_OBJECT (self,
            "Couldn't add AFD/Bar data to ancillary data");
    }

    /* FIXME: Add captions to the correct field? Captions for the second
     * field should probably be inserted into the second field */

    /* Flush everything queued for the caption line into the VANC buffer */
    if (got_captions || self->caption_line == self->afd_bar_line) {
      if (vanc_frame->GetBufferForVerticalBlankingLine (self->caption_line,
              (void **) &vancdata) == S_OK) {
        gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
      } else {
        GST_WARNING_OBJECT (self,
            "Failed to get buffer for line %d ancillary data",
            self->caption_line);
      }
    }

    /* AFD on a different line than the captions */
    if (self->afd_bar_line != 0 && self->caption_line != self->afd_bar_line) {
      if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
              FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
              GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data,
              sizeof (afd_bar_data)))
        GST_WARNING_OBJECT (self,
            "Couldn't add AFD/Bar data to ancillary data");

      if (vanc_frame->GetBufferForVerticalBlankingLine (self->afd_bar_line,
              (void **) &vancdata) == S_OK) {
        gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
      } else {
        GST_WARNING_OBJECT (self,
            "Failed to get buffer for line %d ancillary data",
            self->afd_bar_line);
      }
    }

    /* For interlaced video we need to also add AFD to the second field */
    if (GST_VIDEO_INFO_IS_INTERLACED (&self->info) && self->afd_bar_line != 0) {
      guint field2_offset;

      /* The VANC lines for the second field are at an offset, depending on
       * the format in use.
       */
      switch (self->info.height) {
        case 486:
          /* NTSC: 525 / 2 + 1 */
          field2_offset = 263;
          break;
        case 576:
          /* PAL: 625 / 2 + 1 */
          field2_offset = 313;
          break;
        case 1080:
          /* 1080i: 1125 / 2 + 1 */
          field2_offset = 563;
          break;
        default:
          /* only interlaced heights above are expected here */
          g_assert_not_reached ();
      }

      if (!gst_video_vbi_encoder_add_ancillary (self->vbiencoder,
              FALSE, GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR >> 8,
              GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR & 0xff, afd_bar_data2,
              sizeof (afd_bar_data)))
        GST_WARNING_OBJECT (self,
            "Couldn't add AFD/Bar data to ancillary data");

      if (vanc_frame->GetBufferForVerticalBlankingLine (self->afd_bar_line +
              field2_offset, (void **) &vancdata) == S_OK) {
        gst_video_vbi_encoder_write_line (self->vbiencoder, vancdata);
      } else {
        GST_WARNING_OBJECT (self,
            "Failed to get buffer for line %d ancillary data",
            self->afd_bar_line);
      }
    }

    if (frame->SetAncillaryData (vanc_frame) != S_OK) {
      GST_WARNING_OBJECT (self, "Failed to set ancillary data");
    }

    vanc_frame->Release ();
  } else if (got_captions || self->afd_bar_line != 0) {
    GST_WARNING_OBJECT (self, "Failed to allocate ancillary data frame");
  }
}
1207
/* GstBaseSink::prepare vmethod: converts @buffer into a DeckLink video
 * frame, attaches timecode and VANC data, and schedules it for playout
 * against the hardware clock.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR if the buffer has no
 * timestamp or any DeckLink call fails. */
static GstFlowReturn
gst_decklink_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
  GstVideoFrame vframe;
  IDeckLinkMutableVideoFrame *frame;
  guint8 *outdata, *indata;
  GstFlowReturn flow_ret;
  HRESULT ret;
  GstClockTime timestamp, duration;
  GstClockTime running_time, running_time_duration;
  GstClockTime latency, render_delay;
  GstClockTimeDiff ts_offset;
  gint i;
  GstDecklinkVideoFormat caps_format;
  BMDPixelFormat format;
  gint stride;
  GstVideoTimeCodeMeta *tc_meta;

  GST_DEBUG_OBJECT (self, "Preparing buffer %" GST_PTR_FORMAT, buffer);

  // FIXME: Handle no timestamps
  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    return GST_FLOW_ERROR;
  }

  caps_format = gst_decklink_type_from_video_format (self->info.finfo->format);
  format = gst_decklink_pixel_format_from_type (caps_format);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  /* No duration on the buffer: fall back to one frame duration derived
   * from the negotiated framerate */
  if (duration == GST_CLOCK_TIME_NONE) {
    duration =
        gst_util_uint64_scale_int (GST_SECOND, self->info.fps_d,
        self->info.fps_n);
  }
  running_time =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp);
  running_time_duration =
      gst_segment_to_running_time (&GST_BASE_SINK_CAST (self)->segment,
      GST_FORMAT_TIME, timestamp + duration) - running_time;

  /* See gst_base_sink_adjust_time() */
  latency = gst_base_sink_get_latency (bsink);
  render_delay = gst_base_sink_get_render_delay (bsink);
  ts_offset = gst_base_sink_get_ts_offset (bsink);

  running_time += latency;

  /* Apply the (signed) ts-offset, clamping the result at zero */
  if (ts_offset < 0) {
    ts_offset = -ts_offset;
    if ((GstClockTime) ts_offset < running_time)
      running_time -= ts_offset;
    else
      running_time = 0;
  } else {
    running_time += ts_offset;
  }

  /* Subtract the render delay, clamping at zero */
  if (running_time > render_delay)
    running_time -= render_delay;
  else
    running_time = 0;

  ret = self->output->output->CreateVideoFrame (self->info.width,
      self->info.height, self->info.stride[0], format, bmdFrameFlagDefault,
      &frame);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to create video frame: 0x%08lx", (unsigned long) ret));
    return GST_FLOW_ERROR;
  }

  if (!gst_video_frame_map (&vframe, &self->info, buffer, GST_MAP_READ)) {
    GST_ERROR_OBJECT (self, "Failed to map video frame");
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  /* Copy row by row since source and destination strides may differ;
   * copy only the smaller of the two row sizes */
  frame->GetBytes ((void **) &outdata);
  indata = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  stride =
      MIN (GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0), frame->GetRowBytes ());
  for (i = 0; i < self->info.height; i++) {
    memcpy (outdata, indata, stride);
    indata += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    outdata += frame->GetRowBytes ();
  }
  gst_video_frame_unmap (&vframe);

  /* Attach a hardware timecode to the frame if the buffer carries one */
  tc_meta = gst_buffer_get_video_time_code_meta (buffer);
  if (tc_meta) {
    BMDTimecodeFlags bflags = (BMDTimecodeFlags) 0;
    gchar *tc_str;

    if (((GstVideoTimeCodeFlags) (tc_meta->tc.config.flags)) &
        GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeIsDropFrame);
    else
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFlagDefault);
    if (tc_meta->tc.field_count == 2)
      bflags = (BMDTimecodeFlags) (bflags | bmdTimecodeFieldMark);

    tc_str = gst_video_time_code_to_string (&tc_meta->tc);
    ret = frame->SetTimecodeFromComponents (self->timecode_format,
        (uint8_t) tc_meta->tc.hours,
        (uint8_t) tc_meta->tc.minutes,
        (uint8_t) tc_meta->tc.seconds, (uint8_t) tc_meta->tc.frames, bflags);
    if (ret != S_OK) {
      GST_ERROR_OBJECT (self,
          "Failed to set timecode %s to video frame: 0x%08lx", tc_str,
          (unsigned long) ret);
      flow_ret = GST_FLOW_ERROR;
      g_free (tc_str);
      goto out;
    }
    GST_DEBUG_OBJECT (self, "Set frame timecode to %s", tc_str);
    g_free (tc_str);
  }

  /* Insert captions and AFD/Bar data into the vertical ancillary lines */
  write_vbi (self, buffer, format, frame, tc_meta);

  /* Translate pipeline running time into the hardware clock domain */
  gst_decklink_video_sink_convert_to_internal_clock (self, &running_time,
      &running_time_duration);

  GST_LOG_OBJECT (self, "Scheduling video frame %p at %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT, frame, GST_TIME_ARGS (running_time),
      GST_TIME_ARGS (running_time_duration));

  ret = self->output->output->ScheduleVideoFrame (frame,
      running_time, running_time_duration, GST_SECOND);
  if (ret != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to schedule frame: 0x%08lx", (unsigned long) ret));
    flow_ret = GST_FLOW_ERROR;
    goto out;
  }

  flow_ret = GST_FLOW_OK;

out:

  /* Drop our local reference in all cases (success and error paths) */
  frame->Release ();

  return flow_ret;
}
1355
1356 static gboolean
gst_decklink_video_sink_open(GstBaseSink * bsink)1357 gst_decklink_video_sink_open (GstBaseSink * bsink)
1358 {
1359 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1360 const GstDecklinkMode *mode;
1361
1362 GST_DEBUG_OBJECT (self, "Starting");
1363
1364 self->output =
1365 gst_decklink_acquire_nth_output (self->device_number,
1366 GST_ELEMENT_CAST (self), FALSE);
1367 if (!self->output) {
1368 GST_ERROR_OBJECT (self, "Failed to acquire output");
1369 return FALSE;
1370 }
1371
1372 g_object_notify (G_OBJECT (self), "hw-serial-number");
1373
1374 mode = gst_decklink_get_mode (self->mode);
1375 g_assert (mode != NULL);
1376
1377 g_mutex_lock (&self->output->lock);
1378 self->output->mode = mode;
1379 self->output->start_scheduled_playback =
1380 gst_decklink_video_sink_start_scheduled_playback;
1381 self->output->clock_start_time = GST_CLOCK_TIME_NONE;
1382 self->output->clock_epoch += self->output->clock_last_time;
1383 self->output->clock_last_time = 0;
1384 self->output->clock_offset = 0;
1385 GST_OBJECT_LOCK (self);
1386 self->internal_base_time = GST_CLOCK_TIME_NONE;
1387 self->external_base_time = GST_CLOCK_TIME_NONE;
1388 GST_OBJECT_UNLOCK (self);
1389 g_mutex_unlock (&self->output->lock);
1390
1391 return TRUE;
1392 }
1393
/* GstBaseSink::stop vmethod: tears down video output state and releases
 * the acquired DeckLink output. Always returns TRUE. */
static gboolean
gst_decklink_video_sink_close (GstBaseSink * bsink)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);

  GST_DEBUG_OBJECT (self, "Closing");

  if (self->output) {
    g_mutex_lock (&self->output->lock);
    self->output->mode = NULL;
    self->output->video_enabled = FALSE;
    /* NOTE(review): re-invoking start_scheduled_playback here with video
     * now disabled presumably lets the shared output re-evaluate its
     * playback state — confirm against the audio sink's expectations. */
    if (self->output->start_scheduled_playback && self->output->videosink)
      self->output->start_scheduled_playback (self->output->videosink);
    g_mutex_unlock (&self->output->lock);

    /* Disable the hardware output outside of the lock, then release our
     * claim on the device */
    self->output->output->DisableVideoOutput ();
    gst_decklink_release_nth_output (self->device_number,
        GST_ELEMENT_CAST (self), FALSE);
    self->output = NULL;
  }

  return TRUE;
}
1417
1418 static gboolean
gst_decklink_video_sink_stop(GstDecklinkVideoSink * self)1419 gst_decklink_video_sink_stop (GstDecklinkVideoSink * self)
1420 {
1421 GST_DEBUG_OBJECT (self, "Stopping");
1422
1423 if (self->output && self->output->video_enabled) {
1424 g_mutex_lock (&self->output->lock);
1425 self->output->video_enabled = FALSE;
1426 g_mutex_unlock (&self->output->lock);
1427
1428 self->output->output->DisableVideoOutput ();
1429 self->output->output->SetScheduledFrameCompletionCallback (NULL);
1430 }
1431
1432 if (self->vbiencoder) {
1433 gst_video_vbi_encoder_free (self->vbiencoder);
1434 self->vbiencoder = NULL;
1435 self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
1436 }
1437
1438 return TRUE;
1439 }
1440
1441 static void
_wait_for_stop_notify(GstDecklinkVideoSink * self)1442 _wait_for_stop_notify (GstDecklinkVideoSink * self)
1443 {
1444 bool active = false;
1445
1446 self->output->output->IsScheduledPlaybackRunning (&active);
1447 while (active) {
1448 /* cause sometimes decklink stops without notifying us... */
1449 guint64 wait_time = g_get_monotonic_time () + G_TIME_SPAN_SECOND;
1450 if (!g_cond_wait_until (&self->output->cond, &self->output->lock,
1451 wait_time))
1452 GST_WARNING_OBJECT (self, "Failed to wait for stop notification");
1453 self->output->output->IsScheduledPlaybackRunning (&active);
1454 }
1455 }
1456
/* Starts DeckLink scheduled playback once the video side (and the audio
 * side, if an audio sink shares this output) is enabled and the elements
 * have reached at least PAUSED.
 *
 * Called with self->output->lock held (it temporarily drops the lock to
 * read the hardware clock, and re-checks state after re-acquiring it). */
static void
gst_decklink_video_sink_start_scheduled_playback (GstElement * element)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstClockTime start_time;
  HRESULT res;
  bool active;

  // Check if we're already started
  if (self->output->started) {
    GST_DEBUG_OBJECT (self, "Already started");
    return;
  }
  // Check if we're ready to start:
  // we need video and audio enabled, if there is audio
  // and both of the two elements need to be set to PLAYING already
  if (!self->output->video_enabled) {
    GST_DEBUG_OBJECT (self,
        "Not starting scheduled playback yet: video not enabled yet!");
    return;
  }

  if (self->output->audiosink && !self->output->audio_enabled) {
    GST_DEBUG_OBJECT (self,
        "Not starting scheduled playback yet: "
        "have audio but not enabled yet!");
    return;
  }

  if ((GST_STATE (self) < GST_STATE_PAUSED
          && GST_STATE_PENDING (self) < GST_STATE_PAUSED)
      || (self->output->audiosink &&
          GST_STATE (self->output->audiosink) < GST_STATE_PAUSED
          && GST_STATE_PENDING (self->output->audiosink) < GST_STATE_PAUSED)) {
    GST_DEBUG_OBJECT (self,
        "Not starting scheduled playback yet: "
        "Elements are not set to PAUSED yet");
    return;
  }
  // Need to unlock to get the clock time
  g_mutex_unlock (&self->output->lock);

  start_time = gst_clock_get_internal_time (self->output->clock);

  g_mutex_lock (&self->output->lock);
  // Check if someone else started in the meantime
  if (self->output->started) {
    return;
  }

  active = false;
  self->output->output->IsScheduledPlaybackRunning (&active);
  if (active) {
    /* Playback still running from a previous session: stop it and wait
     * for the hardware to confirm before restarting */
    GST_DEBUG_OBJECT (self, "Stopping scheduled playback");

    self->output->started = FALSE;

    res = self->output->output->StopScheduledPlayback (0, 0, 0);
    if (res != S_OK) {
      GST_ELEMENT_ERROR (self, STREAM, FAILED,
          (NULL), ("Failed to stop scheduled playback: 0x%08lx",
              (unsigned long) res));
      return;
    }
    // Wait until scheduled playback actually stopped
    _wait_for_stop_notify (self);
  }

  GST_INFO_OBJECT (self,
      "Starting scheduled playback at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (start_time));

  res =
      self->output->output->StartScheduledPlayback (start_time,
      GST_SECOND, 1.0);
  if (res != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to start scheduled playback: 0x%08lx",
            (unsigned long) res));
    return;
  }

  self->output->started = TRUE;
}
1541
/* Stops DeckLink scheduled playback at the current hardware clock time and
 * waits for the hardware to confirm. Resets the clock base times so they
 * are recalculated on the next transition to PLAYING.
 *
 * Returns GST_STATE_CHANGE_SUCCESS, or GST_STATE_CHANGE_FAILURE if the
 * hardware refused to stop. */
static GstStateChangeReturn
gst_decklink_video_sink_stop_scheduled_playback (GstDecklinkVideoSink * self)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstClockTime start_time;
  HRESULT res;

  /* Nothing to do if playback was never started */
  if (!self->output->started)
    return ret;

  start_time = gst_clock_get_internal_time (self->output->clock);

  GST_INFO_OBJECT (self,
      "Stopping scheduled playback at %" GST_TIME_FORMAT,
      GST_TIME_ARGS (start_time));

  g_mutex_lock (&self->output->lock);
  self->output->started = FALSE;
  res = self->output->output->StopScheduledPlayback (start_time, 0, GST_SECOND);
  if (res != S_OK) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        (NULL), ("Failed to stop scheduled playback: 0x%08lx", (unsigned long)
            res));
    ret = GST_STATE_CHANGE_FAILURE;
  } else {

    // Wait until scheduled playback actually stopped
    _wait_for_stop_notify (self);
  }
  g_mutex_unlock (&self->output->lock);
  /* Force a recalculation of the clock base times on the next start */
  GST_OBJECT_LOCK (self);
  self->internal_base_time = GST_CLOCK_TIME_NONE;
  self->external_base_time = GST_CLOCK_TIME_NONE;
  GST_OBJECT_UNLOCK (self);

  return ret;
}
1579
/* GstElement::change_state vmethod: manages the DeckLink hardware clock
 * (providing/losing it to the pipeline, slaving it to the pipeline clock)
 * and starts/stops scheduled playback around the PAUSED transitions. */
static GstStateChangeReturn
gst_decklink_video_sink_change_state (GstElement * element,
    GstStateChange transition)
{
  GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  GST_DEBUG_OBJECT (self, "changing state: %s => %s",
      gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
      gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Reset VBI/caption state for the new session */
      self->vbiencoder = NULL;
      self->anc_vformat = GST_VIDEO_FORMAT_UNKNOWN;
      self->cdp_hdr_sequence_cntr = 0;

      /* Fold the last hardware clock reading into the epoch so the clock
       * keeps increasing monotonically across sessions */
      g_mutex_lock (&self->output->lock);
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      /* Offer the hardware clock to the pipeline */
      gst_element_post_message (element,
          gst_message_new_clock_provide (GST_OBJECT_CAST (element),
              self->output->clock, TRUE));
      g_mutex_lock (&self->output->lock);
      if (self->output->start_scheduled_playback)
        self->output->start_scheduled_playback (self->output->videosink);
      g_mutex_unlock (&self->output->lock);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:{
      GstClock *clock;

      clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
      if (clock) {
        /* If the pipeline uses a different clock, slave the hardware clock
         * to it */
        if (clock != self->output->clock) {
          gst_clock_set_master (self->output->clock, clock);
        }

        GST_OBJECT_LOCK (self);
        if (self->external_base_time == GST_CLOCK_TIME_NONE
            || self->internal_base_time == GST_CLOCK_TIME_NONE) {
          /* First transition to PLAYING: capture matching snapshots of
           * both clocks as the conversion base */
          self->external_base_time = gst_clock_get_internal_time (clock);
          self->internal_base_time =
              gst_clock_get_internal_time (self->output->clock);
          self->internal_time_offset = self->internal_base_time;
        } else if (GST_CLOCK_TIME_IS_VALID (self->internal_pause_time)) {
          /* Resuming from PAUSED: account for the time spent paused */
          self->internal_time_offset +=
              gst_clock_get_internal_time (self->output->clock) -
              self->internal_pause_time;
        }

        GST_INFO_OBJECT (self, "clock has been set to %" GST_PTR_FORMAT
            ", updated base times - internal: %" GST_TIME_FORMAT
            " external: %" GST_TIME_FORMAT " internal offset %"
            GST_TIME_FORMAT, clock,
            GST_TIME_ARGS (self->internal_base_time),
            GST_TIME_ARGS (self->external_base_time),
            GST_TIME_ARGS (self->internal_time_offset));
        GST_OBJECT_UNLOCK (self);

        gst_object_unref (clock);
      } else {
        GST_ELEMENT_ERROR (self, STREAM, FAILED,
            (NULL), ("Need a clock to go to PLAYING"));
        ret = GST_STATE_CHANGE_FAILURE;
      }
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      if (gst_decklink_video_sink_stop_scheduled_playback (self) ==
          GST_STATE_CHANGE_FAILURE)
        ret = GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    default:
      break;
  }

  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  /* Post-transition handling (after the parent class has run) */
  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:{
      /* Tell the pipeline the hardware clock is gone and detach it */
      gst_element_post_message (element,
          gst_message_new_clock_lost (GST_OBJECT_CAST (element),
              self->output->clock));
      gst_clock_set_master (self->output->clock, NULL);
      // Reset calibration to make the clock reusable next time we use it
      gst_clock_set_calibration (self->output->clock, 0, 0, 1, 1);
      g_mutex_lock (&self->output->lock);
      self->output->clock_epoch += self->output->clock_last_time;
      self->output->clock_last_time = 0;
      self->output->clock_offset = 0;
      g_mutex_unlock (&self->output->lock);
      gst_decklink_video_sink_stop (self);
      GST_OBJECT_LOCK (self);
      self->internal_base_time = GST_CLOCK_TIME_NONE;
      self->external_base_time = GST_CLOCK_TIME_NONE;
      self->internal_pause_time = GST_CLOCK_TIME_NONE;
      GST_OBJECT_UNLOCK (self);
      break;
    }
    case GST_STATE_CHANGE_READY_TO_PAUSED:{
      break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      /* Remember when we paused so the offset can be corrected on resume */
      self->internal_pause_time =
          gst_clock_get_internal_time (self->output->clock);
      break;
    default:
      break;
  }

  return ret;
}
1702
1703 static gboolean
gst_decklink_video_sink_event(GstBaseSink * bsink,GstEvent * event)1704 gst_decklink_video_sink_event (GstBaseSink * bsink, GstEvent * event)
1705 {
1706 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (bsink);
1707
1708 switch (GST_EVENT_TYPE (event)) {
1709 case GST_EVENT_FLUSH_START:
1710 {
1711 break;
1712 }
1713 case GST_EVENT_FLUSH_STOP:
1714 {
1715 gboolean reset_time;
1716
1717 gst_event_parse_flush_stop (event, &reset_time);
1718 if (reset_time) {
1719 GST_OBJECT_LOCK (self);
1720 /* force a recalculation of clock base times */
1721 self->external_base_time = GST_CLOCK_TIME_NONE;
1722 self->internal_base_time = GST_CLOCK_TIME_NONE;
1723 GST_OBJECT_UNLOCK (self);
1724 }
1725 break;
1726 }
1727 default:
1728 break;
1729 }
1730
1731 return GST_BASE_SINK_CLASS (parent_class)->event (bsink, event);
1732 }
1733
1734 static GstClock *
gst_decklink_video_sink_provide_clock(GstElement * element)1735 gst_decklink_video_sink_provide_clock (GstElement * element)
1736 {
1737 GstDecklinkVideoSink *self = GST_DECKLINK_VIDEO_SINK_CAST (element);
1738
1739 if (!self->output)
1740 return NULL;
1741
1742 return GST_CLOCK_CAST (gst_object_ref (self->output->clock));
1743 }
1744
1745 static gboolean
gst_decklink_video_sink_propose_allocation(GstBaseSink * bsink,GstQuery * query)1746 gst_decklink_video_sink_propose_allocation (GstBaseSink * bsink,
1747 GstQuery * query)
1748 {
1749 GstCaps *caps;
1750 GstVideoInfo info;
1751 GstBufferPool *pool;
1752 guint size;
1753
1754 gst_query_parse_allocation (query, &caps, NULL);
1755
1756 if (caps == NULL)
1757 return FALSE;
1758
1759 if (!gst_video_info_from_caps (&info, caps))
1760 return FALSE;
1761
1762 size = GST_VIDEO_INFO_SIZE (&info);
1763
1764 if (gst_query_get_n_allocation_pools (query) == 0) {
1765 GstStructure *structure;
1766 GstAllocator *allocator = NULL;
1767 GstAllocationParams params = { (GstMemoryFlags) 0, 15, 0, 0 };
1768
1769 if (gst_query_get_n_allocation_params (query) > 0)
1770 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
1771 else
1772 gst_query_add_allocation_param (query, allocator, ¶ms);
1773
1774 pool = gst_video_buffer_pool_new ();
1775
1776 structure = gst_buffer_pool_get_config (pool);
1777 gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
1778 gst_buffer_pool_config_set_allocator (structure, allocator, ¶ms);
1779
1780 if (allocator)
1781 gst_object_unref (allocator);
1782
1783 if (!gst_buffer_pool_set_config (pool, structure))
1784 goto config_failed;
1785
1786 gst_query_add_allocation_pool (query, pool, size, 0, 0);
1787 gst_object_unref (pool);
1788 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
1789 }
1790
1791 return TRUE;
1792 // ERRORS
1793 config_failed:
1794 {
1795 GST_ERROR_OBJECT (bsink, "failed to set config");
1796 gst_object_unref (pool);
1797 return FALSE;
1798 }
1799 }
1800