1 /* GStreamer
2 * Copyright (C) 2008 David Schleef <ds@schleef.org>
3 * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
4 * Copyright (C) 2011 Nokia Corporation. All rights reserved.
5 * Contact: Stefan Kost <stefan.kost@nokia.com>
6 * Copyright (C) 2012 Collabora Ltd.
7 * Author : Edward Hervey <edward@collabora.com>
8 *
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Library General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
13 *
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Library General Public License for more details.
18 *
19 * You should have received a copy of the GNU Library General Public
20 * License along with this library; if not, write to the
21 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
22 * Boston, MA 02110-1301, USA.
23 */
24
25 /**
26 * SECTION:gstvideoencoder
27 * @title: GstVideoEncoder
28 * @short_description: Base class for video encoders
29 *
30 * This base class is for video encoders turning raw video into
31 * encoded video data.
32 *
33 * GstVideoEncoder and subclass should cooperate as follows.
34 *
35 * ## Configuration
36 *
37 * * Initially, GstVideoEncoder calls @start when the encoder element
38 * is activated, which allows subclass to perform any global setup.
39 * * GstVideoEncoder calls @set_format to inform subclass of the format
40 * of input video data that it is about to receive. Subclass should
41 * setup for encoding and configure base class as appropriate
42 * (e.g. latency). While unlikely, it might be called more than once,
43 * if changing input parameters require reconfiguration. Baseclass
44 * will ensure that processing of current configuration is finished.
45 * * GstVideoEncoder calls @stop at end of all processing.
46 *
47 * ## Data processing
48 *
49 * * Base class collects input data and metadata into a frame and hands
50 * this to subclass' @handle_frame.
51 *
52 * * If codec processing results in encoded data, subclass should call
53 * @gst_video_encoder_finish_frame to have encoded data pushed
54 * downstream.
55 *
56 * * If implemented, baseclass calls subclass @pre_push just prior to
57 * pushing to allow subclasses to modify some metadata on the buffer.
58 * If it returns GST_FLOW_OK, the buffer is pushed downstream.
59 *
60 * * GstVideoEncoderClass will handle both srcpad and sinkpad events.
61 * Sink events will be passed to subclass if @event callback has been
62 * provided.
63 *
64 * ## Shutdown phase
65 *
66 * * GstVideoEncoder class calls @stop to inform the subclass that data
67 * parsing will be stopped.
68 *
69 * Subclass is responsible for providing pad template caps for
70 * source and sink pads. The pads need to be named "sink" and "src". It should
71 * also be able to provide fixed src pad caps in @getcaps by the time it calls
72 * @gst_video_encoder_finish_frame.
73 *
74 * Things that subclass need to take care of:
75 *
76 * * Provide pad templates
77 * * Provide source pad caps before pushing the first buffer
78 * * Accept data in @handle_frame and provide encoded results to
79 * @gst_video_encoder_finish_frame.
80 *
81 *
82 * The #GstVideoEncoder:qos property will enable the Quality-of-Service
83 * features of the encoder which gather statistics about the real-time
84 * performance of the downstream elements. If enabled, subclasses can
85 * use gst_video_encoder_get_max_encode_time() to check if input frames
86 * are already late and drop them right away to give a chance to the
87 * pipeline to catch up.
88 */
89
90 #ifdef HAVE_CONFIG_H
91 #include "config.h"
92 #endif
93
94 /* TODO
95 *
96 * * Calculate actual latency based on input/output timestamp/frame_number
97 * and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
98 */
99
100 #include <gst/video/video.h>
101 #include "gstvideoencoder.h"
102 #include "gstvideoutils.h"
103 #include "gstvideoutilsprivate.h"
104
105 #include <gst/video/gstvideometa.h>
106 #include <gst/video/gstvideopool.h>
107
108 #include <string.h>
109
110 GST_DEBUG_CATEGORY (videoencoder_debug);
111 #define GST_CAT_DEFAULT videoencoder_debug
112
113 /* properties */
114
/* Default values for the installed properties */
#define DEFAULT_QOS FALSE
#define DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL 0

/* Property IDs; PROP_0 is the mandatory GObject placeholder */
enum
{
  PROP_0,
  PROP_QOS,
  PROP_MIN_FORCE_KEY_UNIT_INTERVAL,
  PROP_LAST
};
125
/* Instance-private state of a GstVideoEncoder.  Unless a field is marked
 * otherwise, access is expected to happen with the stream lock held
 * (see the locking annotations below). */
struct _GstVideoEncoderPrivate
{
  guint64 presentation_frame_number;
  int distance_from_sync;       /* reset to 0 on (hard and soft) reset;
                                 * presumably frames since the last sync
                                 * point — TODO confirm against users */

  /* FIXME : (and introduce a context ?) */
  gboolean drained;

  /* latency configured by the subclass, in nanoseconds */
  gint64 min_latency;
  gint64 max_latency;

  /* events received while no current frame exists yet */
  /* FIXME 2.0: Use a GQueue or similar, see GstVideoCodecFrame::events */
  GList *current_frame_events;

  /* list of GstBuffer codec headers, owned by us
   * (see gst_video_encoder_set_headers()) */
  GList *headers;
  gboolean new_headers;         /* Whether new headers were just set */

  GQueue force_key_unit;        /* List of pending forced keyunits */
  GstClockTime min_force_key_unit_interval;
  GstClockTime last_force_key_unit_request;
  GstClockTime last_key_unit;

  guint32 system_frame_number;

  GQueue frames;                /* Protected with OBJECT_LOCK */
  GstVideoCodecState *input_state;
  GstVideoCodecState *output_state;
  gboolean output_state_changed;

  /* byte/time accounting; reset under OBJECT_LOCK in
   * gst_video_encoder_reset() */
  gint64 bytes;
  gint64 time;

  GstAllocator *allocator;
  GstAllocationParams params;

  /* upstream stream tags (global tags are passed through as-is) */
  GstTagList *upstream_tags;

  /* subclass tags */
  GstTagList *tags;
  GstTagMergeMode tags_merge_mode;

  gboolean tags_changed;

  GstClockTime min_pts;
  /* adjustment needed on pts, dts, segment start and stop to accommodate
   * min_pts */
  GstClockTime time_adjustment;

  /* QoS properties */
  gint qos_enabled;             /* ATOMIC */
  gdouble proportion;           /* OBJECT_LOCK */
  GstClockTime earliest_time;   /* OBJECT_LOCK */
  GstClockTime qos_frame_duration;      /* OBJECT_LOCK */
  /* qos messages: frames dropped/processed */
  guint dropped;
  guint processed;
};
184
/* Bookkeeping for one pending force-key-unit request, queued in
 * GstVideoEncoderPrivate::force_key_unit. */
typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
struct _ForcedKeyUnitEvent
{
  GstClockTime running_time;    /* requested time, may be GST_CLOCK_TIME_NONE */
  gboolean pending;             /* TRUE if this was requested already */
  gboolean all_headers;
  guint count;
  guint32 frame_id;
};
194
/* Free a ForcedKeyUnitEvent allocated by forced_key_unit_event_new() */
static void
forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
{
  g_slice_free (ForcedKeyUnitEvent, evt);
}
200
/* Allocate a new ForcedKeyUnitEvent; the pending flag and frame_id start
 * out zeroed via g_slice_new0(). */
static ForcedKeyUnitEvent *
forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
    guint count)
{
  ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);

  evt->running_time = running_time;
  evt->all_headers = all_headers;
  evt->count = count;

  return evt;
}
213
214 static gint
forced_key_unit_event_compare(const ForcedKeyUnitEvent * a,const ForcedKeyUnitEvent * b,gpointer user_data)215 forced_key_unit_event_compare (const ForcedKeyUnitEvent * a,
216 const ForcedKeyUnitEvent * b, gpointer user_data)
217 {
218 if (a->running_time == b->running_time) {
219 /* Sort pending ones before non-pending ones */
220 if (a->pending && !b->pending)
221 return -1;
222 if (!a->pending && b->pending)
223 return 1;
224 return 0;
225 }
226
227 if (a->running_time == GST_CLOCK_TIME_NONE)
228 return -1;
229 if (b->running_time == GST_CLOCK_TIME_NONE)
230 return 1;
231 if (a->running_time < b->running_time)
232 return -1;
233 return 1;
234 }
235
/* Parent class pointer, filled in during class_init */
static GstElementClass *parent_class = NULL;
/* Offset of GstVideoEncoderPrivate, registered in get_type() and
 * adjusted in class_init() */
static gint private_offset = 0;

/* cached quark to avoid contention on the global quark table lock */
#define META_TAG_VIDEO meta_tag_video_quark
static GQuark meta_tag_video_quark;
242
243 static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
244 static void gst_video_encoder_init (GstVideoEncoder * enc,
245 GstVideoEncoderClass * klass);
246
247 static void gst_video_encoder_finalize (GObject * object);
248
249 static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
250 GstCaps * caps);
251 static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
252 GstCaps * filter);
253 static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
254 GstEvent * event);
255 static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
256 GstEvent * event);
257 static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
258 GstBuffer * buf);
259 static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
260 element, GstStateChange transition);
261 static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
262 GstQuery * query);
263 static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
264 GstQuery * query);
265 static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
266 encoder, GstBuffer * buf, GstClockTime pts, GstClockTime dts,
267 GstClockTime duration);
268
269 static gboolean gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
270 GstEvent * event);
271 static gboolean gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
272 GstEvent * event);
273 static gboolean gst_video_encoder_decide_allocation_default (GstVideoEncoder *
274 encoder, GstQuery * query);
275 static gboolean gst_video_encoder_propose_allocation_default (GstVideoEncoder *
276 encoder, GstQuery * query);
277 static gboolean gst_video_encoder_negotiate_default (GstVideoEncoder * encoder);
278 static gboolean gst_video_encoder_negotiate_unlocked (GstVideoEncoder *
279 encoder);
280
281 static gboolean gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
282 GstQuery * query);
283 static gboolean gst_video_encoder_src_query_default (GstVideoEncoder * encoder,
284 GstQuery * query);
285
286 static gboolean gst_video_encoder_transform_meta_default (GstVideoEncoder *
287 encoder, GstVideoCodecFrame * frame, GstMeta * meta);
288
/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
 * method to get to the padtemplates */
GType
gst_video_encoder_get_type (void)
{
  static gsize type = 0;

  /* g_once_init_enter/_leave make the one-time registration thread-safe */
  if (g_once_init_enter (&type)) {
    GType _type;
    static const GTypeInfo info = {
      sizeof (GstVideoEncoderClass),
      NULL,                     /* base_init */
      NULL,                     /* base_finalize */
      (GClassInitFunc) gst_video_encoder_class_init,
      NULL,                     /* class_finalize */
      NULL,                     /* class_data */
      sizeof (GstVideoEncoder),
      0,                        /* n_preallocs */
      (GInstanceInitFunc) gst_video_encoder_init,
    };
    const GInterfaceInfo preset_interface_info = {
      NULL,                     /* interface_init */
      NULL,                     /* interface_finalize */
      NULL                      /* interface_data */
    };

    /* register as an abstract GstElement subtype, add the private struct
     * and the GstPreset interface */
    _type = g_type_register_static (GST_TYPE_ELEMENT,
        "GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
    private_offset =
        g_type_add_instance_private (_type, sizeof (GstVideoEncoderPrivate));
    g_type_add_interface_static (_type, GST_TYPE_PRESET,
        &preset_interface_info);
    g_once_init_leave (&type, _type);
  }
  return type;
}
325
326 static inline GstVideoEncoderPrivate *
gst_video_encoder_get_instance_private(GstVideoEncoder * self)327 gst_video_encoder_get_instance_private (GstVideoEncoder * self)
328 {
329 return (G_STRUCT_MEMBER_P (self, private_offset));
330 }
331
332 static void
gst_video_encoder_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)333 gst_video_encoder_set_property (GObject * object, guint prop_id,
334 const GValue * value, GParamSpec * pspec)
335 {
336 GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
337
338 switch (prop_id) {
339 case PROP_QOS:
340 gst_video_encoder_set_qos_enabled (sink, g_value_get_boolean (value));
341 break;
342 case PROP_MIN_FORCE_KEY_UNIT_INTERVAL:
343 gst_video_encoder_set_min_force_key_unit_interval (sink,
344 g_value_get_uint64 (value));
345 break;
346 default:
347 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
348 break;
349 }
350 }
351
352 static void
gst_video_encoder_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)353 gst_video_encoder_get_property (GObject * object, guint prop_id, GValue * value,
354 GParamSpec * pspec)
355 {
356 GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
357
358 switch (prop_id) {
359 case PROP_QOS:
360 g_value_set_boolean (value, gst_video_encoder_is_qos_enabled (sink));
361 break;
362 case PROP_MIN_FORCE_KEY_UNIT_INTERVAL:
363 g_value_set_uint64 (value,
364 gst_video_encoder_get_min_force_key_unit_interval (sink));
365 break;
366 default:
367 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
368 break;
369 }
370 }
371
/* Class init: wire up GObject/GstElement vmethods, install the default
 * implementations of the GstVideoEncoder vmethods and the properties. */
static void
gst_video_encoder_class_init (GstVideoEncoderClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = G_OBJECT_CLASS (klass);
  gstelement_class = GST_ELEMENT_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
      "Base Video Encoder");

  parent_class = g_type_class_peek_parent (klass);

  /* adjust the private offset registered in gst_video_encoder_get_type() */
  if (private_offset != 0)
    g_type_class_adjust_private_offset (klass, &private_offset);

  gobject_class->set_property = gst_video_encoder_set_property;
  gobject_class->get_property = gst_video_encoder_get_property;
  gobject_class->finalize = gst_video_encoder_finalize;

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);

  /* default vmethod implementations; subclasses may override any of these */
  klass->sink_event = gst_video_encoder_sink_event_default;
  klass->src_event = gst_video_encoder_src_event_default;
  klass->propose_allocation = gst_video_encoder_propose_allocation_default;
  klass->decide_allocation = gst_video_encoder_decide_allocation_default;
  klass->negotiate = gst_video_encoder_negotiate_default;
  klass->sink_query = gst_video_encoder_sink_query_default;
  klass->src_query = gst_video_encoder_src_query_default;
  klass->transform_meta = gst_video_encoder_transform_meta_default;

  g_object_class_install_property (gobject_class, PROP_QOS,
      g_param_spec_boolean ("qos", "Qos",
          "Handle Quality-of-Service events from downstream", DEFAULT_QOS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstVideoEncoder:min-force-key-unit-interval:
   *
   * Minimum interval between force-keyunit requests in nanoseconds. See
   * gst_video_encoder_set_min_force_key_unit_interval() for more details.
   *
   * Since: 1.18
   **/
  g_object_class_install_property (gobject_class,
      PROP_MIN_FORCE_KEY_UNIT_INTERVAL,
      g_param_spec_uint64 ("min-force-key-unit-interval",
          "Minimum Force Keyunit Interval",
          "Minimum interval between force-keyunit requests in nanoseconds", 0,
          G_MAXUINT64, DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* pre-intern the video meta tag quark once, to avoid quark table lock
   * contention on every later lookup */
  meta_tag_video_quark = g_quark_from_static_string (GST_META_TAG_VIDEO_STR);
}
428
429 static GList *
_flush_events(GstPad * pad,GList * events)430 _flush_events (GstPad * pad, GList * events)
431 {
432 GList *tmp;
433
434 for (tmp = events; tmp; tmp = tmp->next) {
435 if (GST_EVENT_TYPE (tmp->data) != GST_EVENT_EOS &&
436 GST_EVENT_TYPE (tmp->data) != GST_EVENT_SEGMENT &&
437 GST_EVENT_IS_STICKY (tmp->data)) {
438 gst_pad_store_sticky_event (pad, GST_EVENT_CAST (tmp->data));
439 }
440 gst_event_unref (tmp->data);
441 }
442 g_list_free (events);
443
444 return NULL;
445 }
446
447 #if !GLIB_CHECK_VERSION(2, 60, 0)
448 #define g_queue_clear_full queue_clear_full
449 static void
queue_clear_full(GQueue * queue,GDestroyNotify free_func)450 queue_clear_full (GQueue * queue, GDestroyNotify free_func)
451 {
452 gpointer data;
453
454 while ((data = g_queue_pop_head (queue)) != NULL)
455 free_func (data);
456 }
457 #endif
458
459 static gboolean
gst_video_encoder_reset(GstVideoEncoder * encoder,gboolean hard)460 gst_video_encoder_reset (GstVideoEncoder * encoder, gboolean hard)
461 {
462 GstVideoEncoderPrivate *priv = encoder->priv;
463 gboolean ret = TRUE;
464
465 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
466
467 priv->presentation_frame_number = 0;
468 priv->distance_from_sync = 0;
469
470 g_queue_clear_full (&priv->force_key_unit,
471 (GDestroyNotify) forced_key_unit_event_free);
472 priv->last_force_key_unit_request = GST_CLOCK_TIME_NONE;
473 priv->last_key_unit = GST_CLOCK_TIME_NONE;
474
475 priv->drained = TRUE;
476
477 GST_OBJECT_LOCK (encoder);
478 priv->bytes = 0;
479 priv->time = 0;
480 GST_OBJECT_UNLOCK (encoder);
481
482 priv->time_adjustment = GST_CLOCK_TIME_NONE;
483
484 if (hard) {
485 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
486 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
487
488 if (priv->input_state)
489 gst_video_codec_state_unref (priv->input_state);
490 priv->input_state = NULL;
491 if (priv->output_state)
492 gst_video_codec_state_unref (priv->output_state);
493 priv->output_state = NULL;
494
495 if (priv->upstream_tags) {
496 gst_tag_list_unref (priv->upstream_tags);
497 priv->upstream_tags = NULL;
498 }
499 if (priv->tags)
500 gst_tag_list_unref (priv->tags);
501 priv->tags = NULL;
502 priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
503 priv->tags_changed = FALSE;
504
505 g_list_foreach (priv->headers, (GFunc) gst_event_unref, NULL);
506 g_list_free (priv->headers);
507 priv->headers = NULL;
508 priv->new_headers = FALSE;
509
510 if (priv->allocator) {
511 gst_object_unref (priv->allocator);
512 priv->allocator = NULL;
513 }
514
515 g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL);
516 g_list_free (priv->current_frame_events);
517 priv->current_frame_events = NULL;
518
519 GST_OBJECT_LOCK (encoder);
520 priv->proportion = 0.5;
521 priv->earliest_time = GST_CLOCK_TIME_NONE;
522 priv->qos_frame_duration = 0;
523 GST_OBJECT_UNLOCK (encoder);
524
525 priv->dropped = 0;
526 priv->processed = 0;
527 } else {
528 GList *l;
529
530 for (l = priv->frames.head; l; l = l->next) {
531 GstVideoCodecFrame *frame = l->data;
532
533 frame->events = _flush_events (encoder->srcpad, frame->events);
534 }
535 priv->current_frame_events = _flush_events (encoder->srcpad,
536 encoder->priv->current_frame_events);
537 }
538
539 g_queue_clear_full (&priv->frames,
540 (GDestroyNotify) gst_video_codec_frame_unref);
541
542 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
543
544 return ret;
545 }
546
547 /* Always call reset() in one way or another after this */
548 static gboolean
gst_video_encoder_flush(GstVideoEncoder * encoder)549 gst_video_encoder_flush (GstVideoEncoder * encoder)
550 {
551 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
552 gboolean ret = TRUE;
553
554 if (klass->flush)
555 ret = klass->flush (encoder);
556
557 return ret;
558 }
559
/* Instance init: create the sink/src pads from the subclass' pad templates,
 * initialise segments, the stream lock and default private state, then
 * perform a hard reset. */
static void
gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
{
  GstVideoEncoderPrivate *priv;
  GstPadTemplate *pad_template;
  GstPad *pad;

  GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init");

  priv = encoder->priv = gst_video_encoder_get_instance_private (encoder);

  /* subclasses must provide a "sink" pad template */
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
  g_return_if_fail (pad_template != NULL);

  encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");

  gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
  gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);

  /* ... and a "src" pad template */
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
  g_return_if_fail (pad_template != NULL);

  encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");

  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_src_event));
  gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad);

  gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
  gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);

  g_rec_mutex_init (&encoder->stream_lock);

  priv->headers = NULL;
  priv->new_headers = FALSE;

  g_queue_init (&priv->frames);
  g_queue_init (&priv->force_key_unit);

  priv->min_latency = 0;
  priv->max_latency = 0;
  priv->min_pts = GST_CLOCK_TIME_NONE;
  priv->time_adjustment = GST_CLOCK_TIME_NONE;

  /* hard reset puts everything else into a known initial state */
  gst_video_encoder_reset (encoder, TRUE);
}
614
/**
 * gst_video_encoder_set_headers:
 * @video_encoder: a #GstVideoEncoder
 * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
 *
 * Set the codec headers to be sent downstream whenever requested.
 */
void
gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
{
  GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder);

  GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers);
  /* drop previously set header buffers before taking ownership of @headers */
  if (video_encoder->priv->headers) {
    g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref,
        NULL);
    g_list_free (video_encoder->priv->headers);
  }
  video_encoder->priv->headers = headers;
  video_encoder->priv->new_headers = TRUE;

  GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder);
}
638
/* Create a new output #GstVideoCodecState holding @caps (ownership of
 * @caps is taken — no extra ref).  The video info is initialised to
 * GST_VIDEO_FORMAT_ENCODED and, when a @reference input state is given,
 * its display-related fields are copied over.  Returns NULL if the
 * ENCODED format cannot be set on the info. */
static GstVideoCodecState *
_new_output_state (GstCaps * caps, GstVideoCodecState * reference)
{
  GstVideoCodecState *state;

  state = g_slice_new0 (GstVideoCodecState);
  state->ref_count = 1;
  gst_video_info_init (&state->info);

  if (!gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0)) {
    g_slice_free (GstVideoCodecState, state);
    return NULL;
  }

  state->caps = caps;

  if (reference) {
    GstVideoInfo *tgt, *ref;

    tgt = &state->info;
    ref = &reference->info;

    /* Copy over extra fields from reference state */
    tgt->interlace_mode = ref->interlace_mode;
    tgt->flags = ref->flags;
    tgt->width = ref->width;
    tgt->height = ref->height;
    tgt->chroma_site = ref->chroma_site;
    tgt->colorimetry = ref->colorimetry;
    tgt->par_n = ref->par_n;
    tgt->par_d = ref->par_d;
    tgt->fps_n = ref->fps_n;
    tgt->fps_d = ref->fps_d;

    GST_VIDEO_INFO_FIELD_ORDER (tgt) = GST_VIDEO_INFO_FIELD_ORDER (ref);

    GST_VIDEO_INFO_MULTIVIEW_MODE (tgt) = GST_VIDEO_INFO_MULTIVIEW_MODE (ref);
    GST_VIDEO_INFO_MULTIVIEW_FLAGS (tgt) = GST_VIDEO_INFO_MULTIVIEW_FLAGS (ref);

    /* HDR metadata is deep-copied so the output state owns its own copies */
    if (reference->mastering_display_info) {
      state->mastering_display_info = g_slice_dup (GstVideoMasteringDisplayInfo,
          reference->mastering_display_info);
    }
    if (reference->content_light_level) {
      state->content_light_level = g_slice_dup (GstVideoContentLightLevel,
          reference->content_light_level);
    }
  }

  return state;
}
690
/* Create an input #GstVideoCodecState from @caps (a reference is taken,
 * unlike _new_output_state()).  Optional mastering-display-info and
 * content-light-level caps fields are parsed as well.  Returns NULL if
 * @caps cannot be parsed into a GstVideoInfo. */
static GstVideoCodecState *
_new_input_state (GstCaps * caps)
{
  GstVideoCodecState *state;
  GstStructure *c_struct;
  const gchar *s;

  state = g_slice_new0 (GstVideoCodecState);
  state->ref_count = 1;
  gst_video_info_init (&state->info);
  if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
    goto parse_fail;
  state->caps = gst_caps_ref (caps);

  c_struct = gst_caps_get_structure (caps, 0);

  if ((s = gst_structure_get_string (c_struct, "mastering-display-info"))) {
    state->mastering_display_info = g_slice_new (GstVideoMasteringDisplayInfo);
    gst_video_mastering_display_info_from_string (state->mastering_display_info,
        s);
  }
  if ((s = gst_structure_get_string (c_struct, "content-light-level"))) {
    state->content_light_level = g_slice_new (GstVideoContentLightLevel);
    gst_video_content_light_level_from_string (state->content_light_level, s);
  }

  return state;

parse_fail:
  {
    g_slice_free (GstVideoCodecState, state);
    return NULL;
  }
}
725
/* Handle new input caps from a CAPS event: parse them into a codec state
 * and, unless nothing effectively changed, hand the state to the subclass'
 * ::set_format.  Returns FALSE if the caps cannot be parsed or the
 * subclass rejects them. */
static gboolean
gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
{
  GstVideoEncoderClass *encoder_class;
  GstVideoCodecState *state;
  gboolean ret = TRUE;

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  /* fast path: byte-equal caps need no reconfiguration */
  if (encoder->priv->input_state) {
    GST_DEBUG_OBJECT (encoder,
        "Checking if caps changed old %" GST_PTR_FORMAT " new %" GST_PTR_FORMAT,
        encoder->priv->input_state->caps, caps);
    if (gst_caps_is_equal (encoder->priv->input_state->caps, caps))
      goto caps_not_changed;
  }

  state = _new_input_state (caps);
  if (G_UNLIKELY (!state))
    goto parse_fail;

  /* different caps strings may still parse to equal video info */
  if (encoder->priv->input_state
      && gst_video_info_is_equal (&state->info,
          &encoder->priv->input_state->info)) {
    gst_video_codec_state_unref (state);
    goto caps_not_changed;
  }

  if (encoder_class->reset) {
    GST_FIXME_OBJECT (encoder, "GstVideoEncoder::reset() is deprecated");
    encoder_class->reset (encoder, TRUE);
  }

  /* and subclass should be ready to configure format at any time around */
  if (encoder_class->set_format != NULL)
    ret = encoder_class->set_format (encoder, state);

  /* on acceptance the new state replaces the old one; otherwise drop it */
  if (ret) {
    if (encoder->priv->input_state)
      gst_video_codec_state_unref (encoder->priv->input_state);
    encoder->priv->input_state = state;
  } else {
    gst_video_codec_state_unref (state);
  }

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  if (!ret)
    GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);

  return ret;

caps_not_changed:
  {
    GST_DEBUG_OBJECT (encoder, "Caps did not change - ignore");
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    return TRUE;
  }

  /* ERRORS */
parse_fail:
  {
    GST_WARNING_OBJECT (encoder, "Failed to parse caps");
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    return FALSE;
  }
}
797
798 /**
799 * gst_video_encoder_proxy_getcaps:
800 * @enc: a #GstVideoEncoder
801 * @caps: (allow-none): initial caps
802 * @filter: (allow-none): filter caps
803 *
804 * Returns caps that express @caps (or sink template caps if @caps == NULL)
805 * restricted to resolution/format/... combinations supported by downstream
806 * elements (e.g. muxers).
807 *
808 * Returns: (transfer full): a #GstCaps owned by caller
809 */
810 GstCaps *
gst_video_encoder_proxy_getcaps(GstVideoEncoder * encoder,GstCaps * caps,GstCaps * filter)811 gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
812 GstCaps * filter)
813 {
814 return __gst_video_element_proxy_getcaps (GST_ELEMENT_CAST (encoder),
815 GST_VIDEO_ENCODER_SINK_PAD (encoder),
816 GST_VIDEO_ENCODER_SRC_PAD (encoder), caps, filter);
817 }
818
819 static GstCaps *
gst_video_encoder_sink_getcaps(GstVideoEncoder * encoder,GstCaps * filter)820 gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
821 {
822 GstVideoEncoderClass *klass;
823 GstCaps *caps;
824
825 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
826
827 if (klass->getcaps)
828 caps = klass->getcaps (encoder, filter);
829 else
830 caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
831
832 GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
833
834 return caps;
835 }
836
837 static gboolean
gst_video_encoder_decide_allocation_default(GstVideoEncoder * encoder,GstQuery * query)838 gst_video_encoder_decide_allocation_default (GstVideoEncoder * encoder,
839 GstQuery * query)
840 {
841 GstAllocator *allocator = NULL;
842 GstAllocationParams params;
843 gboolean update_allocator;
844
845 /* we got configuration from our peer or the decide_allocation method,
846 * parse them */
847 if (gst_query_get_n_allocation_params (query) > 0) {
848 /* try the allocator */
849 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
850 update_allocator = TRUE;
851 } else {
852 allocator = NULL;
853 gst_allocation_params_init (¶ms);
854 update_allocator = FALSE;
855 }
856
857 if (update_allocator)
858 gst_query_set_nth_allocation_param (query, 0, allocator, ¶ms);
859 else
860 gst_query_add_allocation_param (query, allocator, ¶ms);
861 if (allocator)
862 gst_object_unref (allocator);
863
864 return TRUE;
865 }
866
/* Default ::propose_allocation: when the peer proposed no pool, offer a
 * GstVideoBufferPool sized for the negotiated caps, with 16-byte-aligned
 * allocation params and GstVideoMeta support.  Returns FALSE when the
 * query carries no parseable video caps. */
static gboolean
gst_video_encoder_propose_allocation_default (GstVideoEncoder * encoder,
    GstQuery * query)
{
  GstCaps *caps;
  GstVideoInfo info;
  GstBufferPool *pool;
  guint size;

  gst_query_parse_allocation (query, &caps, NULL);

  if (caps == NULL)
    return FALSE;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  size = GST_VIDEO_INFO_SIZE (&info);

  if (gst_query_get_n_allocation_pools (query) == 0) {
    GstStructure *structure;
    GstAllocator *allocator = NULL;
    /* align = 15 -> 16-byte alignment */
    GstAllocationParams params = { 0, 15, 0, 0 };

    if (gst_query_get_n_allocation_params (query) > 0)
      gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    else
      gst_query_add_allocation_param (query, allocator, &params);

    pool = gst_video_buffer_pool_new ();

    structure = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
    gst_buffer_pool_config_set_allocator (structure, allocator, &params);

    if (allocator)
      gst_object_unref (allocator);

    if (!gst_buffer_pool_set_config (pool, structure))
      goto config_failed;

    gst_query_add_allocation_pool (query, pool, size, 0, 0);
    gst_object_unref (pool);
    gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  }

  return TRUE;

  /* ERRORS */
config_failed:
  {
    GST_ERROR_OBJECT (encoder, "failed to set config");
    gst_object_unref (pool);
    return FALSE;
  }
}
923
/* Default ::sink_query: handles CAPS (via sink_getcaps), CONVERT (via the
 * raw-video converter, needs an input state) and ALLOCATION (via the
 * ::propose_allocation vmethod); everything else goes to the pad default. */
static gboolean
gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
    GstQuery * query)
{
  GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);
  gboolean res = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
    {
      GstCaps *filter, *caps;

      gst_query_parse_caps (query, &filter);
      caps = gst_video_encoder_sink_getcaps (encoder, filter);
      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);
      res = TRUE;
      break;
    }
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      GST_DEBUG_OBJECT (encoder, "convert query");

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      /* input_state is read under the object lock */
      GST_OBJECT_LOCK (encoder);
      if (encoder->priv->input_state != NULL)
        res = __gst_video_rawvideo_convert (encoder->priv->input_state,
            src_fmt, src_val, &dest_fmt, &dest_val);
      else
        res = FALSE;
      GST_OBJECT_UNLOCK (encoder);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    case GST_QUERY_ALLOCATION:
    {
      GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

      if (klass->propose_allocation)
        res = klass->propose_allocation (encoder, query);
      break;
    }
    default:
      res = gst_pad_query_default (pad, GST_OBJECT (encoder), query);
      break;
  }
  return res;

error:
  GST_DEBUG_OBJECT (encoder, "query failed");
  return res;
}
981
982 static gboolean
gst_video_encoder_sink_query(GstPad * pad,GstObject * parent,GstQuery * query)983 gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
984 GstQuery * query)
985 {
986 GstVideoEncoder *encoder;
987 GstVideoEncoderClass *encoder_class;
988 gboolean ret = FALSE;
989
990 encoder = GST_VIDEO_ENCODER (parent);
991 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
992
993 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
994 GST_QUERY_TYPE_NAME (query));
995
996 if (encoder_class->sink_query)
997 ret = encoder_class->sink_query (encoder, query);
998
999 return ret;
1000 }
1001
1002 static void
gst_video_encoder_finalize(GObject * object)1003 gst_video_encoder_finalize (GObject * object)
1004 {
1005 GstVideoEncoder *encoder;
1006
1007 GST_DEBUG_OBJECT (object, "finalize");
1008
1009 encoder = GST_VIDEO_ENCODER (object);
1010 g_rec_mutex_clear (&encoder->stream_lock);
1011
1012 if (encoder->priv->allocator) {
1013 gst_object_unref (encoder->priv->allocator);
1014 encoder->priv->allocator = NULL;
1015 }
1016
1017 G_OBJECT_CLASS (parent_class)->finalize (object);
1018 }
1019
/* Pushes @event on the source pad, first rewriting TIME SEGMENT events:
 * the encoder's time adjustment (if any) is applied to the segment
 * boundaries and the adjusted segment is stored as output_segment.
 * Takes ownership of @event; returns the result of the pad push. */
static gboolean
gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
{
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      gst_event_copy_segment (event, &segment);

      GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);

      /* Only TIME segments are adjusted and recorded; other formats
       * are pushed through untouched. */
      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (encoder, "received non TIME segment");
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
        break;
      }

      /* Shift start/position/stop by the timestamp adjustment computed
       * in the chain function so output timestamps fall inside the
       * advertised segment. */
      if (encoder->priv->time_adjustment != GST_CLOCK_TIME_NONE) {
        segment.start += encoder->priv->time_adjustment;
        if (GST_CLOCK_TIME_IS_VALID (segment.position)) {
          segment.position += encoder->priv->time_adjustment;
        }
        if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
          segment.stop += encoder->priv->time_adjustment;
        }
      }

      encoder->output_segment = segment;
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      /* Replace the original event with one carrying the adjusted segment */
      gst_event_unref (event);
      event = gst_event_new_segment (&encoder->output_segment);

      break;
    }
    default:
      break;
  }

  return gst_pad_push_event (encoder->srcpad, event);
}
1064
1065 static GstEvent *
gst_video_encoder_create_merged_tags_event(GstVideoEncoder * enc)1066 gst_video_encoder_create_merged_tags_event (GstVideoEncoder * enc)
1067 {
1068 GstTagList *merged_tags;
1069
1070 GST_LOG_OBJECT (enc, "upstream : %" GST_PTR_FORMAT, enc->priv->upstream_tags);
1071 GST_LOG_OBJECT (enc, "encoder : %" GST_PTR_FORMAT, enc->priv->tags);
1072 GST_LOG_OBJECT (enc, "mode : %d", enc->priv->tags_merge_mode);
1073
1074 merged_tags =
1075 gst_tag_list_merge (enc->priv->upstream_tags, enc->priv->tags,
1076 enc->priv->tags_merge_mode);
1077
1078 GST_DEBUG_OBJECT (enc, "merged : %" GST_PTR_FORMAT, merged_tags);
1079
1080 if (merged_tags == NULL)
1081 return NULL;
1082
1083 if (gst_tag_list_is_empty (merged_tags)) {
1084 gst_tag_list_unref (merged_tags);
1085 return NULL;
1086 }
1087
1088 return gst_event_new_tag (merged_tags);
1089 }
1090
1091 static inline void
gst_video_encoder_check_and_push_tags(GstVideoEncoder * encoder)1092 gst_video_encoder_check_and_push_tags (GstVideoEncoder * encoder)
1093 {
1094 if (encoder->priv->tags_changed) {
1095 GstEvent *tags_event;
1096
1097 tags_event = gst_video_encoder_create_merged_tags_event (encoder);
1098
1099 if (tags_event != NULL)
1100 gst_video_encoder_push_event (encoder, tags_event);
1101
1102 encoder->priv->tags_changed = FALSE;
1103 }
1104 }
1105
/* Default handler for events arriving on the sink pad.  Consumes,
 * rewrites or forwards @event as appropriate; serialized events that
 * must stay ordered with buffers are queued on the next frame instead
 * of being pushed immediately.  Takes ownership of @event.
 * Returns TRUE if the event was handled. */
static gboolean
gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
    GstEvent * event)
{
  GstVideoEncoderClass *encoder_class;
  gboolean ret = FALSE;

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_video_encoder_setcaps (encoder, caps);

      /* CAPS is fully consumed here; output caps are produced during
       * negotiation, not by forwarding this event. */
      gst_event_unref (event);
      event = NULL;
      break;
    }
    case GST_EVENT_EOS:
    {
      GstFlowReturn flow_ret;

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      /* Give the subclass a chance to drain any remaining frames */
      if (encoder_class->finish) {
        flow_ret = encoder_class->finish (encoder);
      } else {
        flow_ret = GST_FLOW_OK;
      }

      /* Push out the events that were waiting for the next frame.
       * They were prepended, so walk from the tail to restore the
       * original order. */
      if (encoder->priv->current_frame_events) {
        GList *l;

        for (l = g_list_last (encoder->priv->current_frame_events); l;
            l = g_list_previous (l)) {
          GstEvent *event = GST_EVENT (l->data);        /* shadows outer @event */

          gst_video_encoder_push_event (encoder, event);
        }
      }
      g_list_free (encoder->priv->current_frame_events);
      encoder->priv->current_frame_events = NULL;

      gst_video_encoder_check_and_push_tags (encoder);

      ret = (flow_ret == GST_FLOW_OK);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      gst_event_copy_segment (event, &segment);

      GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);

      /* Only TIME segments are tracked as the input segment */
      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
        break;
      }

      encoder->input_segment = segment;
      ret = TRUE;
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      if (gst_video_event_is_force_key_unit (event)) {
        GstClockTime running_time;
        gboolean all_headers;
        guint count;

        if (gst_video_event_parse_downstream_force_key_unit (event,
                NULL, NULL, &running_time, &all_headers, &count)) {
          ForcedKeyUnitEvent *fevt;

          /* Queue the request sorted by running time; the chain
           * function acts on it when a matching frame arrives. */
          GST_OBJECT_LOCK (encoder);
          fevt = forced_key_unit_event_new (running_time, all_headers, count);
          g_queue_insert_sorted (&encoder->priv->force_key_unit, fevt,
              (GCompareDataFunc) forced_key_unit_event_compare, NULL);
          GST_OBJECT_UNLOCK (encoder);

          GST_DEBUG_OBJECT (encoder,
              "force-key-unit event: running-time %" GST_TIME_FORMAT
              ", all_headers %d, count %u",
              GST_TIME_ARGS (running_time), all_headers, count);
        }
        gst_event_unref (event);
        event = NULL;
        ret = TRUE;
      }
      break;
    }
    case GST_EVENT_STREAM_START:
    {
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      /* Flush upstream tags after a STREAM_START */
      GST_DEBUG_OBJECT (encoder, "STREAM_START, clearing upstream tags");
      if (encoder->priv->upstream_tags) {
        gst_tag_list_unref (encoder->priv->upstream_tags);
        encoder->priv->upstream_tags = NULL;
        encoder->priv->tags_changed = TRUE;
      }
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      break;
    }
    case GST_EVENT_TAG:
    {
      GstTagList *tags;

      gst_event_parse_tag (event, &tags);

      /* Only stream-scoped tags are merged; global tags pass through */
      if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
        GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
        if (encoder->priv->upstream_tags != tags) {
          tags = gst_tag_list_copy (tags);

          /* FIXME: make generic based on GST_TAG_FLAG_ENCODED */
          /* Strip tags that describe the *previous* encoding; the
           * encoder will produce its own values for these. */
          gst_tag_list_remove_tag (tags, GST_TAG_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_AUDIO_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_VIDEO_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_SUBTITLE_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_CONTAINER_FORMAT);
          gst_tag_list_remove_tag (tags, GST_TAG_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_NOMINAL_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_MAXIMUM_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_MINIMUM_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_ENCODER);
          gst_tag_list_remove_tag (tags, GST_TAG_ENCODER_VERSION);

          if (encoder->priv->upstream_tags)
            gst_tag_list_unref (encoder->priv->upstream_tags);
          encoder->priv->upstream_tags = tags;
          GST_INFO_OBJECT (encoder, "upstream tags: %" GST_PTR_FORMAT, tags);
        }
        /* Replace the incoming event with a merged-tags event (may be
         * NULL when there is nothing to send). */
        gst_event_unref (event);
        event = gst_video_encoder_create_merged_tags_event (encoder);
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
        if (!event)
          ret = TRUE;
      }
      break;
    }
    case GST_EVENT_FLUSH_STOP:{
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      gst_video_encoder_flush (encoder);
      gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
      gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
      gst_video_encoder_reset (encoder, FALSE);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      break;
    }
    default:
      break;
  }

  /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
   * For EOS this is required because no buffer or serialized event
   * will come after EOS and nothing could trigger another
   * _finish_frame() call.
   *
   * If the subclass handles sending of EOS manually it can simply
   * not chain up to the parent class' event handler.
   *
   * For FLUSH_STOP this is required because it is expected
   * to be forwarded immediately and no buffers are queued anyway.
   */
  if (event) {
    if (!GST_EVENT_IS_SERIALIZED (event)
        || GST_EVENT_TYPE (event) == GST_EVENT_EOS
        || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
      ret = gst_video_encoder_push_event (encoder, event);
    } else {
      /* Serialized events are queued and attached to the next frame so
       * they keep their position relative to the buffers. */
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      encoder->priv->current_frame_events =
          g_list_prepend (encoder->priv->current_frame_events, event);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      ret = TRUE;
    }
  }

  return ret;
}
1296
1297 static gboolean
gst_video_encoder_sink_event(GstPad * pad,GstObject * parent,GstEvent * event)1298 gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
1299 GstEvent * event)
1300 {
1301 GstVideoEncoder *enc;
1302 GstVideoEncoderClass *klass;
1303 gboolean ret = TRUE;
1304
1305 enc = GST_VIDEO_ENCODER (parent);
1306 klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
1307
1308 GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
1309 GST_EVENT_TYPE_NAME (event));
1310
1311 if (klass->sink_event)
1312 ret = klass->sink_event (enc, event);
1313
1314 return ret;
1315 }
1316
1317 static gboolean
gst_video_encoder_src_event_default(GstVideoEncoder * encoder,GstEvent * event)1318 gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
1319 GstEvent * event)
1320 {
1321 gboolean ret = FALSE;
1322 GstVideoEncoderPrivate *priv = encoder->priv;
1323
1324 switch (GST_EVENT_TYPE (event)) {
1325 case GST_EVENT_CUSTOM_UPSTREAM:
1326 {
1327 if (gst_video_event_is_force_key_unit (event)) {
1328 GstClockTime running_time;
1329 gboolean all_headers;
1330 guint count;
1331
1332 if (gst_video_event_parse_upstream_force_key_unit (event,
1333 &running_time, &all_headers, &count)) {
1334 ForcedKeyUnitEvent *fevt;
1335
1336 GST_OBJECT_LOCK (encoder);
1337 fevt = forced_key_unit_event_new (running_time, all_headers, count);
1338 g_queue_insert_sorted (&encoder->priv->force_key_unit, fevt,
1339 (GCompareDataFunc) forced_key_unit_event_compare, NULL);
1340 GST_OBJECT_UNLOCK (encoder);
1341
1342 GST_DEBUG_OBJECT (encoder,
1343 "force-key-unit event: running-time %" GST_TIME_FORMAT
1344 ", all_headers %d, count %u",
1345 GST_TIME_ARGS (running_time), all_headers, count);
1346 }
1347 gst_event_unref (event);
1348 event = NULL;
1349 ret = TRUE;
1350 }
1351 break;
1352 }
1353 case GST_EVENT_QOS:
1354 {
1355 GstQOSType type;
1356 gdouble proportion;
1357 GstClockTimeDiff diff;
1358 GstClockTime timestamp;
1359
1360 if (!g_atomic_int_get (&priv->qos_enabled))
1361 break;
1362
1363 gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp);
1364
1365 GST_OBJECT_LOCK (encoder);
1366 priv->proportion = proportion;
1367 if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
1368 if (G_UNLIKELY (diff > 0)) {
1369 priv->earliest_time = timestamp + 2 * diff + priv->qos_frame_duration;
1370 } else {
1371 priv->earliest_time = timestamp + diff;
1372 }
1373 } else {
1374 priv->earliest_time = GST_CLOCK_TIME_NONE;
1375 }
1376 GST_OBJECT_UNLOCK (encoder);
1377
1378 GST_DEBUG_OBJECT (encoder,
1379 "got QoS %" GST_TIME_FORMAT ", %" GST_STIME_FORMAT ", %g",
1380 GST_TIME_ARGS (timestamp), GST_STIME_ARGS (diff), proportion);
1381
1382 ret = gst_pad_push_event (encoder->sinkpad, event);
1383 event = NULL;
1384 break;
1385 }
1386 default:
1387 break;
1388 }
1389
1390 if (event)
1391 ret =
1392 gst_pad_event_default (encoder->srcpad, GST_OBJECT_CAST (encoder),
1393 event);
1394
1395 return ret;
1396 }
1397
1398 static gboolean
gst_video_encoder_src_event(GstPad * pad,GstObject * parent,GstEvent * event)1399 gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
1400 {
1401 GstVideoEncoder *encoder;
1402 GstVideoEncoderClass *klass;
1403 gboolean ret = FALSE;
1404
1405 encoder = GST_VIDEO_ENCODER (parent);
1406 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1407
1408 GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
1409
1410 if (klass->src_event)
1411 ret = klass->src_event (encoder, event);
1412
1413 return ret;
1414 }
1415
/* Default handler for queries arriving on the source pad.
 * Handles CONVERT (using the accumulated bytes/time totals) and
 * LATENCY (peer latency plus the encoder's own min/max latency);
 * everything else goes to the default pad handler. */
static gboolean
gst_video_encoder_src_query_default (GstVideoEncoder * enc, GstQuery * query)
{
  GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (enc);
  GstVideoEncoderPrivate *priv;
  gboolean res;

  priv = enc->priv;

  GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      /* Conversion uses the running byte/time totals, protected by the
       * object lock. */
      GST_OBJECT_LOCK (enc);
      res =
          __gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt,
          src_val, &dest_fmt, &dest_val);
      GST_OBJECT_UNLOCK (enc);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    case GST_QUERY_LATENCY:
    {
      gboolean live;
      GstClockTime min_latency, max_latency;

      /* First query upstream, then add our own contribution */
      res = gst_pad_peer_query (enc->sinkpad, query);
      if (res) {
        gst_query_parse_latency (query, &live, &min_latency, &max_latency);
        GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
            GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
            GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

        GST_OBJECT_LOCK (enc);
        min_latency += priv->min_latency;
        /* NONE on either side means unbounded max latency */
        if (max_latency == GST_CLOCK_TIME_NONE
            || enc->priv->max_latency == GST_CLOCK_TIME_NONE)
          max_latency = GST_CLOCK_TIME_NONE;
        else
          max_latency += enc->priv->max_latency;
        GST_OBJECT_UNLOCK (enc);

        gst_query_set_latency (query, live, min_latency, max_latency);
      }
    }
      break;
    default:
      res = gst_pad_query_default (pad, GST_OBJECT (enc), query);
  }
  return res;

error:
  GST_DEBUG_OBJECT (enc, "query failed");
  return res;
}
1478
1479 static gboolean
gst_video_encoder_src_query(GstPad * pad,GstObject * parent,GstQuery * query)1480 gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
1481 {
1482 GstVideoEncoder *encoder;
1483 GstVideoEncoderClass *encoder_class;
1484 gboolean ret = FALSE;
1485
1486 encoder = GST_VIDEO_ENCODER (parent);
1487 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1488
1489 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
1490 GST_QUERY_TYPE_NAME (query));
1491
1492 if (encoder_class->src_query)
1493 ret = encoder_class->src_query (encoder, query);
1494
1495 return ret;
1496 }
1497
1498 static GstVideoCodecFrame *
gst_video_encoder_new_frame(GstVideoEncoder * encoder,GstBuffer * buf,GstClockTime pts,GstClockTime dts,GstClockTime duration)1499 gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
1500 GstClockTime pts, GstClockTime dts, GstClockTime duration)
1501 {
1502 GstVideoEncoderPrivate *priv = encoder->priv;
1503 GstVideoCodecFrame *frame;
1504
1505 frame = g_slice_new0 (GstVideoCodecFrame);
1506
1507 frame->ref_count = 1;
1508
1509 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1510 frame->system_frame_number = priv->system_frame_number;
1511 priv->system_frame_number++;
1512
1513 frame->presentation_frame_number = priv->presentation_frame_number;
1514 priv->presentation_frame_number++;
1515 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1516
1517 frame->events = priv->current_frame_events;
1518 priv->current_frame_events = NULL;
1519 frame->input_buffer = buf;
1520 frame->pts = pts;
1521 frame->dts = dts;
1522 frame->duration = duration;
1523 frame->abidata.ABI.ts = pts;
1524
1525 return frame;
1526 }
1527
1528
/* Sink pad chain function: clips the input buffer to the input segment,
 * applies the timestamp adjustment, wraps the buffer in a
 * GstVideoCodecFrame, marks the frame as forced-keyframe if a matching
 * force-key-unit request is queued, and hands it to the subclass'
 * ->handle_frame.  Takes ownership of @buf. */
static GstFlowReturn
gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstVideoEncoder *encoder;
  GstVideoEncoderPrivate *priv;
  GstVideoEncoderClass *klass;
  GstVideoCodecFrame *frame;
  GstClockTime pts, duration;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 start, stop, cstart, cstop;

  encoder = GST_VIDEO_ENCODER (parent);
  priv = encoder->priv;
  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);

  /* No input caps seen yet: cannot encode anything */
  if (!encoder->priv->input_state)
    goto not_negotiated;

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  pts = GST_BUFFER_PTS (buf);
  duration = GST_BUFFER_DURATION (buf);

  GST_LOG_OBJECT (encoder,
      "received buffer of size %" G_GSIZE_FORMAT " with PTS %" GST_TIME_FORMAT
      ", DTS %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
      gst_buffer_get_size (buf), GST_TIME_ARGS (pts),
      GST_TIME_ARGS (GST_BUFFER_DTS (buf)), GST_TIME_ARGS (duration));

  start = pts;
  if (GST_CLOCK_TIME_IS_VALID (duration))
    stop = start + duration;
  else
    stop = GST_CLOCK_TIME_NONE;

  /* Drop buffers outside of segment */
  if (!gst_segment_clip (&encoder->input_segment,
          GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
    GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame");
    gst_buffer_unref (buf);
    goto done;
  }

  /* Recompute the duration from the clipped boundaries */
  if (GST_CLOCK_TIME_IS_VALID (cstop))
    duration = cstop - cstart;
  else
    duration = GST_CLOCK_TIME_NONE;

  /* Lazily compute the adjustment that lifts timestamps up to min_pts
   * the first time a too-early timestamp is seen */
  if (priv->min_pts != GST_CLOCK_TIME_NONE
      && priv->time_adjustment == GST_CLOCK_TIME_NONE) {
    if (cstart < priv->min_pts) {
      priv->time_adjustment = priv->min_pts - cstart;
    }
  }

  if (priv->time_adjustment != GST_CLOCK_TIME_NONE) {
    cstart += priv->time_adjustment;
  }

  /* incoming DTS is not really relevant and does not make sense anyway,
   * so pass along _NONE and maybe come up with something better later on */
  frame = gst_video_encoder_new_frame (encoder, buf, cstart,
      GST_CLOCK_TIME_NONE, duration);

  /* Evaluate queued force-key-unit requests against this frame */
  GST_OBJECT_LOCK (encoder);
  if (priv->force_key_unit.head) {
    GList *l;
    GstClockTime running_time;
    gboolean throttled, have_fevt = FALSE, have_pending_none_fevt = FALSE;
    GQueue matching_fevt = G_QUEUE_INIT;

    running_time =
        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
        cstart);

    /* Throttled: the last request or last produced key unit is closer
     * than min-force-key-unit-interval to this frame */
    throttled = (priv->min_force_key_unit_interval != 0 &&
        priv->min_force_key_unit_interval != GST_CLOCK_TIME_NONE &&
        ((priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE &&
                priv->last_force_key_unit_request +
                priv->min_force_key_unit_interval > running_time)
            || (priv->last_key_unit != GST_CLOCK_TIME_NONE
                && priv->last_key_unit + priv->min_force_key_unit_interval >
                running_time)));

    /* Collect all non-pending requests that apply at or before this
     * frame's running time (the queue is sorted by running time). */
    for (l = priv->force_key_unit.head; l && (!throttled || !have_fevt);
        l = l->next) {
      ForcedKeyUnitEvent *fevt = l->data;

      /* Skip pending keyunits */
      if (fevt->pending) {
        if (fevt->running_time == GST_CLOCK_TIME_NONE)
          have_pending_none_fevt = TRUE;
        continue;
      }

      /* Simple case, keyunit ASAP */
      if (fevt->running_time == GST_CLOCK_TIME_NONE) {
        have_fevt = TRUE;
        if (!throttled)
          g_queue_push_tail (&matching_fevt, fevt);
        continue;
      }

      /* Event for before this frame */
      if (fevt->running_time <= running_time) {
        have_fevt = TRUE;
        if (!throttled)
          g_queue_push_tail (&matching_fevt, fevt);
        continue;
      }

      /* Otherwise all following events are in the future */
      break;
    }

    if (throttled && have_fevt) {
      GstClockTime last_time;

      if (priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE &&
          priv->last_force_key_unit_request +
          priv->min_force_key_unit_interval > running_time) {
        last_time = priv->last_force_key_unit_request;
      } else {
        last_time = priv->last_key_unit;
      }

      GST_DEBUG_OBJECT (encoder,
          "Not requesting a new key unit yet due to throttling (%"
          GST_TIME_FORMAT " + %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
          GST_TIME_ARGS (last_time),
          GST_TIME_ARGS (priv->min_force_key_unit_interval),
          GST_TIME_ARGS (running_time));
      g_queue_clear (&matching_fevt);
    }

    if (matching_fevt.length > 0) {
      ForcedKeyUnitEvent *fevt;
      gboolean all_headers = FALSE;
      gboolean force_keyunit = FALSE;

      /* Mark matching requests pending and decide whether this frame
       * must actually become a key unit */
      while ((fevt = g_queue_pop_head (&matching_fevt))) {
        fevt->pending = TRUE;

        if ((fevt->running_time == GST_CLOCK_TIME_NONE
                && have_pending_none_fevt)
            || (priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE
                && fevt->running_time != GST_CLOCK_TIME_NONE
                && fevt->running_time <= priv->last_force_key_unit_request) ||
            (priv->last_key_unit != GST_CLOCK_TIME_NONE
                && fevt->running_time != GST_CLOCK_TIME_NONE
                && fevt->running_time <= priv->last_key_unit)) {
          GST_DEBUG_OBJECT (encoder,
              "Not requesting another key unit at running time %"
              GST_TIME_FORMAT, GST_TIME_ARGS (fevt->running_time));
        } else {
          force_keyunit = TRUE;
          fevt->frame_id = frame->system_frame_number;
          if (fevt->all_headers)
            all_headers = TRUE;
        }
      }

      if (force_keyunit) {
        GST_DEBUG_OBJECT (encoder,
            "Forcing a key unit at running time %" GST_TIME_FORMAT,
            GST_TIME_ARGS (running_time));

        GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame);
        if (all_headers)
          GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame);
        priv->last_force_key_unit_request = running_time;
      }
    }
  }
  GST_OBJECT_UNLOCK (encoder);

  g_queue_push_tail (&priv->frames, gst_video_codec_frame_ref (frame));

  /* new data, more finish needed */
  priv->drained = FALSE;

  GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass",
      frame->presentation_frame_number);

  frame->deadline =
      gst_segment_to_running_time (&encoder->input_segment, GST_FORMAT_TIME,
      frame->pts);

  ret = klass->handle_frame (encoder, frame);

done:
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (encoder, CORE, NEGOTIATION, (NULL),
        ("encoder not initialized"));
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
1735
/* GstElement state-change handler: calls the subclass' open/start
 * vmethods on the way up and stop/close on the way down, resetting the
 * encoder state around READY<->PAUSED transitions. */
static GstStateChangeReturn
gst_video_encoder_change_state (GstElement * element, GstStateChange transition)
{
  GstVideoEncoder *encoder;
  GstVideoEncoderClass *encoder_class;
  GstStateChangeReturn ret;

  encoder = GST_VIDEO_ENCODER (element);
  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element);

  /* Upward transitions, handled before chaining up */
  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* open device/library if needed */
      if (encoder_class->open && !encoder_class->open (encoder))
        goto open_failed;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Full reset (TRUE) before starting a new session */
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      gst_video_encoder_reset (encoder, TRUE);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      /* Initialize device/library if needed */
      if (encoder_class->start && !encoder_class->start (encoder))
        goto start_failed;
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* Downward transitions, handled after chaining up */
  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:{
      gboolean stopped = TRUE;

      if (encoder_class->stop)
        stopped = encoder_class->stop (encoder);

      /* Reset even if stop failed, then report the failure */
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      gst_video_encoder_reset (encoder, TRUE);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      if (!stopped)
        goto stop_failed;
      break;
    }
    case GST_STATE_CHANGE_READY_TO_NULL:
      /* close device/library if needed */
      if (encoder_class->close && !encoder_class->close (encoder))
        goto close_failed;
      break;
    default:
      break;
  }

  return ret;

  /* Errors */

open_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to open encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }

start_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to start encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }

stop_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to stop encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }

close_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to close encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }
}
1823
1824 static gboolean
gst_video_encoder_negotiate_default(GstVideoEncoder * encoder)1825 gst_video_encoder_negotiate_default (GstVideoEncoder * encoder)
1826 {
1827 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1828 GstAllocator *allocator;
1829 GstAllocationParams params;
1830 gboolean ret = TRUE;
1831 GstVideoCodecState *state = encoder->priv->output_state;
1832 GstVideoInfo *info = &state->info;
1833 GstQuery *query = NULL;
1834 GstVideoCodecFrame *frame;
1835 GstCaps *prevcaps;
1836 gchar *colorimetry;
1837
1838 g_return_val_if_fail (state->caps != NULL, FALSE);
1839
1840 if (encoder->priv->output_state_changed) {
1841 GstStructure *out_struct;
1842
1843 state->caps = gst_caps_make_writable (state->caps);
1844
1845 /* Fill caps */
1846 gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width,
1847 "height", G_TYPE_INT, info->height,
1848 "pixel-aspect-ratio", GST_TYPE_FRACTION,
1849 info->par_n, info->par_d, NULL);
1850 if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
1851 /* variable fps with a max-framerate */
1852 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1,
1853 "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
1854 } else {
1855 /* no variable fps or no max-framerate */
1856 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION,
1857 info->fps_n, info->fps_d, NULL);
1858 }
1859 if (state->codec_data)
1860 gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER,
1861 state->codec_data, NULL);
1862
1863 gst_caps_set_simple (state->caps, "interlace-mode", G_TYPE_STRING,
1864 gst_video_interlace_mode_to_string (info->interlace_mode), NULL);
1865 if (info->interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED &&
1866 GST_VIDEO_INFO_FIELD_ORDER (info) != GST_VIDEO_FIELD_ORDER_UNKNOWN)
1867 gst_caps_set_simple (state->caps, "field-order", G_TYPE_STRING,
1868 gst_video_field_order_to_string (GST_VIDEO_INFO_FIELD_ORDER (info)),
1869 NULL);
1870
1871 colorimetry = gst_video_colorimetry_to_string (&info->colorimetry);
1872 if (colorimetry)
1873 gst_caps_set_simple (state->caps, "colorimetry", G_TYPE_STRING,
1874 colorimetry, NULL);
1875 g_free (colorimetry);
1876
1877 if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN) {
1878 gchar *chroma_site = gst_video_chroma_site_to_string (info->chroma_site);
1879
1880 if (!chroma_site) {
1881 GST_WARNING ("Couldn't convert chroma-site 0x%x to string",
1882 info->chroma_site);
1883 } else {
1884 gst_caps_set_simple (state->caps,
1885 "chroma-site", G_TYPE_STRING, chroma_site, NULL);
1886 g_free (chroma_site);
1887 }
1888 }
1889
1890 if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) {
1891 const gchar *caps_mview_mode =
1892 gst_video_multiview_mode_to_caps_string (GST_VIDEO_INFO_MULTIVIEW_MODE
1893 (info));
1894
1895 gst_caps_set_simple (state->caps, "multiview-mode", G_TYPE_STRING,
1896 caps_mview_mode, "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
1897 GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), GST_FLAG_SET_MASK_EXACT, NULL);
1898 }
1899
1900 out_struct = gst_caps_get_structure (state->caps, 0);
1901
1902 /* forward upstream mastering display info and content light level
1903 * if subclass didn't set */
1904 if (state->mastering_display_info &&
1905 !gst_structure_has_field (out_struct, "mastering-display-info")) {
1906 gst_video_mastering_display_info_add_to_caps
1907 (state->mastering_display_info, state->caps);
1908 }
1909
1910 if (state->content_light_level &&
1911 !gst_structure_has_field (out_struct, "content-light-level")) {
1912 gst_video_content_light_level_add_to_caps (state->content_light_level,
1913 state->caps);
1914 }
1915
1916 encoder->priv->output_state_changed = FALSE;
1917 }
1918
1919 if (state->allocation_caps == NULL)
1920 state->allocation_caps = gst_caps_ref (state->caps);
1921
1922 /* Push all pending pre-caps events of the oldest frame before
1923 * setting caps */
1924 frame = encoder->priv->frames.head ? encoder->priv->frames.head->data : NULL;
1925 if (frame || encoder->priv->current_frame_events) {
1926 GList **events, *l;
1927
1928 if (frame) {
1929 events = &frame->events;
1930 } else {
1931 events = &encoder->priv->current_frame_events;
1932 }
1933
1934 for (l = g_list_last (*events); l;) {
1935 GstEvent *event = GST_EVENT (l->data);
1936 GList *tmp;
1937
1938 if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
1939 gst_video_encoder_push_event (encoder, event);
1940 tmp = l;
1941 l = l->prev;
1942 *events = g_list_delete_link (*events, tmp);
1943 } else {
1944 l = l->prev;
1945 }
1946 }
1947 }
1948
1949 prevcaps = gst_pad_get_current_caps (encoder->srcpad);
1950 if (!prevcaps || !gst_caps_is_equal (prevcaps, state->caps))
1951 ret = gst_pad_set_caps (encoder->srcpad, state->caps);
1952 else
1953 ret = TRUE;
1954 if (prevcaps)
1955 gst_caps_unref (prevcaps);
1956
1957 if (!ret)
1958 goto done;
1959
1960 query = gst_query_new_allocation (state->allocation_caps, TRUE);
1961 if (!gst_pad_peer_query (encoder->srcpad, query)) {
1962 GST_DEBUG_OBJECT (encoder, "didn't get downstream ALLOCATION hints");
1963 }
1964
1965 g_assert (klass->decide_allocation != NULL);
1966 ret = klass->decide_allocation (encoder, query);
1967
1968 GST_DEBUG_OBJECT (encoder, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, ret,
1969 query);
1970
1971 if (!ret)
1972 goto no_decide_allocation;
1973
1974 /* we got configuration from our peer or the decide_allocation method,
1975 * parse them */
1976 if (gst_query_get_n_allocation_params (query) > 0) {
1977 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
1978 } else {
1979 allocator = NULL;
1980 gst_allocation_params_init (¶ms);
1981 }
1982
1983 if (encoder->priv->allocator)
1984 gst_object_unref (encoder->priv->allocator);
1985 encoder->priv->allocator = allocator;
1986 encoder->priv->params = params;
1987
1988 done:
1989 if (query)
1990 gst_query_unref (query);
1991
1992 return ret;
1993
1994 /* Errors */
1995 no_decide_allocation:
1996 {
1997 GST_WARNING_OBJECT (encoder, "Subclass failed to decide allocation");
1998 goto done;
1999 }
2000 }
2001
2002 static gboolean
gst_video_encoder_negotiate_unlocked(GstVideoEncoder * encoder)2003 gst_video_encoder_negotiate_unlocked (GstVideoEncoder * encoder)
2004 {
2005 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2006 gboolean ret = TRUE;
2007
2008 if (G_LIKELY (klass->negotiate))
2009 ret = klass->negotiate (encoder);
2010
2011 return ret;
2012 }
2013
/**
 * gst_video_encoder_negotiate:
 * @encoder: a #GstVideoEncoder
 *
 * Negotiate with downstream elements to currently configured #GstVideoCodecState.
 * Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
 * negotiate fails.
 *
 * Returns: %TRUE if the negotiation succeeded, else %FALSE.
 */
gboolean
gst_video_encoder_negotiate (GstVideoEncoder * encoder)
{
  GstVideoEncoderClass *klass;
  gboolean ret = TRUE;

  g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
  g_return_val_if_fail (encoder->priv->output_state, FALSE);

  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  /* Always clear GST_PAD_FLAG_NEED_RECONFIGURE; the return value is
   * deliberately ignored since we negotiate unconditionally here */
  gst_pad_check_reconfigure (encoder->srcpad);
  if (klass->negotiate) {
    ret = klass->negotiate (encoder);
    /* on failure, re-mark the pad so negotiation is retried later */
    if (!ret)
      gst_pad_mark_reconfigure (encoder->srcpad);
  }
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return ret;
}
2046
/**
 * gst_video_encoder_allocate_output_buffer:
 * @encoder: a #GstVideoEncoder
 * @size: size of the buffer
 *
 * Helper function that allocates a buffer to hold an encoded video frame
 * for @encoder's current #GstVideoCodecState.
 *
 * Returns: (transfer full): allocated buffer
 */
GstBuffer *
gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder, gsize size)
{
  GstBuffer *buffer;
  gboolean needs_reconfigure = FALSE;

  g_return_val_if_fail (size > 0, NULL);

  GST_DEBUG ("alloc src buffer");

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  /* Renegotiate first if the output state changed or downstream requested
   * reconfiguration, so the allocation uses up-to-date allocator/params */
  needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
  if (G_UNLIKELY (encoder->priv->output_state_changed
          || (encoder->priv->output_state && needs_reconfigure))) {
    if (!gst_video_encoder_negotiate_unlocked (encoder)) {
      GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
      /* keep the flag set so negotiation is retried on the next buffer */
      gst_pad_mark_reconfigure (encoder->srcpad);
      goto fallback;
    }
  }

  buffer =
      gst_buffer_new_allocate (encoder->priv->allocator, size,
      &encoder->priv->params);
  if (!buffer) {
    GST_INFO_OBJECT (encoder, "couldn't allocate output buffer");
    goto fallback;
  }

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return buffer;

fallback:
  /* fall back to the default system-memory allocator */
  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return buffer;
}
2097
/**
 * gst_video_encoder_allocate_output_frame:
 * @encoder: a #GstVideoEncoder
 * @frame: a #GstVideoCodecFrame
 * @size: size of the buffer
 *
 * Helper function that allocates a buffer to hold an encoded video frame for @encoder's
 * current #GstVideoCodecState. Subclass should already have configured video
 * state and set src pad caps.
 *
 * The buffer allocated here is owned by the frame and you should only
 * keep references to the frame, not the buffer.
 *
 * Returns: %GST_FLOW_OK if an output buffer could be allocated
 */
GstFlowReturn
gst_video_encoder_allocate_output_frame (GstVideoEncoder *
    encoder, GstVideoCodecFrame * frame, gsize size)
{
  gboolean needs_reconfigure = FALSE;

  /* the frame must not already own an output buffer */
  g_return_val_if_fail (frame->output_buffer == NULL, GST_FLOW_ERROR);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  /* Renegotiate if needed; unlike allocate_output_buffer() there is no
   * fallback path here - allocation proceeds with the current
   * allocator/params even when negotiation failed */
  needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
  if (G_UNLIKELY (encoder->priv->output_state_changed
          || (encoder->priv->output_state && needs_reconfigure))) {
    if (!gst_video_encoder_negotiate_unlocked (encoder)) {
      GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
      gst_pad_mark_reconfigure (encoder->srcpad);
    }
  }

  GST_LOG_OBJECT (encoder, "alloc buffer size %" G_GSIZE_FORMAT, size);

  frame->output_buffer =
      gst_buffer_new_allocate (encoder->priv->allocator, size,
      &encoder->priv->params);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return frame->output_buffer ? GST_FLOW_OK : GST_FLOW_ERROR;
}
2141
2142 static void
gst_video_encoder_release_frame(GstVideoEncoder * enc,GstVideoCodecFrame * frame)2143 gst_video_encoder_release_frame (GstVideoEncoder * enc,
2144 GstVideoCodecFrame * frame)
2145 {
2146 GList *link;
2147
2148 /* unref once from the list */
2149 link = g_queue_find (&enc->priv->frames, frame);
2150 if (link) {
2151 gst_video_codec_frame_unref (frame);
2152 g_queue_delete_link (&enc->priv->frames, link);
2153 }
2154 /* unref because this function takes ownership */
2155 gst_video_codec_frame_unref (frame);
2156 }
2157
2158 static gboolean
gst_video_encoder_transform_meta_default(GstVideoEncoder * encoder,GstVideoCodecFrame * frame,GstMeta * meta)2159 gst_video_encoder_transform_meta_default (GstVideoEncoder *
2160 encoder, GstVideoCodecFrame * frame, GstMeta * meta)
2161 {
2162 const GstMetaInfo *info = meta->info;
2163 const gchar *const *tags;
2164 const gchar *const supported_tags[] = {
2165 GST_META_TAG_VIDEO_STR,
2166 GST_META_TAG_VIDEO_ORIENTATION_STR,
2167 GST_META_TAG_VIDEO_SIZE_STR,
2168 NULL,
2169 };
2170
2171 tags = gst_meta_api_type_get_tags (info->api);
2172
2173 if (!tags)
2174 return TRUE;
2175
2176 while (*tags) {
2177 if (!g_strv_contains (supported_tags, *tags))
2178 return FALSE;
2179 tags++;
2180 }
2181
2182 return TRUE;
2183 }
2184
/* Closure passed to foreach_metadata() when copying metadata from the
 * input buffer onto a frame's output buffer. */
typedef struct
{
  GstVideoEncoder *encoder;     /* encoder whose transform_meta vmethod decides what to copy */
  GstVideoCodecFrame *frame;    /* frame owning the destination output_buffer */
} CopyMetaData;
2190
2191 static gboolean
foreach_metadata(GstBuffer * inbuf,GstMeta ** meta,gpointer user_data)2192 foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
2193 {
2194 CopyMetaData *data = user_data;
2195 GstVideoEncoder *encoder = data->encoder;
2196 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2197 GstVideoCodecFrame *frame = data->frame;
2198 const GstMetaInfo *info = (*meta)->info;
2199 gboolean do_copy = FALSE;
2200
2201 if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) {
2202 /* never call the transform_meta with memory specific metadata */
2203 GST_DEBUG_OBJECT (encoder, "not copying memory specific metadata %s",
2204 g_type_name (info->api));
2205 do_copy = FALSE;
2206 } else if (klass->transform_meta) {
2207 do_copy = klass->transform_meta (encoder, frame, *meta);
2208 GST_DEBUG_OBJECT (encoder, "transformed metadata %s: copy: %d",
2209 g_type_name (info->api), do_copy);
2210 }
2211
2212 /* we only copy metadata when the subclass implemented a transform_meta
2213 * function and when it returns %TRUE */
2214 if (do_copy && info->transform_func) {
2215 GstMetaTransformCopy copy_data = { FALSE, 0, -1 };
2216 GST_DEBUG_OBJECT (encoder, "copy metadata %s", g_type_name (info->api));
2217 /* simply copy then */
2218 info->transform_func (frame->output_buffer, *meta, inbuf,
2219 _gst_meta_transform_copy, ©_data);
2220 }
2221 return TRUE;
2222 }
2223
/* Account for a dropped frame and post a QoS message on the bus so
 * applications can observe the drop.  Called with the stream lock held;
 * does NOT release @frame - the caller still owns it. */
static void
gst_video_encoder_drop_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = enc->priv;
  GstClockTime stream_time, jitter, earliest_time, qostime, timestamp;
  GstSegment *segment;
  GstMessage *qos_msg;
  gdouble proportion;

  GST_DEBUG_OBJECT (enc, "dropping frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->pts));

  priv->dropped++;

  /* post QoS message */
  /* snapshot the QoS state under the object lock */
  GST_OBJECT_LOCK (enc);
  proportion = priv->proportion;
  earliest_time = priv->earliest_time;
  GST_OBJECT_UNLOCK (enc);

  timestamp = frame->pts;
  /* prefer the output segment; fall back to the input segment if no
   * output segment has been configured yet */
  segment = &enc->output_segment;
  if (G_UNLIKELY (segment->format == GST_FORMAT_UNDEFINED))
    segment = &enc->input_segment;
  stream_time =
      gst_segment_to_stream_time (segment, GST_FORMAT_TIME, timestamp);
  qostime = gst_segment_to_running_time (segment, GST_FORMAT_TIME, timestamp);
  /* jitter: how late this frame is relative to the earliest acceptable time */
  jitter = GST_CLOCK_DIFF (qostime, earliest_time);
  qos_msg =
      gst_message_new_qos (GST_OBJECT_CAST (enc), FALSE, qostime, stream_time,
      timestamp, GST_CLOCK_TIME_NONE);
  gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
  gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
      priv->processed, priv->dropped);
  gst_element_post_message (GST_ELEMENT_CAST (enc), qos_msg);
}
2260
2261 static GstFlowReturn
gst_video_encoder_can_push_unlocked(GstVideoEncoder * encoder)2262 gst_video_encoder_can_push_unlocked (GstVideoEncoder * encoder)
2263 {
2264 GstVideoEncoderPrivate *priv = encoder->priv;
2265 gboolean needs_reconfigure;
2266
2267 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
2268 if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
2269 && needs_reconfigure))) {
2270 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
2271 gst_pad_mark_reconfigure (encoder->srcpad);
2272 if (GST_PAD_IS_FLUSHING (encoder->srcpad))
2273 return GST_FLOW_FLUSHING;
2274 else
2275 return GST_FLOW_NOT_NEGOTIATED;
2276 }
2277 }
2278
2279 if (G_UNLIKELY (priv->output_state == NULL)) {
2280 GST_ERROR_OBJECT (encoder, "Output state was not configured");
2281 GST_ELEMENT_ERROR (encoder, LIBRARY, FAILED,
2282 ("Output state was not configured"), (NULL));
2283 return GST_FLOW_ERROR;
2284 }
2285
2286 return GST_FLOW_OK;
2287 }
2288
2289 static void
gst_video_encoder_push_pending_unlocked(GstVideoEncoder * encoder,GstVideoCodecFrame * frame)2290 gst_video_encoder_push_pending_unlocked (GstVideoEncoder * encoder,
2291 GstVideoCodecFrame * frame)
2292 {
2293 GstVideoEncoderPrivate *priv = encoder->priv;
2294 GList *l;
2295
2296 /* Push all pending events that arrived before this frame */
2297 for (l = priv->frames.head; l; l = l->next) {
2298 GstVideoCodecFrame *tmp = l->data;
2299
2300 if (tmp->events) {
2301 GList *k;
2302
2303 for (k = g_list_last (tmp->events); k; k = k->prev)
2304 gst_video_encoder_push_event (encoder, k->data);
2305 g_list_free (tmp->events);
2306 tmp->events = NULL;
2307 }
2308
2309 if (tmp == frame)
2310 break;
2311 }
2312
2313 gst_video_encoder_check_and_push_tags (encoder);
2314 }
2315
/* Infer a DTS for @frame when the subclass did not set one.  Called with
 * the stream lock held. */
static void
gst_video_encoder_infer_dts_unlocked (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  /* DTS is expected to be monotonously increasing,
   * so a good guess is the lowest unsent PTS (all being OK) */
  GstVideoEncoderPrivate *priv = encoder->priv;
  GList *l;
  GstClockTime min_ts = GST_CLOCK_TIME_NONE;
  GstVideoCodecFrame *oframe = NULL;
  gboolean seen_none = FALSE;

  /* some maintenance regardless */
  /* find the pending frame with the lowest stored ts; remember whether
   * any pending frame has no valid ts at all */
  for (l = priv->frames.head; l; l = l->next) {
    GstVideoCodecFrame *tmp = l->data;

    if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
      seen_none = TRUE;
      continue;
    }

    if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
      min_ts = tmp->abidata.ABI.ts;
      oframe = tmp;
    }
  }
  /* save a ts if needed */
  /* the minimum ts is being consumed by this frame, so hand this
   * frame's ts over to the frame that held the minimum */
  if (oframe && oframe != frame) {
    oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
  }

  /* and set if needed */
  /* only infer when every pending frame had a valid ts, otherwise the
   * minimum would be unreliable */
  if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
    frame->dts = min_ts;
    /* NOTE(review): the message prints frame->pts, but the value actually
     * used is min_ts - confirm whether logging min_ts was intended */
    GST_DEBUG_OBJECT (encoder,
        "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
        GST_TIME_ARGS (frame->pts));
  }
}
2355
/* Push the stored header buffers downstream if new headers are pending.
 * The first header consumes the @key_unit flag (it is sent as a
 * non-delta unit) and the @discont out-flag (cleared once marked on a
 * header so the following frame buffer is not marked again).  Called
 * with the stream lock held. */
static void
gst_video_encoder_send_header_unlocked (GstVideoEncoder * encoder,
    gboolean * discont, gboolean key_unit)
{
  GstVideoEncoderPrivate *priv = encoder->priv;

  if (G_UNLIKELY (priv->new_headers)) {
    GList *tmp;

    GST_DEBUG_OBJECT (encoder, "Sending headers");

    /* First make all buffers metadata-writable */
    for (tmp = priv->headers; tmp; tmp = tmp->next) {
      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);

      /* store the (possibly new) writable buffer back into the list */
      tmp->data = tmpbuf = gst_buffer_make_writable (tmpbuf);

      /* headers count towards the bitrate estimate */
      GST_OBJECT_LOCK (encoder);
      priv->bytes += gst_buffer_get_size (tmpbuf);
      GST_OBJECT_UNLOCK (encoder);

      if (G_UNLIKELY (key_unit)) {
        /* only the first header is flagged as a key unit */
        key_unit = FALSE;
        GST_BUFFER_FLAG_UNSET (tmpbuf, GST_BUFFER_FLAG_DELTA_UNIT);
      } else {
        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DELTA_UNIT);
      }

      if (G_UNLIKELY (*discont)) {
        GST_LOG_OBJECT (encoder, "marking discont");
        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
        /* tell the caller the discont has been consumed by a header */
        *discont = FALSE;
      } else {
        GST_BUFFER_FLAG_UNSET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
      }

      /* push a ref; the list keeps ownership of the header buffer */
      gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
    }
    priv->new_headers = FALSE;
  }
}
2397
2398 static void
gst_video_encoder_transform_meta_unlocked(GstVideoEncoder * encoder,GstVideoCodecFrame * frame)2399 gst_video_encoder_transform_meta_unlocked (GstVideoEncoder * encoder,
2400 GstVideoCodecFrame * frame)
2401 {
2402 GstVideoEncoderClass *encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2403
2404 if (encoder_class->transform_meta) {
2405 if (G_LIKELY (frame->input_buffer)) {
2406 CopyMetaData data;
2407
2408 data.encoder = encoder;
2409 data.frame = frame;
2410 gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
2411 } else {
2412 GST_FIXME_OBJECT (encoder,
2413 "Can't copy metadata because input frame disappeared");
2414 }
2415 }
2416 }
2417
/* Handle pending force-key-unit requests for a sync-point @frame: collect
 * every matching request under the object lock, then (lock released) emit
 * a downstream force-key-unit event for each one.  Sets *send_headers when
 * any request asked for all headers.  Called with the stream lock held. */
static void
gst_video_encoder_send_key_unit_unlocked (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame, gboolean * send_headers)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstClockTime stream_time, running_time;
  GstEvent *ev;
  GList *l;
  GQueue matching_fevt = G_QUEUE_INIT;
  ForcedKeyUnitEvent *fevt;

  running_time =
      gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
      frame->pts);

  /* phase 1: move matching requests into a local queue under the lock;
   * events cannot be pushed while holding the object lock */
  GST_OBJECT_LOCK (encoder);
  for (l = priv->force_key_unit.head; l;) {
    fevt = l->data;

    /* Skip non-pending keyunits */
    if (!fevt->pending) {
      l = l->next;
      continue;
    }

    /* Exact match using the frame id */
    if (frame->system_frame_number == fevt->frame_id) {
      GList *next = l->next;
      g_queue_push_tail (&matching_fevt, fevt);
      g_queue_delete_link (&priv->force_key_unit, l);
      l = next;
      continue;
    }

    /* Simple case, keyunit ASAP */
    if (fevt->running_time == GST_CLOCK_TIME_NONE) {
      GList *next = l->next;
      g_queue_push_tail (&matching_fevt, fevt);
      g_queue_delete_link (&priv->force_key_unit, l);
      l = next;
      continue;
    }

    /* Event for before this frame */
    if (fevt->running_time <= running_time) {
      GList *next = l->next;
      g_queue_push_tail (&matching_fevt, fevt);
      g_queue_delete_link (&priv->force_key_unit, l);
      l = next;
      continue;
    }

    /* Otherwise all following events are in the future */
    /* the queue is ordered by running time, so stop at the first
     * request scheduled after this frame */
    break;
  }

  GST_OBJECT_UNLOCK (encoder);

  /* phase 2: emit one downstream force-key-unit event per collected
   * request, now that no lock is held */
  while ((fevt = g_queue_pop_head (&matching_fevt))) {
    stream_time =
        gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
        frame->pts);

    ev = gst_video_event_new_downstream_force_key_unit
        (frame->pts, stream_time, running_time, fevt->all_headers, fevt->count);

    gst_video_encoder_push_event (encoder, ev);

    if (fevt->all_headers)
      *send_headers = TRUE;

    GST_DEBUG_OBJECT (encoder,
        "Forced key unit: running-time %" GST_TIME_FORMAT
        ", all_headers %d, count %u",
        GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
    forced_key_unit_event_free (fevt);
  }
}
2496
/**
 * gst_video_encoder_finish_frame:
 * @encoder: a #GstVideoEncoder
 * @frame: (transfer full): an encoded #GstVideoCodecFrame
 *
 * @frame must have a valid encoded data buffer, whose metadata fields
 * are then appropriately set according to frame data or no buffer at
 * all if the frame should be dropped.
 * It is subsequently pushed downstream or provided to @pre_push.
 * In any case, the frame is considered finished and released.
 *
 * After calling this function the output buffer of the frame is to be
 * considered read-only. This function will also change the metadata
 * of the buffer.
 *
 * Returns: a #GstFlowReturn resulting from sending data downstream
 */
GstFlowReturn
gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoEncoderClass *encoder_class;
  gboolean send_headers = FALSE;
  gboolean key_unit = FALSE;
  gboolean discont = FALSE;
  GstBuffer *buffer;

  g_return_val_if_fail (frame, GST_FLOW_ERROR);

  /* the very first complete frame of the stream carries DISCONT */
  discont = (frame->presentation_frame_number == 0
      && frame->abidata.ABI.num_subframes == 0);

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_LOG_OBJECT (encoder,
      "finish frame fpn %d sync point: %d", frame->presentation_frame_number,
      GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));

  GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
      ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
      GST_TIME_ARGS (frame->dts));

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  /* negotiate if needed; bail out when we cannot push */
  ret = gst_video_encoder_can_push_unlocked (encoder);
  if (ret != GST_FLOW_OK)
    goto done;

  /* pending pre-frame events were already pushed if subframes of this
   * frame went out through finish_subframe() */
  if (frame->abidata.ABI.num_subframes == 0)
    gst_video_encoder_push_pending_unlocked (encoder, frame);

  /* no buffer data means this frame is skipped/dropped */
  if (!frame->output_buffer) {
    gst_video_encoder_drop_frame (encoder, frame);
    goto done;
  }

  priv->processed++;

  /* answer any matching force-key-unit requests now that we have a
   * sync point to attach them to */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit.head)
    gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);

  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
      && frame->abidata.ABI.num_subframes == 0) {
    priv->distance_from_sync = 0;
    key_unit = TRUE;
    /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
    if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
      frame->dts = frame->pts;
    }
    priv->last_key_unit =
        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
        frame->pts);
  }

  gst_video_encoder_infer_dts_unlocked (encoder, frame);

  frame->distance_from_sync = priv->distance_from_sync;
  priv->distance_from_sync++;

  /* We need a writable buffer for the metadata changes below */
  frame->output_buffer = gst_buffer_make_writable (frame->output_buffer);

  GST_BUFFER_PTS (frame->output_buffer) = frame->pts;
  GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
  GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;

  /* At this stage we have a full frame in subframe use case ,
   * let's mark it to enabled some latency optimization
   * in some uses cases like RTP. */

  GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_MARKER);

  GST_OBJECT_LOCK (encoder);
  /* update rate estimate */
  priv->bytes += gst_buffer_get_size (frame->output_buffer);
  if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
    priv->time += frame->duration;
  } else {
    /* better none than nothing valid */
    priv->time = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (encoder);

  /* a force-key-unit request asked for headers to be re-sent */
  if (G_UNLIKELY (send_headers))
    priv->new_headers = TRUE;

  /* may consume key_unit/discont by flagging a header buffer instead */
  gst_video_encoder_send_header_unlocked (encoder, &discont, key_unit);

  if (key_unit) {
    GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  if (G_UNLIKELY (discont)) {
    GST_LOG_OBJECT (encoder, "marking discont");
    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
  }

  if (encoder_class->pre_push)
    ret = encoder_class->pre_push (encoder, frame);

  gst_video_encoder_transform_meta_unlocked (encoder, frame);

  /* Get an additional ref to the buffer, which is going to be pushed
   * downstream, the original ref is owned by the frame */
  if (ret == GST_FLOW_OK)
    buffer = gst_buffer_ref (frame->output_buffer);

  /* Release frame so the buffer is writable when we push it downstream
   * if possible, i.e. if the subclass does not hold additional references
   * to the frame
   */
  gst_video_encoder_release_frame (encoder, frame);
  frame = NULL;

  /* push without the stream lock so downstream can call back into us */
  if (ret == GST_FLOW_OK) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    ret = gst_pad_push (encoder->srcpad, buffer);
    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  }

done:
  /* handed out */
  if (frame)
    gst_video_encoder_release_frame (encoder, frame);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return ret;
}
2651
/**
 * gst_video_encoder_finish_subframe:
 * @encoder: a #GstVideoEncoder
 * @frame: (transfer none): a #GstVideoCodecFrame being encoded
 *
 * If multiple subframes are produced for one input frame then use this method
 * for each subframe, except for the last one. Before calling this function,
 * you need to fill frame->output_buffer with the encoded buffer to push.
 *
 * You must call #gst_video_encoder_finish_frame() for the last sub-frame
 * to tell the encoder that the frame has been fully encoded.
 *
 * This function will change the metadata of @frame and frame->output_buffer
 * will be pushed downstream.
 *
 * Returns: a #GstFlowReturn resulting from pushing the buffer downstream.
 *
 * Since: 1.18
 */
GstFlowReturn
gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstVideoEncoderClass *encoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *subframe_buffer = NULL;
  gboolean discont = FALSE;
  gboolean send_headers = FALSE;
  gboolean key_unit = FALSE;

  g_return_val_if_fail (frame, GST_FLOW_ERROR);
  g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);

  /* take over the frame's output buffer; it is consumed by this call */
  subframe_buffer = frame->output_buffer;

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  /* only the very first subframe of the very first frame is a discont */
  discont = (frame->presentation_frame_number == 0
      && frame->abidata.ABI.num_subframes == 0);

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_LOG_OBJECT (encoder,
      "finish subframe %u of frame fpn %u PTS %" GST_TIME_FORMAT ", DTS %"
      GST_TIME_FORMAT " sync point: %d", frame->abidata.ABI.num_subframes,
      frame->presentation_frame_number, GST_TIME_ARGS (frame->pts),
      GST_TIME_ARGS (frame->dts), GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));

  /* negotiate if needed; bail out when we cannot push */
  ret = gst_video_encoder_can_push_unlocked (encoder);
  if (ret != GST_FLOW_OK)
    goto done;

  /* answer any matching force-key-unit requests */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit.head)
    gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);

  /* Push pending events only for the first subframe ie segment event.
   * Push new incoming events on finish_frame otherwise.
   */
  if (frame->abidata.ABI.num_subframes == 0)
    gst_video_encoder_push_pending_unlocked (encoder, frame);

  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
      && frame->abidata.ABI.num_subframes == 0) {
    priv->distance_from_sync = 0;
    key_unit = TRUE;
    /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
    if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
      frame->dts = frame->pts;
    }
    priv->last_key_unit =
        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
        frame->pts);
  }

  gst_video_encoder_infer_dts_unlocked (encoder, frame);

  /* We need a writable buffer for the metadata changes below */
  subframe_buffer = gst_buffer_make_writable (subframe_buffer);

  GST_BUFFER_PTS (subframe_buffer) = frame->pts;
  GST_BUFFER_DTS (subframe_buffer) = frame->dts;
  GST_BUFFER_DURATION (subframe_buffer) = frame->duration;

  GST_OBJECT_LOCK (encoder);
  /* update rate estimate */
  priv->bytes += gst_buffer_get_size (subframe_buffer);
  GST_OBJECT_UNLOCK (encoder);

  /* a force-key-unit request asked for headers to be re-sent */
  if (G_UNLIKELY (send_headers))
    priv->new_headers = TRUE;

  /* may consume key_unit/discont by flagging a header buffer instead */
  gst_video_encoder_send_header_unlocked (encoder, &discont, key_unit);

  if (key_unit) {
    GST_BUFFER_FLAG_UNSET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  if (G_UNLIKELY (discont)) {
    GST_LOG_OBJECT (encoder, "marking discont buffer: %" GST_PTR_FORMAT,
        subframe_buffer);
    GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DISCONT);
  }

  if (encoder_class->pre_push) {
    ret = encoder_class->pre_push (encoder, frame);
  }

  gst_video_encoder_transform_meta_unlocked (encoder, frame);

  /* push without the stream lock so downstream can call back into us;
   * gst_pad_push takes ownership of the buffer */
  if (ret == GST_FLOW_OK) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    ret = gst_pad_push (encoder->srcpad, subframe_buffer);
    subframe_buffer = NULL;
    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  }

done:
  frame->abidata.ABI.num_subframes++;
  /* drop the buffer if it was never pushed (error paths) */
  if (subframe_buffer)
    gst_buffer_unref (subframe_buffer);
  /* the frame no longer owns an output buffer; the caller fills a new
   * one before the next finish_subframe()/finish_frame() */
  frame->output_buffer = NULL;

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return ret;
}
2780
2781 /**
2782 * gst_video_encoder_get_output_state:
2783 * @encoder: a #GstVideoEncoder
2784 *
2785 * Get the current #GstVideoCodecState
2786 *
2787 * Returns: (transfer full): #GstVideoCodecState describing format of video data.
2788 */
2789 GstVideoCodecState *
gst_video_encoder_get_output_state(GstVideoEncoder * encoder)2790 gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
2791 {
2792 GstVideoCodecState *state;
2793
2794 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2795 state = gst_video_codec_state_ref (encoder->priv->output_state);
2796 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2797
2798 return state;
2799 }
2800
2801 /**
2802 * gst_video_encoder_set_output_state:
2803 * @encoder: a #GstVideoEncoder
2804 * @caps: (transfer full): the #GstCaps to use for the output
2805 * @reference: (allow-none) (transfer none): An optional reference @GstVideoCodecState
2806 *
2807 * Creates a new #GstVideoCodecState with the specified caps as the output state
2808 * for the encoder.
2809 * Any previously set output state on @encoder will be replaced by the newly
2810 * created one.
2811 *
2812 * The specified @caps should not contain any resolution, pixel-aspect-ratio,
2813 * framerate, codec-data, .... Those should be specified instead in the returned
2814 * #GstVideoCodecState.
2815 *
2816 * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
2817 * or framerate) from an existing #GstVideoCodecState, it can be provided as a
2818 * @reference.
2819 *
2820 * If the subclass wishes to override some fields from the output state (like
2821 * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
2822 *
2823 * The new output state will only take effect (set on pads and buffers) starting
2824 * from the next call to #gst_video_encoder_finish_frame().
2825 *
2826 * Returns: (transfer full): the newly configured output state.
2827 */
2828 GstVideoCodecState *
gst_video_encoder_set_output_state(GstVideoEncoder * encoder,GstCaps * caps,GstVideoCodecState * reference)2829 gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
2830 GstVideoCodecState * reference)
2831 {
2832 GstVideoEncoderPrivate *priv = encoder->priv;
2833 GstVideoCodecState *state;
2834
2835 g_return_val_if_fail (caps != NULL, NULL);
2836
2837 state = _new_output_state (caps, reference);
2838 if (!state)
2839 return NULL;
2840
2841 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2842 if (priv->output_state)
2843 gst_video_codec_state_unref (priv->output_state);
2844 priv->output_state = gst_video_codec_state_ref (state);
2845
2846 if (priv->output_state != NULL && priv->output_state->info.fps_n > 0) {
2847 priv->qos_frame_duration =
2848 gst_util_uint64_scale (GST_SECOND, priv->output_state->info.fps_d,
2849 priv->output_state->info.fps_n);
2850 } else {
2851 priv->qos_frame_duration = 0;
2852 }
2853
2854 priv->output_state_changed = TRUE;
2855 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2856
2857 return state;
2858 }
2859
/**
 * gst_video_encoder_set_latency:
 * @encoder: a #GstVideoEncoder
 * @min_latency: minimum latency
 * @max_latency: maximum latency
 *
 * Informs baseclass of encoding latency. @min_latency must be a valid
 * #GstClockTime and @max_latency must not be smaller than @min_latency.
 */
void
gst_video_encoder_set_latency (GstVideoEncoder * encoder,
    GstClockTime min_latency, GstClockTime max_latency)
{
  g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
  g_return_if_fail (max_latency >= min_latency);

  /* latency values are protected by the object lock */
  GST_OBJECT_LOCK (encoder);
  encoder->priv->min_latency = min_latency;
  encoder->priv->max_latency = max_latency;
  GST_OBJECT_UNLOCK (encoder);

  /* tell the pipeline latency may have changed so it can re-query */
  gst_element_post_message (GST_ELEMENT_CAST (encoder),
      gst_message_new_latency (GST_OBJECT_CAST (encoder)));
}
2883
/**
 * gst_video_encoder_get_latency:
 * @encoder: a #GstVideoEncoder
 * @min_latency: (out) (allow-none): address of variable in which to store the
 *     configured minimum latency, or %NULL
 * @max_latency: (out) (allow-none): address of variable in which to store the
 *     configured maximum latency, or %NULL
 *
 * Query the configured encoding latency. Results will be returned via
 * @min_latency and @max_latency.
 */
void
gst_video_encoder_get_latency (GstVideoEncoder * encoder,
    GstClockTime * min_latency, GstClockTime * max_latency)
{
  /* read both values atomically under the object lock */
  GST_OBJECT_LOCK (encoder);
  if (min_latency)
    *min_latency = encoder->priv->min_latency;
  if (max_latency)
    *max_latency = encoder->priv->max_latency;
  GST_OBJECT_UNLOCK (encoder);
}
2906
2907 /**
2908 * gst_video_encoder_get_oldest_frame:
2909 * @encoder: a #GstVideoEncoder
2910 *
2911 * Get the oldest unfinished pending #GstVideoCodecFrame
2912 *
2913 * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame
2914 */
2915 GstVideoCodecFrame *
gst_video_encoder_get_oldest_frame(GstVideoEncoder * encoder)2916 gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
2917 {
2918 GstVideoCodecFrame *frame = NULL;
2919
2920 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2921 if (encoder->priv->frames.head)
2922 frame = gst_video_codec_frame_ref (encoder->priv->frames.head->data);
2923 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2924
2925 return (GstVideoCodecFrame *) frame;
2926 }
2927
2928 /**
2929 * gst_video_encoder_get_frame:
2930 * @encoder: a #GstVideoEncoder
2931 * @frame_number: system_frame_number of a frame
2932 *
2933 * Get a pending unfinished #GstVideoCodecFrame
2934 *
2935 * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
2936 */
2937 GstVideoCodecFrame *
gst_video_encoder_get_frame(GstVideoEncoder * encoder,int frame_number)2938 gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
2939 {
2940 GList *g;
2941 GstVideoCodecFrame *frame = NULL;
2942
2943 GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number);
2944
2945 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2946 for (g = encoder->priv->frames.head; g; g = g->next) {
2947 GstVideoCodecFrame *tmp = g->data;
2948
2949 if (tmp->system_frame_number == frame_number) {
2950 frame = gst_video_codec_frame_ref (tmp);
2951 break;
2952 }
2953 }
2954 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2955
2956 return frame;
2957 }
2958
2959 /**
2960 * gst_video_encoder_get_frames:
2961 * @encoder: a #GstVideoEncoder
2962 *
2963 * Get all pending unfinished #GstVideoCodecFrame
2964 *
2965 * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
2966 */
2967 GList *
gst_video_encoder_get_frames(GstVideoEncoder * encoder)2968 gst_video_encoder_get_frames (GstVideoEncoder * encoder)
2969 {
2970 GList *frames;
2971
2972 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2973 frames =
2974 g_list_copy_deep (encoder->priv->frames.head,
2975 (GCopyFunc) gst_video_codec_frame_ref, NULL);
2976 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2977
2978 return frames;
2979 }
2980
2981 /**
2982 * gst_video_encoder_merge_tags:
2983 * @encoder: a #GstVideoEncoder
2984 * @tags: (allow-none): a #GstTagList to merge, or NULL to unset
2985 * previously-set tags
2986 * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
2987 *
2988 * Sets the video encoder tags and how they should be merged with any
2989 * upstream stream tags. This will override any tags previously-set
2990 * with gst_video_encoder_merge_tags().
2991 *
2992 * Note that this is provided for convenience, and the subclass is
2993 * not required to use this and can still do tag handling on its own.
2994 *
2995 * MT safe.
2996 */
2997 void
gst_video_encoder_merge_tags(GstVideoEncoder * encoder,const GstTagList * tags,GstTagMergeMode mode)2998 gst_video_encoder_merge_tags (GstVideoEncoder * encoder,
2999 const GstTagList * tags, GstTagMergeMode mode)
3000 {
3001 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3002 g_return_if_fail (tags == NULL || GST_IS_TAG_LIST (tags));
3003 g_return_if_fail (tags == NULL || mode != GST_TAG_MERGE_UNDEFINED);
3004
3005 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
3006 if (encoder->priv->tags != tags) {
3007 if (encoder->priv->tags) {
3008 gst_tag_list_unref (encoder->priv->tags);
3009 encoder->priv->tags = NULL;
3010 encoder->priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
3011 }
3012 if (tags) {
3013 encoder->priv->tags = gst_tag_list_ref ((GstTagList *) tags);
3014 encoder->priv->tags_merge_mode = mode;
3015 }
3016
3017 GST_DEBUG_OBJECT (encoder, "setting encoder tags to %" GST_PTR_FORMAT,
3018 tags);
3019 encoder->priv->tags_changed = TRUE;
3020 }
3021 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
3022 }
3023
3024 /**
3025 * gst_video_encoder_get_allocator:
3026 * @encoder: a #GstVideoEncoder
3027 * @allocator: (out) (allow-none) (transfer full): the #GstAllocator
3028 * used
3029 * @params: (out) (allow-none) (transfer full): the
3030 * #GstAllocationParams of @allocator
3031 *
3032 * Lets #GstVideoEncoder sub-classes to know the memory @allocator
3033 * used by the base class and its @params.
3034 *
3035 * Unref the @allocator after use it.
3036 */
3037 void
gst_video_encoder_get_allocator(GstVideoEncoder * encoder,GstAllocator ** allocator,GstAllocationParams * params)3038 gst_video_encoder_get_allocator (GstVideoEncoder * encoder,
3039 GstAllocator ** allocator, GstAllocationParams * params)
3040 {
3041 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3042
3043 if (allocator)
3044 *allocator = encoder->priv->allocator ?
3045 gst_object_ref (encoder->priv->allocator) : NULL;
3046
3047 if (params)
3048 *params = encoder->priv->params;
3049 }
3050
3051 /**
3052 * gst_video_encoder_set_min_pts:
3053 * @encoder: a #GstVideoEncoder
3054 * @min_pts: minimal PTS that will be passed to handle_frame
3055 *
3056 * Request minimal value for PTS passed to handle_frame.
3057 *
3058 * For streams with reordered frames this can be used to ensure that there
3059 * is enough time to accommodate first DTS, which may be less than first PTS
3060 *
3061 * Since: 1.6
3062 */
3063 void
gst_video_encoder_set_min_pts(GstVideoEncoder * encoder,GstClockTime min_pts)3064 gst_video_encoder_set_min_pts (GstVideoEncoder * encoder, GstClockTime min_pts)
3065 {
3066 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3067 encoder->priv->min_pts = min_pts;
3068 encoder->priv->time_adjustment = GST_CLOCK_TIME_NONE;
3069 }
3070
3071 /**
3072 * gst_video_encoder_get_max_encode_time:
3073 * @encoder: a #GstVideoEncoder
3074 * @frame: a #GstVideoCodecFrame
3075 *
3076 * Determines maximum possible encoding time for @frame that will
3077 * allow it to encode and arrive in time (as determined by QoS events).
3078 * In particular, a negative result means encoding in time is no longer possible
3079 * and should therefore occur as soon/skippy as possible.
3080 *
3081 * If no QoS events have been received from downstream, or if
3082 * #GstVideoEncoder:qos is disabled this function returns #G_MAXINT64.
3083 *
3084 * Returns: max decoding time.
3085 * Since: 1.14
3086 */
3087 GstClockTimeDiff
gst_video_encoder_get_max_encode_time(GstVideoEncoder * encoder,GstVideoCodecFrame * frame)3088 gst_video_encoder_get_max_encode_time (GstVideoEncoder *
3089 encoder, GstVideoCodecFrame * frame)
3090 {
3091 GstClockTimeDiff deadline;
3092 GstClockTime earliest_time;
3093
3094 if (!g_atomic_int_get (&encoder->priv->qos_enabled))
3095 return G_MAXINT64;
3096
3097 GST_OBJECT_LOCK (encoder);
3098 earliest_time = encoder->priv->earliest_time;
3099 if (GST_CLOCK_TIME_IS_VALID (earliest_time)
3100 && GST_CLOCK_TIME_IS_VALID (frame->deadline))
3101 deadline = GST_CLOCK_DIFF (earliest_time, frame->deadline);
3102 else
3103 deadline = G_MAXINT64;
3104
3105 GST_LOG_OBJECT (encoder, "earliest %" GST_TIME_FORMAT
3106 ", frame deadline %" GST_TIME_FORMAT ", deadline %" GST_STIME_FORMAT,
3107 GST_TIME_ARGS (earliest_time), GST_TIME_ARGS (frame->deadline),
3108 GST_STIME_ARGS (deadline));
3109
3110 GST_OBJECT_UNLOCK (encoder);
3111
3112 return deadline;
3113 }
3114
3115 /**
3116 * gst_video_encoder_set_qos_enabled:
3117 * @encoder: the encoder
3118 * @enabled: the new qos value.
3119 *
3120 * Configures @encoder to handle Quality-of-Service events from downstream.
3121 * Since: 1.14
3122 */
3123 void
gst_video_encoder_set_qos_enabled(GstVideoEncoder * encoder,gboolean enabled)3124 gst_video_encoder_set_qos_enabled (GstVideoEncoder * encoder, gboolean enabled)
3125 {
3126 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3127
3128 g_atomic_int_set (&encoder->priv->qos_enabled, enabled);
3129 }
3130
3131 /**
3132 * gst_video_encoder_is_qos_enabled:
3133 * @encoder: the encoder
3134 *
3135 * Checks if @encoder is currently configured to handle Quality-of-Service
3136 * events from downstream.
3137 *
3138 * Returns: %TRUE if the encoder is configured to perform Quality-of-Service.
3139 * Since: 1.14
3140 */
3141 gboolean
gst_video_encoder_is_qos_enabled(GstVideoEncoder * encoder)3142 gst_video_encoder_is_qos_enabled (GstVideoEncoder * encoder)
3143 {
3144 gboolean res;
3145
3146 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
3147
3148 res = g_atomic_int_get (&encoder->priv->qos_enabled);
3149
3150 return res;
3151 }
3152
3153 /**
3154 * gst_video_encoder_set_min_force_key_unit_interval:
3155 * @encoder: the encoder
3156 * @interval: minimum interval
3157 *
3158 * Sets the minimum interval for requesting keyframes based on force-keyunit
3159 * events. Setting this to 0 will allow to handle every event, setting this to
3160 * %GST_CLOCK_TIME_NONE causes force-keyunit events to be ignored.
3161 *
3162 * Since: 1.18
3163 */
3164 void
gst_video_encoder_set_min_force_key_unit_interval(GstVideoEncoder * encoder,GstClockTime interval)3165 gst_video_encoder_set_min_force_key_unit_interval (GstVideoEncoder * encoder,
3166 GstClockTime interval)
3167 {
3168 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3169
3170 GST_OBJECT_LOCK (encoder);
3171 encoder->priv->min_force_key_unit_interval = interval;
3172 GST_OBJECT_UNLOCK (encoder);
3173 }
3174
3175 /**
3176 * gst_video_encoder_get_min_force_key_unit_interval:
3177 * @encoder: the encoder
3178 *
3179 * Returns the minimum force-keyunit interval, see gst_video_encoder_set_min_force_key_unit_interval()
3180 * for more details.
3181 *
3182 * Returns: the minimum force-keyunit interval
3183 *
3184 * Since: 1.18
3185 */
3186 GstClockTime
gst_video_encoder_get_min_force_key_unit_interval(GstVideoEncoder * encoder)3187 gst_video_encoder_get_min_force_key_unit_interval (GstVideoEncoder * encoder)
3188 {
3189 GstClockTime interval;
3190
3191 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), GST_CLOCK_TIME_NONE);
3192
3193 GST_OBJECT_LOCK (encoder);
3194 interval = encoder->priv->min_force_key_unit_interval;
3195 GST_OBJECT_UNLOCK (encoder);
3196
3197 return interval;
3198 }
3199