• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* GStreamer
2  * Copyright (C) 2008 David Schleef <ds@schleef.org>
3  * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
4  * Copyright (C) 2011 Nokia Corporation. All rights reserved.
5  *   Contact: Stefan Kost <stefan.kost@nokia.com>
6  * Copyright (C) 2012 Collabora Ltd.
7  *	Author : Edward Hervey <edward@collabora.com>
8  *
9  * This library is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Library General Public
11  * License as published by the Free Software Foundation; either
12  * version 2 of the License, or (at your option) any later version.
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public
20  * License along with this library; if not, write to the
21  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
22  * Boston, MA 02110-1301, USA.
23  */
24 
25 /**
26  * SECTION:gstvideoencoder
27  * @title: GstVideoEncoder
28  * @short_description: Base class for video encoders
29  * @see_also:
30  *
31  * This base class is for video encoders turning raw video into
32  * encoded video data.
33  *
34  * GstVideoEncoder and subclass should cooperate as follows.
35  *
36  * ## Configuration
37  *
38  *   * Initially, GstVideoEncoder calls @start when the encoder element
39  *     is activated, which allows subclass to perform any global setup.
40  *   * GstVideoEncoder calls @set_format to inform subclass of the format
41  *     of input video data that it is about to receive.  Subclass should
42  *     setup for encoding and configure base class as appropriate
43  *     (e.g. latency). While unlikely, it might be called more than once,
44  *     if changing input parameters requires reconfiguration.  Baseclass
45  *     will ensure that processing of current configuration is finished.
46  *   * GstVideoEncoder calls @stop at end of all processing.
47  *
48  * ## Data processing
49  *
50  *     * Base class collects input data and metadata into a frame and hands
51  *       this to subclass' @handle_frame.
52  *
53  *     * If codec processing results in encoded data, subclass should call
54  *       @gst_video_encoder_finish_frame to have encoded data pushed
55  *       downstream.
56  *
57  *     * If implemented, baseclass calls subclass @pre_push just prior to
58  *       pushing to allow subclasses to modify some metadata on the buffer.
59  *       If it returns GST_FLOW_OK, the buffer is pushed downstream.
60  *
61  *     * GstVideoEncoderClass will handle both srcpad and sinkpad events.
62  *       Sink events will be passed to subclass if @event callback has been
63  *       provided.
64  *
65  * ## Shutdown phase
66  *
67  *   * GstVideoEncoder class calls @stop to inform the subclass that data
68  *     parsing will be stopped.
69  *
70  * Subclass is responsible for providing pad template caps for
71  * source and sink pads. The pads need to be named "sink" and "src". It should
72  * also be able to provide fixed src pad caps in @getcaps by the time it calls
73  * @gst_video_encoder_finish_frame.
74  *
76  * Things that a subclass needs to take care of:
76  *
77  *   * Provide pad templates
78  *   * Provide source pad caps before pushing the first buffer
79  *   * Accept data in @handle_frame and provide encoded results to
80  *      @gst_video_encoder_finish_frame.
81  *
82  *
83  * The #GstVideoEncoder:qos property will enable the Quality-of-Service
84  * features of the encoder which gather statistics about the real-time
85  * performance of the downstream elements. If enabled, subclasses can
86  * use gst_video_encoder_get_max_encode_time() to check if input frames
87  * are already late and drop them right away to give a chance to the
88  * pipeline to catch up.
89  */
90 
91 #ifdef HAVE_CONFIG_H
92 #include "config.h"
93 #endif
94 
95 /* TODO
96  *
97  * * Calculate actual latency based on input/output timestamp/frame_number
98  *   and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
99  */
100 
101 #include <gst/video/video.h>
102 #include "gstvideoencoder.h"
103 #include "gstvideoutils.h"
104 #include "gstvideoutilsprivate.h"
105 
106 #include <gst/video/gstvideometa.h>
107 #include <gst/video/gstvideopool.h>
108 
109 #include <string.h>
110 
/* Debug category used by all GST_*_OBJECT logging in this file */
GST_DEBUG_CATEGORY (videoencoder_debug);
#define GST_CAT_DEFAULT videoencoder_debug

/* properties */

#define DEFAULT_QOS                 FALSE

enum
{
  PROP_0,                       /* GObject reserves property id 0 */
  PROP_QOS,                     /* "qos" boolean property */
  PROP_LAST
};
124 
/* Instance-private state; reached via encoder->priv
 * (see gst_video_encoder_get_instance_private()) */
struct _GstVideoEncoderPrivate
{
  guint64 presentation_frame_number;
  int distance_from_sync;

  /* FIXME : (and introduce a context ?) */
  gboolean drained;             /* set TRUE on reset; cleared elsewhere */

  /* latency reported to the pipeline, in nanoseconds */
  gint64 min_latency;
  gint64 max_latency;

  /* serialized events received before the next input buffer; they get
   * attached to the frame built for that buffer */
  GList *current_frame_events;

  GList *headers;               /* codec headers set by the subclass */
  gboolean new_headers;         /* Whether new headers were just set */

  GList *force_key_unit;        /* List of pending forced keyunits */

  guint32 system_frame_number;

  GList *frames;                /* Protected with OBJECT_LOCK */
  GstVideoCodecState *input_state;
  GstVideoCodecState *output_state;
  gboolean output_state_changed;

  /* running totals, reset under OBJECT_LOCK in gst_video_encoder_reset() */
  gint64 bytes;
  gint64 time;

  /* negotiated downstream allocator and its parameters */
  GstAllocator *allocator;
  GstAllocationParams params;

  /* upstream stream tags (global tags are passed through as-is) */
  GstTagList *upstream_tags;

  /* subclass tags */
  GstTagList *tags;
  GstTagMergeMode tags_merge_mode;

  gboolean tags_changed;

  GstClockTime min_pts;
  /* adjustment needed on pts, dts, segment start and stop to accommodate
   * min_pts */
  GstClockTime time_adjustment;

  /* QoS properties */
  gint qos_enabled;             /* ATOMIC */
  gdouble proportion;           /* OBJECT_LOCK */
  GstClockTime earliest_time;   /* OBJECT_LOCK */
  GstClockTime qos_frame_duration;      /* OBJECT_LOCK */
  /* qos messages: frames dropped/processed */
  guint dropped;
  guint processed;
};
179 
/* Bookkeeping entry for one pending force-key-unit request
 * (stored in priv->force_key_unit) */
typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
struct _ForcedKeyUnitEvent
{
  GstClockTime running_time;    /* requested running time of the key unit */
  gboolean pending;             /* TRUE if this was requested already */
  gboolean all_headers;         /* whether all headers should be resent */
  guint count;
  guint32 frame_id;             /* set later; presumably the frame picked to
                                 * honor the request -- not set in this file's
                                 * visible code */
};
189 
/* Release a ForcedKeyUnitEvent allocated by forced_key_unit_event_new() */
static void
forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
{
  g_slice_free (ForcedKeyUnitEvent, evt);
}
195 
196 static ForcedKeyUnitEvent *
forced_key_unit_event_new(GstClockTime running_time,gboolean all_headers,guint count)197 forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
198     guint count)
199 {
200   ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
201 
202   evt->running_time = running_time;
203   evt->all_headers = all_headers;
204   evt->count = count;
205 
206   return evt;
207 }
208 
209 static GstElementClass *parent_class = NULL;
210 static gint private_offset = 0;
211 
212 static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
213 static void gst_video_encoder_init (GstVideoEncoder * enc,
214     GstVideoEncoderClass * klass);
215 
216 static void gst_video_encoder_finalize (GObject * object);
217 
218 static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
219     GstCaps * caps);
220 static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
221     GstCaps * filter);
222 static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
223     GstEvent * event);
224 static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
225     GstEvent * event);
226 static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
227     GstBuffer * buf);
228 static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
229     element, GstStateChange transition);
230 static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
231     GstQuery * query);
232 static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
233     GstQuery * query);
234 static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
235     encoder, GstBuffer * buf, GstClockTime pts, GstClockTime dts,
236     GstClockTime duration);
237 
238 static gboolean gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
239     GstEvent * event);
240 static gboolean gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
241     GstEvent * event);
242 static gboolean gst_video_encoder_decide_allocation_default (GstVideoEncoder *
243     encoder, GstQuery * query);
244 static gboolean gst_video_encoder_propose_allocation_default (GstVideoEncoder *
245     encoder, GstQuery * query);
246 static gboolean gst_video_encoder_negotiate_default (GstVideoEncoder * encoder);
247 static gboolean gst_video_encoder_negotiate_unlocked (GstVideoEncoder *
248     encoder);
249 
250 static gboolean gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
251     GstQuery * query);
252 static gboolean gst_video_encoder_src_query_default (GstVideoEncoder * encoder,
253     GstQuery * query);
254 
255 static gboolean gst_video_encoder_transform_meta_default (GstVideoEncoder *
256     encoder, GstVideoCodecFrame * frame, GstMeta * meta);
257 
258 /* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
259  * method to get to the padtemplates */
/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
 * method to get to the padtemplates */
GType
gst_video_encoder_get_type (void)
{
  /* g_once_init_enter/leave guarantee the registration below runs exactly
   * once even when several threads race here */
  static volatile gsize type = 0;

  if (g_once_init_enter (&type)) {
    GType _type;
    static const GTypeInfo info = {
      sizeof (GstVideoEncoderClass),
      NULL,                     /* base_init */
      NULL,                     /* base_finalize */
      (GClassInitFunc) gst_video_encoder_class_init,
      NULL,                     /* class_finalize */
      NULL,                     /* class_data */
      sizeof (GstVideoEncoder),
      0,                        /* n_preallocs */
      (GInstanceInitFunc) gst_video_encoder_init,
    };
    const GInterfaceInfo preset_interface_info = {
      NULL,                     /* interface_init */
      NULL,                     /* interface_finalize */
      NULL                      /* interface_data */
    };

    _type = g_type_register_static (GST_TYPE_ELEMENT,
        "GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
    /* reserve per-instance private data; looked up later through
     * gst_video_encoder_get_instance_private() */
    private_offset =
        g_type_add_instance_private (_type, sizeof (GstVideoEncoderPrivate));
    /* every video encoder implements the GstPreset interface */
    g_type_add_interface_static (_type, GST_TYPE_PRESET,
        &preset_interface_info);
    g_once_init_leave (&type, _type);
  }
  return type;
}
294 
/* Resolve the private-data area registered in gst_video_encoder_get_type();
 * private_offset is adjusted in class_init before instances exist */
static inline GstVideoEncoderPrivate *
gst_video_encoder_get_instance_private (GstVideoEncoder * self)
{
  return (G_STRUCT_MEMBER_P (self, private_offset));
}
300 
301 static void
gst_video_encoder_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)302 gst_video_encoder_set_property (GObject * object, guint prop_id,
303     const GValue * value, GParamSpec * pspec)
304 {
305   GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
306 
307   switch (prop_id) {
308     case PROP_QOS:
309       gst_video_encoder_set_qos_enabled (sink, g_value_get_boolean (value));
310       break;
311     default:
312       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
313       break;
314   }
315 }
316 
317 static void
gst_video_encoder_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)318 gst_video_encoder_get_property (GObject * object, guint prop_id, GValue * value,
319     GParamSpec * pspec)
320 {
321   GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
322 
323   switch (prop_id) {
324     case PROP_QOS:
325       g_value_set_boolean (value, gst_video_encoder_is_qos_enabled (sink));
326       break;
327     default:
328       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
329       break;
330   }
331 }
332 
/* Class initializer: wire up GObject/GstElement vfuncs, install default
 * implementations for the GstVideoEncoder vmethods and the "qos" property */
static void
gst_video_encoder_class_init (GstVideoEncoderClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = G_OBJECT_CLASS (klass);
  gstelement_class = GST_ELEMENT_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
      "Base Video Encoder");

  parent_class = g_type_class_peek_parent (klass);

  /* make the offset registered in get_type() usable from instances */
  if (private_offset != 0)
    g_type_class_adjust_private_offset (klass, &private_offset);

  gobject_class->set_property = gst_video_encoder_set_property;
  gobject_class->get_property = gst_video_encoder_get_property;
  gobject_class->finalize = gst_video_encoder_finalize;

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);

  /* default vmethod implementations; subclasses may override any of these */
  klass->sink_event = gst_video_encoder_sink_event_default;
  klass->src_event = gst_video_encoder_src_event_default;
  klass->propose_allocation = gst_video_encoder_propose_allocation_default;
  klass->decide_allocation = gst_video_encoder_decide_allocation_default;
  klass->negotiate = gst_video_encoder_negotiate_default;
  klass->sink_query = gst_video_encoder_sink_query_default;
  klass->src_query = gst_video_encoder_src_query_default;
  klass->transform_meta = gst_video_encoder_transform_meta_default;

  g_object_class_install_property (gobject_class, PROP_QOS,
      g_param_spec_boolean ("qos", "Qos",
          "Handle Quality-of-Service events from downstream", DEFAULT_QOS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
371 
372 static GList *
_flush_events(GstPad * pad,GList * events)373 _flush_events (GstPad * pad, GList * events)
374 {
375   GList *tmp;
376 
377   for (tmp = events; tmp; tmp = tmp->next) {
378     if (GST_EVENT_TYPE (tmp->data) != GST_EVENT_EOS &&
379         GST_EVENT_TYPE (tmp->data) != GST_EVENT_SEGMENT &&
380         GST_EVENT_IS_STICKY (tmp->data)) {
381       gst_pad_store_sticky_event (pad, GST_EVENT_CAST (tmp->data));
382     }
383     gst_event_unref (tmp->data);
384   }
385   g_list_free (events);
386 
387   return NULL;
388 }
389 
/* Drop the encoder's pending state.
 *
 * With @hard=TRUE everything is torn down: segments, negotiated input/output
 * states, tags, headers, allocator, pending events and QoS accounting.
 * With @hard=FALSE (flush) the configuration is kept but sticky events
 * attached to dropped frames are stored back on the srcpad.  In both cases
 * all pending frames are released.  Takes the stream lock itself. */
static gboolean
gst_video_encoder_reset (GstVideoEncoder * encoder, gboolean hard)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  gboolean ret = TRUE;

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  priv->presentation_frame_number = 0;
  priv->distance_from_sync = 0;

  /* pending forced-keyunit requests are void after a reset */
  g_list_foreach (priv->force_key_unit, (GFunc) forced_key_unit_event_free,
      NULL);
  g_list_free (priv->force_key_unit);
  priv->force_key_unit = NULL;

  priv->drained = TRUE;

  /* byte/time totals are protected by the object lock */
  GST_OBJECT_LOCK (encoder);
  priv->bytes = 0;
  priv->time = 0;
  GST_OBJECT_UNLOCK (encoder);

  priv->time_adjustment = GST_CLOCK_TIME_NONE;

  if (hard) {
    /* hard reset: also drop the negotiated configuration */
    gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
    gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);

    if (priv->input_state)
      gst_video_codec_state_unref (priv->input_state);
    priv->input_state = NULL;
    if (priv->output_state)
      gst_video_codec_state_unref (priv->output_state);
    priv->output_state = NULL;

    if (priv->upstream_tags) {
      gst_tag_list_unref (priv->upstream_tags);
      priv->upstream_tags = NULL;
    }
    if (priv->tags)
      gst_tag_list_unref (priv->tags);
    priv->tags = NULL;
    priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
    priv->tags_changed = FALSE;

    /* NOTE(review): headers are GstBuffers (see
     * gst_video_encoder_set_headers) but are released here with
     * gst_event_unref; this only works because both wrap
     * gst_mini_object_unref -- confirm intended */
    g_list_foreach (priv->headers, (GFunc) gst_event_unref, NULL);
    g_list_free (priv->headers);
    priv->headers = NULL;
    priv->new_headers = FALSE;

    if (priv->allocator) {
      gst_object_unref (priv->allocator);
      priv->allocator = NULL;
    }

    g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL);
    g_list_free (priv->current_frame_events);
    priv->current_frame_events = NULL;

    /* QoS bookkeeping back to defaults */
    GST_OBJECT_LOCK (encoder);
    priv->proportion = 0.5;
    priv->earliest_time = GST_CLOCK_TIME_NONE;
    priv->qos_frame_duration = 0;
    GST_OBJECT_UNLOCK (encoder);

    priv->dropped = 0;
    priv->processed = 0;
  } else {
    GList *l;

    /* soft (flush) reset: keep config, but preserve sticky events that
     * were attached to the frames about to be dropped */
    for (l = priv->frames; l; l = l->next) {
      GstVideoCodecFrame *frame = l->data;

      frame->events = _flush_events (encoder->srcpad, frame->events);
    }
    priv->current_frame_events = _flush_events (encoder->srcpad,
        encoder->priv->current_frame_events);
  }

  /* in both modes every pending frame is released */
  g_list_foreach (priv->frames, (GFunc) gst_video_codec_frame_unref, NULL);
  g_list_free (priv->frames);
  priv->frames = NULL;

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return ret;
}
478 
479 /* Always call reset() in one way or another after this */
480 static gboolean
gst_video_encoder_flush(GstVideoEncoder * encoder)481 gst_video_encoder_flush (GstVideoEncoder * encoder)
482 {
483   GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
484   gboolean ret = TRUE;
485 
486   if (klass->flush)
487     ret = klass->flush (encoder);
488 
489   return ret;
490 }
491 
/* Instance initializer: create the sink/src pads from the subclass pad
 * templates, install the base-class pad functions, and put all private
 * state into a known initial condition via a hard reset. */
static void
gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
{
  GstVideoEncoderPrivate *priv;
  GstPadTemplate *pad_template;
  GstPad *pad;

  GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init");

  priv = encoder->priv = gst_video_encoder_get_instance_private (encoder);

  /* sink pad: the subclass must provide a template named "sink" */
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
  g_return_if_fail (pad_template != NULL);

  encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");

  gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
  gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);

  /* src pad: the subclass must provide a template named "src" */
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
  g_return_if_fail (pad_template != NULL);

  encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");

  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_src_event));
  gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad);

  gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
  gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);

  g_rec_mutex_init (&encoder->stream_lock);

  priv->headers = NULL;
  priv->new_headers = FALSE;

  priv->min_latency = 0;
  priv->max_latency = 0;
  priv->min_pts = GST_CLOCK_TIME_NONE;
  priv->time_adjustment = GST_CLOCK_TIME_NONE;

  /* hard reset brings the remaining private state to its defaults */
  gst_video_encoder_reset (encoder, TRUE);
}
543 
/**
 * gst_video_encoder_set_headers:
 * @video_encoder: a #GstVideoEncoder
 * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
 *
 * Set the codec headers to be sent downstream whenever requested.
 */
void
gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
{
  GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder);

  GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers);
  /* drop any previously set header buffers; ownership of @headers is taken */
  if (video_encoder->priv->headers) {
    g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref,
        NULL);
    g_list_free (video_encoder->priv->headers);
  }
  video_encoder->priv->headers = headers;
  video_encoder->priv->new_headers = TRUE;

  GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder);
}
567 
/* Create an output (src side) codec state holding @caps (ownership taken,
 * no extra ref).  The video info is initialized to GST_VIDEO_FORMAT_ENCODED;
 * when @reference is given, display-related fields (dimensions, framerate,
 * PAR, colorimetry, interlacing, multiview) are carried over from it.
 * Returns NULL if the encoded info cannot be set up. */
static GstVideoCodecState *
_new_output_state (GstCaps * caps, GstVideoCodecState * reference)
{
  GstVideoCodecState *state;

  state = g_slice_new0 (GstVideoCodecState);
  state->ref_count = 1;
  gst_video_info_init (&state->info);

  if (!gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0)) {
    g_slice_free (GstVideoCodecState, state);
    return NULL;
  }

  state->caps = caps;

  if (reference) {
    GstVideoInfo *tgt, *ref;

    tgt = &state->info;
    ref = &reference->info;

    /* Copy over extra fields from reference state */
    tgt->interlace_mode = ref->interlace_mode;
    tgt->flags = ref->flags;
    tgt->width = ref->width;
    tgt->height = ref->height;
    tgt->chroma_site = ref->chroma_site;
    tgt->colorimetry = ref->colorimetry;
    tgt->par_n = ref->par_n;
    tgt->par_d = ref->par_d;
    tgt->fps_n = ref->fps_n;
    tgt->fps_d = ref->fps_d;

    GST_VIDEO_INFO_FIELD_ORDER (tgt) = GST_VIDEO_INFO_FIELD_ORDER (ref);

    GST_VIDEO_INFO_MULTIVIEW_MODE (tgt) = GST_VIDEO_INFO_MULTIVIEW_MODE (ref);
    GST_VIDEO_INFO_MULTIVIEW_FLAGS (tgt) = GST_VIDEO_INFO_MULTIVIEW_FLAGS (ref);
  }

  return state;
}
610 
611 static GstVideoCodecState *
_new_input_state(GstCaps * caps)612 _new_input_state (GstCaps * caps)
613 {
614   GstVideoCodecState *state;
615 
616   state = g_slice_new0 (GstVideoCodecState);
617   state->ref_count = 1;
618   gst_video_info_init (&state->info);
619   if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
620     goto parse_fail;
621   state->caps = gst_caps_ref (caps);
622 
623   return state;
624 
625 parse_fail:
626   {
627     g_slice_free (GstVideoCodecState, state);
628     return NULL;
629   }
630 }
631 
/* Handle new input caps on the sink pad: build an input state from @caps and
 * hand it to the subclass' ::set_format.  Caps that are equal to the current
 * input state (either as caps or as parsed video info) are ignored.
 * Returns FALSE when the caps cannot be parsed or the subclass rejects them. */
static gboolean
gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
{
  GstVideoEncoderClass *encoder_class;
  GstVideoCodecState *state;
  gboolean ret;

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  /* subclass should do something here ... */
  g_return_val_if_fail (encoder_class->set_format != NULL, FALSE);

  GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  /* cheap check first: identical caps need no renegotiation */
  if (encoder->priv->input_state) {
    GST_DEBUG_OBJECT (encoder,
        "Checking if caps changed old %" GST_PTR_FORMAT " new %" GST_PTR_FORMAT,
        encoder->priv->input_state->caps, caps);
    if (gst_caps_is_equal (encoder->priv->input_state->caps, caps))
      goto caps_not_changed;
  }

  state = _new_input_state (caps);
  if (G_UNLIKELY (!state))
    goto parse_fail;

  /* different caps strings may still describe the same video format */
  if (encoder->priv->input_state
      && gst_video_info_is_equal (&state->info,
          &encoder->priv->input_state->info)) {
    gst_video_codec_state_unref (state);
    goto caps_not_changed;
  }

  if (encoder_class->reset) {
    GST_FIXME_OBJECT (encoder, "GstVideoEncoder::reset() is deprecated");
    encoder_class->reset (encoder, TRUE);
  }

  /* and subclass should be ready to configure format at any time around */
  ret = encoder_class->set_format (encoder, state);
  if (ret) {
    /* subclass accepted: the new state replaces the old one (state ref is
     * transferred to priv->input_state) */
    if (encoder->priv->input_state)
      gst_video_codec_state_unref (encoder->priv->input_state);
    encoder->priv->input_state = state;
  } else {
    gst_video_codec_state_unref (state);
  }

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  if (!ret)
    GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);

  return ret;

caps_not_changed:
  {
    GST_DEBUG_OBJECT (encoder, "Caps did not change - ignore");
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    return TRUE;
  }

  /* ERRORS */
parse_fail:
  {
    GST_WARNING_OBJECT (encoder, "Failed to parse caps");
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    return FALSE;
  }
}
704 
/**
 * gst_video_encoder_proxy_getcaps:
 * @encoder: a #GstVideoEncoder
 * @caps: (allow-none): initial caps
 * @filter: (allow-none): filter caps
 *
 * Returns caps that express @caps (or sink template caps if @caps == NULL)
 * restricted to resolution/format/... combinations supported by downstream
 * elements (e.g. muxers).
 *
 * Returns: (transfer full): a #GstCaps owned by caller
 */
GstCaps *
gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
    GstCaps * filter)
{
  /* shared helper used by all video base classes for caps proxying */
  return __gst_video_element_proxy_getcaps (GST_ELEMENT_CAST (encoder),
      GST_VIDEO_ENCODER_SINK_PAD (encoder),
      GST_VIDEO_ENCODER_SRC_PAD (encoder), caps, filter);
}
725 
726 static GstCaps *
gst_video_encoder_sink_getcaps(GstVideoEncoder * encoder,GstCaps * filter)727 gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
728 {
729   GstVideoEncoderClass *klass;
730   GstCaps *caps;
731 
732   klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
733 
734   if (klass->getcaps)
735     caps = klass->getcaps (encoder, filter);
736   else
737     caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
738 
739   GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
740 
741   return caps;
742 }
743 
744 static gboolean
gst_video_encoder_decide_allocation_default(GstVideoEncoder * encoder,GstQuery * query)745 gst_video_encoder_decide_allocation_default (GstVideoEncoder * encoder,
746     GstQuery * query)
747 {
748   GstAllocator *allocator = NULL;
749   GstAllocationParams params;
750   gboolean update_allocator;
751 
752   /* we got configuration from our peer or the decide_allocation method,
753    * parse them */
754   if (gst_query_get_n_allocation_params (query) > 0) {
755     /* try the allocator */
756     gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
757     update_allocator = TRUE;
758   } else {
759     allocator = NULL;
760     gst_allocation_params_init (&params);
761     update_allocator = FALSE;
762   }
763 
764   if (update_allocator)
765     gst_query_set_nth_allocation_param (query, 0, allocator, &params);
766   else
767     gst_query_add_allocation_param (query, allocator, &params);
768   if (allocator)
769     gst_object_unref (allocator);
770 
771   return TRUE;
772 }
773 
/* Default GstVideoEncoderClass::propose_allocation implementation: when the
 * upstream ALLOCATION query carries no pool yet, offer a GstVideoBufferPool
 * (with GstVideoMeta support) sized for the negotiated raw caps. */
static gboolean
gst_video_encoder_propose_allocation_default (GstVideoEncoder * encoder,
    GstQuery * query)
{
  GstCaps *caps;
  GstVideoInfo info;
  GstBufferPool *pool;
  guint size;

  gst_query_parse_allocation (query, &caps, NULL);

  if (caps == NULL)
    return FALSE;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  size = GST_VIDEO_INFO_SIZE (&info);

  if (gst_query_get_n_allocation_pools (query) == 0) {
    GstStructure *structure;
    GstAllocator *allocator = NULL;
    /* default params: align = 15, i.e. 16-byte aligned memory */
    GstAllocationParams params = { 0, 15, 0, 0 };

    /* reuse an allocator already present in the query, otherwise advertise
     * the default allocator (NULL) with our default params */
    if (gst_query_get_n_allocation_params (query) > 0)
      gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    else
      gst_query_add_allocation_param (query, allocator, &params);

    pool = gst_video_buffer_pool_new ();

    structure = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
    gst_buffer_pool_config_set_allocator (structure, allocator, &params);

    if (allocator)
      gst_object_unref (allocator);

    if (!gst_buffer_pool_set_config (pool, structure))
      goto config_failed;

    /* min = max = 0: no constraints on buffer count */
    gst_query_add_allocation_pool (query, pool, size, 0, 0);
    gst_object_unref (pool);
    gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
  }

  return TRUE;

  /* ERRORS */
config_failed:
  {
    GST_ERROR_OBJECT (encoder, "failed to set config");
    gst_object_unref (pool);
    return FALSE;
  }
}
830 
/* Default GstVideoEncoderClass::sink_query implementation.
 * Handles CAPS (via sink_getcaps), CONVERT (raw-video unit conversion based
 * on the current input state) and ALLOCATION (via ::propose_allocation);
 * everything else goes to gst_pad_query_default(). */
static gboolean
gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
    GstQuery * query)
{
  GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);
  gboolean res = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
    {
      GstCaps *filter, *caps;

      gst_query_parse_caps (query, &filter);
      caps = gst_video_encoder_sink_getcaps (encoder, filter);
      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);
      res = TRUE;
      break;
    }
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      GST_DEBUG_OBJECT (encoder, "convert query");

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      /* input_state is read under the object lock; without negotiated input
       * there is no frame size/rate to convert with */
      GST_OBJECT_LOCK (encoder);
      if (encoder->priv->input_state != NULL)
        res = __gst_video_rawvideo_convert (encoder->priv->input_state,
            src_fmt, src_val, &dest_fmt, &dest_val);
      else
        res = FALSE;
      GST_OBJECT_UNLOCK (encoder);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    case GST_QUERY_ALLOCATION:
    {
      GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

      if (klass->propose_allocation)
        res = klass->propose_allocation (encoder, query);
      break;
    }
    default:
      res = gst_pad_query_default (pad, GST_OBJECT (encoder), query);
      break;
  }
  return res;

error:
  GST_DEBUG_OBJECT (encoder, "query failed");
  return res;
}
888 
889 static gboolean
gst_video_encoder_sink_query(GstPad * pad,GstObject * parent,GstQuery * query)890 gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
891     GstQuery * query)
892 {
893   GstVideoEncoder *encoder;
894   GstVideoEncoderClass *encoder_class;
895   gboolean ret = FALSE;
896 
897   encoder = GST_VIDEO_ENCODER (parent);
898   encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
899 
900   GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
901       GST_QUERY_TYPE_NAME (query));
902 
903   if (encoder_class->sink_query)
904     ret = encoder_class->sink_query (encoder, query);
905 
906   return ret;
907 }
908 
909 static void
gst_video_encoder_finalize(GObject * object)910 gst_video_encoder_finalize (GObject * object)
911 {
912   GstVideoEncoder *encoder;
913 
914   GST_DEBUG_OBJECT (object, "finalize");
915 
916   encoder = GST_VIDEO_ENCODER (object);
917   g_rec_mutex_clear (&encoder->stream_lock);
918 
919   if (encoder->priv->allocator) {
920     gst_object_unref (encoder->priv->allocator);
921     encoder->priv->allocator = NULL;
922   }
923 
924   G_OBJECT_CLASS (parent_class)->finalize (object);
925 }
926 
/* Pushes @event on the source pad, taking ownership of it.
 *
 * SEGMENT events are intercepted: a TIME segment is shifted by
 * priv->time_adjustment (the offset applied to outgoing frame
 * timestamps), stored as the output segment, and a fresh event
 * reflecting the adjusted segment is pushed instead of the original. */
static gboolean
gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
{
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      gst_event_copy_segment (event, &segment);

      GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);

      /* Only TIME segments are adjusted/tracked; anything else is
       * forwarded untouched */
      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (encoder, "received non TIME segment");
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
        break;
      }

      /* Apply the same offset that is added to frame timestamps so the
       * segment and the buffers stay consistent downstream */
      if (encoder->priv->time_adjustment != GST_CLOCK_TIME_NONE) {
        segment.start += encoder->priv->time_adjustment;
        if (GST_CLOCK_TIME_IS_VALID (segment.position)) {
          segment.position += encoder->priv->time_adjustment;
        }
        if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
          segment.stop += encoder->priv->time_adjustment;
        }
      }

      encoder->output_segment = segment;
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      /* Replace the original event with one for the adjusted segment */
      gst_event_unref (event);
      event = gst_event_new_segment (&encoder->output_segment);

      break;
    }
    default:
      break;
  }

  return gst_pad_push_event (encoder->srcpad, event);
}
971 
972 static GstEvent *
gst_video_encoder_create_merged_tags_event(GstVideoEncoder * enc)973 gst_video_encoder_create_merged_tags_event (GstVideoEncoder * enc)
974 {
975   GstTagList *merged_tags;
976 
977   GST_LOG_OBJECT (enc, "upstream : %" GST_PTR_FORMAT, enc->priv->upstream_tags);
978   GST_LOG_OBJECT (enc, "encoder  : %" GST_PTR_FORMAT, enc->priv->tags);
979   GST_LOG_OBJECT (enc, "mode     : %d", enc->priv->tags_merge_mode);
980 
981   merged_tags =
982       gst_tag_list_merge (enc->priv->upstream_tags, enc->priv->tags,
983       enc->priv->tags_merge_mode);
984 
985   GST_DEBUG_OBJECT (enc, "merged   : %" GST_PTR_FORMAT, merged_tags);
986 
987   if (merged_tags == NULL)
988     return NULL;
989 
990   if (gst_tag_list_is_empty (merged_tags)) {
991     gst_tag_list_unref (merged_tags);
992     return NULL;
993   }
994 
995   return gst_event_new_tag (merged_tags);
996 }
997 
998 static inline void
gst_video_encoder_check_and_push_tags(GstVideoEncoder * encoder)999 gst_video_encoder_check_and_push_tags (GstVideoEncoder * encoder)
1000 {
1001   if (encoder->priv->tags_changed) {
1002     GstEvent *tags_event;
1003 
1004     tags_event = gst_video_encoder_create_merged_tags_event (encoder);
1005 
1006     if (tags_event != NULL)
1007       gst_video_encoder_push_event (encoder, tags_event);
1008 
1009     encoder->priv->tags_changed = FALSE;
1010   }
1011 }
1012 
/* Default sink pad event handler.
 *
 * Handles CAPS (format negotiation), EOS (drain via ->finish and flush
 * pending events/tags), SEGMENT (tracks the input segment),
 * force-key-unit custom events, STREAM_START (clears upstream tags),
 * TAG (filters and caches stream tags, re-emitting a merged event) and
 * FLUSH_STOP (resets encoder state).  Takes ownership of @event. */
static gboolean
gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
    GstEvent * event)
{
  GstVideoEncoderClass *encoder_class;
  gboolean ret = FALSE;

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_video_encoder_setcaps (encoder, caps);

      /* Event consumed here; the src pad caps event is generated during
       * negotiation */
      gst_event_unref (event);
      event = NULL;
      break;
    }
    case GST_EVENT_EOS:
    {
      GstFlowReturn flow_ret;

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      /* Let the subclass drain any remaining frames */
      if (encoder_class->finish) {
        flow_ret = encoder_class->finish (encoder);
      } else {
        flow_ret = GST_FLOW_OK;
      }

      /* Push any still-pending serialized events; the list is stored in
       * prepend order, so iterate from the tail to preserve ordering */
      if (encoder->priv->current_frame_events) {
        GList *l;

        for (l = g_list_last (encoder->priv->current_frame_events); l;
            l = g_list_previous (l)) {
          /* NOTE(review): this local intentionally shadows the outer
           * @event parameter */
          GstEvent *event = GST_EVENT (l->data);

          gst_video_encoder_push_event (encoder, event);
        }
      }
      g_list_free (encoder->priv->current_frame_events);
      encoder->priv->current_frame_events = NULL;

      gst_video_encoder_check_and_push_tags (encoder);

      ret = (flow_ret == GST_FLOW_OK);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      /* EOS itself is forwarded below */
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      gst_event_copy_segment (event, &segment);

      GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);

      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
        break;
      }

      encoder->input_segment = segment;
      ret = TRUE;
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      if (gst_video_event_is_force_key_unit (event)) {
        GstClockTime running_time;
        gboolean all_headers;
        guint count;

        if (gst_video_event_parse_downstream_force_key_unit (event,
                NULL, NULL, &running_time, &all_headers, &count)) {
          ForcedKeyUnitEvent *fevt;

          /* The force_key_unit list is protected by the object lock */
          GST_OBJECT_LOCK (encoder);
          fevt = forced_key_unit_event_new (running_time, all_headers, count);
          encoder->priv->force_key_unit =
              g_list_append (encoder->priv->force_key_unit, fevt);
          GST_OBJECT_UNLOCK (encoder);

          GST_DEBUG_OBJECT (encoder,
              "force-key-unit event: running-time %" GST_TIME_FORMAT
              ", all_headers %d, count %u",
              GST_TIME_ARGS (running_time), all_headers, count);
        }
        gst_event_unref (event);
        event = NULL;
        ret = TRUE;
      }
      break;
    }
    case GST_EVENT_STREAM_START:
    {
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      /* Flush upstream tags after a STREAM_START */
      GST_DEBUG_OBJECT (encoder, "STREAM_START, clearing upstream tags");
      if (encoder->priv->upstream_tags) {
        gst_tag_list_unref (encoder->priv->upstream_tags);
        encoder->priv->upstream_tags = NULL;
        encoder->priv->tags_changed = TRUE;
      }
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      break;
    }
    case GST_EVENT_TAG:
    {
      GstTagList *tags;

      gst_event_parse_tag (event, &tags);

      /* Only stream-scoped tags are cached/merged; global tags pass
       * through untouched below */
      if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
        GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
        if (encoder->priv->upstream_tags != tags) {
          tags = gst_tag_list_copy (tags);

          /* FIXME: make generic based on GST_TAG_FLAG_ENCODED */
          gst_tag_list_remove_tag (tags, GST_TAG_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_AUDIO_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_VIDEO_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_SUBTITLE_CODEC);
          gst_tag_list_remove_tag (tags, GST_TAG_CONTAINER_FORMAT);
          gst_tag_list_remove_tag (tags, GST_TAG_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_NOMINAL_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_MAXIMUM_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_MINIMUM_BITRATE);
          gst_tag_list_remove_tag (tags, GST_TAG_ENCODER);
          gst_tag_list_remove_tag (tags, GST_TAG_ENCODER_VERSION);

          if (encoder->priv->upstream_tags)
            gst_tag_list_unref (encoder->priv->upstream_tags);
          encoder->priv->upstream_tags = tags;
          GST_INFO_OBJECT (encoder, "upstream tags: %" GST_PTR_FORMAT, tags);
        }
        /* Replace the incoming event by a freshly merged one (may be
         * NULL when there is nothing to send) */
        gst_event_unref (event);
        event = gst_video_encoder_create_merged_tags_event (encoder);
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
        if (!event)
          ret = TRUE;
      }
      break;
    }
    case GST_EVENT_FLUSH_STOP:{
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      gst_video_encoder_flush (encoder);
      gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
      gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
      gst_video_encoder_reset (encoder, FALSE);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      break;
    }
    default:
      break;
  }

  /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
   * For EOS this is required because no buffer or serialized event
   * will come after EOS and nothing could trigger another
   * _finish_frame() call.
   *
   * If the subclass handles sending of EOS manually it can simply
   * not chain up to the parent class' event handler
   *
   * For FLUSH_STOP this is required because it is expected
   * to be forwarded immediately and no buffers are queued anyway.
   */
  if (event) {
    if (!GST_EVENT_IS_SERIALIZED (event)
        || GST_EVENT_TYPE (event) == GST_EVENT_EOS
        || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
      ret = gst_video_encoder_push_event (encoder, event);
    } else {
      /* Queue serialized events; they are pushed together with the next
       * frame (or at EOS) */
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      encoder->priv->current_frame_events =
          g_list_prepend (encoder->priv->current_frame_events, event);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      ret = TRUE;
    }
  }

  return ret;
}
1203 
1204 static gboolean
gst_video_encoder_sink_event(GstPad * pad,GstObject * parent,GstEvent * event)1205 gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
1206     GstEvent * event)
1207 {
1208   GstVideoEncoder *enc;
1209   GstVideoEncoderClass *klass;
1210   gboolean ret = TRUE;
1211 
1212   enc = GST_VIDEO_ENCODER (parent);
1213   klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
1214 
1215   GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
1216       GST_EVENT_TYPE_NAME (event));
1217 
1218   if (klass->sink_event)
1219     ret = klass->sink_event (enc, event);
1220 
1221   return ret;
1222 }
1223 
/* Default src pad event handler.
 *
 * Handles upstream force-key-unit requests (queued on the object-locked
 * force_key_unit list for the chain function to act on) and QOS events
 * (updating the proportion/earliest_time used for frame dropping).
 * Other events are forwarded via gst_pad_event_default().  Takes
 * ownership of @event. */
static gboolean
gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
    GstEvent * event)
{
  gboolean ret = FALSE;
  GstVideoEncoderPrivate *priv = encoder->priv;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    {
      if (gst_video_event_is_force_key_unit (event)) {
        GstClockTime running_time;
        gboolean all_headers;
        guint count;

        if (gst_video_event_parse_upstream_force_key_unit (event,
                &running_time, &all_headers, &count)) {
          ForcedKeyUnitEvent *fevt;

          /* The force_key_unit list is protected by the object lock */
          GST_OBJECT_LOCK (encoder);
          fevt = forced_key_unit_event_new (running_time, all_headers, count);
          encoder->priv->force_key_unit =
              g_list_append (encoder->priv->force_key_unit, fevt);
          GST_OBJECT_UNLOCK (encoder);

          GST_DEBUG_OBJECT (encoder,
              "force-key-unit event: running-time %" GST_TIME_FORMAT
              ", all_headers %d, count %u",
              GST_TIME_ARGS (running_time), all_headers, count);
        }
        /* Consumed here; not forwarded upstream */
        gst_event_unref (event);
        event = NULL;
        ret = TRUE;
      }
      break;
    }
    case GST_EVENT_QOS:
    {
      GstQOSType type;
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      /* QoS processing can be disabled via the qos property */
      if (!g_atomic_int_get (&priv->qos_enabled))
        break;

      gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (encoder);
      priv->proportion = proportion;
      if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
        if (G_UNLIKELY (diff > 0)) {
          /* We are late: project the earliest usable time a bit further
           * out (2 * diff plus one frame duration) to catch up */
          priv->earliest_time = timestamp + 2 * diff + priv->qos_frame_duration;
        } else {
          priv->earliest_time = timestamp + diff;
        }
      } else {
        priv->earliest_time = GST_CLOCK_TIME_NONE;
      }
      GST_OBJECT_UNLOCK (encoder);

      GST_DEBUG_OBJECT (encoder,
          "got QoS %" GST_TIME_FORMAT ", %" GST_STIME_FORMAT ", %g",
          GST_TIME_ARGS (timestamp), GST_STIME_ARGS (diff), proportion);

      /* Forward the QoS event upstream ourselves */
      ret = gst_pad_push_event (encoder->sinkpad, event);
      event = NULL;
      break;
    }
    default:
      break;
  }

  if (event)
    ret =
        gst_pad_event_default (encoder->srcpad, GST_OBJECT_CAST (encoder),
        event);

  return ret;
}
1304 
1305 static gboolean
gst_video_encoder_src_event(GstPad * pad,GstObject * parent,GstEvent * event)1306 gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
1307 {
1308   GstVideoEncoder *encoder;
1309   GstVideoEncoderClass *klass;
1310   gboolean ret = FALSE;
1311 
1312   encoder = GST_VIDEO_ENCODER (parent);
1313   klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1314 
1315   GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
1316 
1317   if (klass->src_event)
1318     ret = klass->src_event (encoder, event);
1319 
1320   return ret;
1321 }
1322 
1323 static gboolean
gst_video_encoder_src_query_default(GstVideoEncoder * enc,GstQuery * query)1324 gst_video_encoder_src_query_default (GstVideoEncoder * enc, GstQuery * query)
1325 {
1326   GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (enc);
1327   GstVideoEncoderPrivate *priv;
1328   gboolean res;
1329 
1330   priv = enc->priv;
1331 
1332   GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);
1333 
1334   switch (GST_QUERY_TYPE (query)) {
1335     case GST_QUERY_CONVERT:
1336     {
1337       GstFormat src_fmt, dest_fmt;
1338       gint64 src_val, dest_val;
1339 
1340       gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
1341       GST_OBJECT_LOCK (enc);
1342       res =
1343           __gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt,
1344           src_val, &dest_fmt, &dest_val);
1345       GST_OBJECT_UNLOCK (enc);
1346       if (!res)
1347         goto error;
1348       gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
1349       break;
1350     }
1351     case GST_QUERY_LATENCY:
1352     {
1353       gboolean live;
1354       GstClockTime min_latency, max_latency;
1355 
1356       res = gst_pad_peer_query (enc->sinkpad, query);
1357       if (res) {
1358         gst_query_parse_latency (query, &live, &min_latency, &max_latency);
1359         GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
1360             GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
1361             GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
1362 
1363         GST_OBJECT_LOCK (enc);
1364         min_latency += priv->min_latency;
1365         if (max_latency == GST_CLOCK_TIME_NONE
1366             || enc->priv->max_latency == GST_CLOCK_TIME_NONE)
1367           max_latency = GST_CLOCK_TIME_NONE;
1368         else
1369           max_latency += enc->priv->max_latency;
1370         GST_OBJECT_UNLOCK (enc);
1371 
1372         gst_query_set_latency (query, live, min_latency, max_latency);
1373       }
1374     }
1375       break;
1376     default:
1377       res = gst_pad_query_default (pad, GST_OBJECT (enc), query);
1378   }
1379   return res;
1380 
1381 error:
1382   GST_DEBUG_OBJECT (enc, "query failed");
1383   return res;
1384 }
1385 
1386 static gboolean
gst_video_encoder_src_query(GstPad * pad,GstObject * parent,GstQuery * query)1387 gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
1388 {
1389   GstVideoEncoder *encoder;
1390   GstVideoEncoderClass *encoder_class;
1391   gboolean ret = FALSE;
1392 
1393   encoder = GST_VIDEO_ENCODER (parent);
1394   encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1395 
1396   GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
1397       GST_QUERY_TYPE_NAME (query));
1398 
1399   if (encoder_class->src_query)
1400     ret = encoder_class->src_query (encoder, query);
1401 
1402   return ret;
1403 }
1404 
1405 static GstVideoCodecFrame *
gst_video_encoder_new_frame(GstVideoEncoder * encoder,GstBuffer * buf,GstClockTime pts,GstClockTime dts,GstClockTime duration)1406 gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
1407     GstClockTime pts, GstClockTime dts, GstClockTime duration)
1408 {
1409   GstVideoEncoderPrivate *priv = encoder->priv;
1410   GstVideoCodecFrame *frame;
1411 
1412   frame = g_slice_new0 (GstVideoCodecFrame);
1413 
1414   frame->ref_count = 1;
1415 
1416   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1417   frame->system_frame_number = priv->system_frame_number;
1418   priv->system_frame_number++;
1419 
1420   frame->presentation_frame_number = priv->presentation_frame_number;
1421   priv->presentation_frame_number++;
1422   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1423 
1424   frame->events = priv->current_frame_events;
1425   priv->current_frame_events = NULL;
1426   frame->input_buffer = buf;
1427   frame->pts = pts;
1428   frame->dts = dts;
1429   frame->duration = duration;
1430   frame->abidata.ABI.ts = pts;
1431 
1432   return frame;
1433 }
1434 
1435 
/* Sink pad chain function.
 *
 * Clips the input buffer against the input segment, applies the pending
 * time adjustment, wraps the buffer in a GstVideoCodecFrame (flagging it
 * as a forced keyframe if a queued force-key-unit request applies) and
 * hands it to the subclass' ->handle_frame().  Takes ownership of @buf. */
static GstFlowReturn
gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstVideoEncoder *encoder;
  GstVideoEncoderPrivate *priv;
  GstVideoEncoderClass *klass;
  GstVideoCodecFrame *frame;
  GstClockTime pts, duration;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 start, stop, cstart, cstop;

  encoder = GST_VIDEO_ENCODER (parent);
  priv = encoder->priv;
  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);

  /* No input caps were set yet: cannot encode */
  if (!encoder->priv->input_state)
    goto not_negotiated;

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  pts = GST_BUFFER_PTS (buf);
  duration = GST_BUFFER_DURATION (buf);

  GST_LOG_OBJECT (encoder,
      "received buffer of size %" G_GSIZE_FORMAT " with PTS %" GST_TIME_FORMAT
      ", DTS %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
      gst_buffer_get_size (buf), GST_TIME_ARGS (pts),
      GST_TIME_ARGS (GST_BUFFER_DTS (buf)), GST_TIME_ARGS (duration));

  start = pts;
  if (GST_CLOCK_TIME_IS_VALID (duration))
    stop = start + duration;
  else
    stop = GST_CLOCK_TIME_NONE;

  /* Drop buffers outside of segment */
  if (!gst_segment_clip (&encoder->input_segment,
          GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
    GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame");
    gst_buffer_unref (buf);
    goto done;
  }

  /* Recompute the duration from the clipped interval */
  if (GST_CLOCK_TIME_IS_VALID (cstop))
    duration = cstop - cstart;
  else
    duration = GST_CLOCK_TIME_NONE;

  /* First timestamp below min_pts: compute the offset applied from now
   * on to all frame timestamps (and to outgoing segments) */
  if (priv->min_pts != GST_CLOCK_TIME_NONE
      && priv->time_adjustment == GST_CLOCK_TIME_NONE) {
    if (cstart < priv->min_pts) {
      priv->time_adjustment = priv->min_pts - cstart;
    }
  }

  if (priv->time_adjustment != GST_CLOCK_TIME_NONE) {
    cstart += priv->time_adjustment;
  }

  /* incoming DTS is not really relevant and does not make sense anyway,
   * so pass along _NONE and maybe come up with something better later on */
  frame = gst_video_encoder_new_frame (encoder, buf, cstart,
      GST_CLOCK_TIME_NONE, duration);

  /* Check whether a queued force-key-unit request applies to this frame;
   * the list is protected by the object lock */
  GST_OBJECT_LOCK (encoder);
  if (priv->force_key_unit) {
    ForcedKeyUnitEvent *fevt = NULL;
    GstClockTime running_time;
    GList *l;

    running_time =
        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
        cstart);

    for (l = priv->force_key_unit; l; l = l->next) {
      ForcedKeyUnitEvent *tmp = l->data;

      /* Skip pending keyunits */
      if (tmp->pending)
        continue;

      /* Simple case, keyunit ASAP */
      if (tmp->running_time == GST_CLOCK_TIME_NONE) {
        fevt = tmp;
        break;
      }

      /* Event for before this frame */
      if (tmp->running_time <= running_time) {
        fevt = tmp;
        break;
      }
    }

    if (fevt) {
      fevt->frame_id = frame->system_frame_number;
      GST_DEBUG_OBJECT (encoder,
          "Forcing a key unit at running time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (running_time));
      GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame);
      if (fevt->all_headers)
        GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame);
      /* Marked pending until the subclass produces the keyframe */
      fevt->pending = TRUE;
    }
  }
  GST_OBJECT_UNLOCK (encoder);

  /* The pending-frames list holds its own reference */
  gst_video_codec_frame_ref (frame);
  priv->frames = g_list_append (priv->frames, frame);

  /* new data, more finish needed */
  priv->drained = FALSE;

  GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass",
      frame->presentation_frame_number);

  frame->deadline =
      gst_segment_to_running_time (&encoder->input_segment, GST_FORMAT_TIME,
      frame->pts);

  ret = klass->handle_frame (encoder, frame);

done:
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (encoder, CORE, NEGOTIATION, (NULL),
        ("encoder not initialized"));
    gst_buffer_unref (buf);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
1574 
/* GstElement state change handler.
 *
 * Upward: calls the subclass ->open vfunc on NULL->READY and ->start on
 * READY->PAUSED (after resetting internal state).  Downward: calls
 * ->stop on PAUSED->READY (also resetting state) and ->close on
 * READY->NULL.  Any vfunc failure maps to GST_STATE_CHANGE_FAILURE. */
static GstStateChangeReturn
gst_video_encoder_change_state (GstElement * element, GstStateChange transition)
{
  GstVideoEncoder *encoder;
  GstVideoEncoderClass *encoder_class;
  GstStateChangeReturn ret;

  encoder = GST_VIDEO_ENCODER (element);
  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* open device/library if needed */
      if (encoder_class->open && !encoder_class->open (encoder))
        goto open_failed;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      gst_video_encoder_reset (encoder, TRUE);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      /* Initialize device/library if needed */
      if (encoder_class->start && !encoder_class->start (encoder))
        goto start_failed;
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:{
      gboolean stopped = TRUE;

      if (encoder_class->stop)
        stopped = encoder_class->stop (encoder);

      /* Reset state even when ->stop failed, then report the failure */
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      gst_video_encoder_reset (encoder, TRUE);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      if (!stopped)
        goto stop_failed;
      break;
    }
    case GST_STATE_CHANGE_READY_TO_NULL:
      /* close device/library if needed */
      if (encoder_class->close && !encoder_class->close (encoder))
        goto close_failed;
      break;
    default:
      break;
  }

  return ret;

  /* Errors */

open_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to open encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }

start_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to start encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }

stop_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to stop encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }

close_failed:
  {
    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to close encoder"));
    return GST_STATE_CHANGE_FAILURE;
  }
}
1662 
/* Default ->negotiate implementation.
 *
 * Completes the output caps from the configured output state (size,
 * framerate, colorimetry, interlacing, multiview, codec_data, ...),
 * pushes pending pre-CAPS events, sets the caps on the src pad and runs
 * the ALLOCATION query so ->decide_allocation can pick an allocator.
 * Called with the stream lock held. */
static gboolean
gst_video_encoder_negotiate_default (GstVideoEncoder * encoder)
{
  GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
  GstAllocator *allocator;
  GstAllocationParams params;
  gboolean ret = TRUE;
  GstVideoCodecState *state = encoder->priv->output_state;
  GstVideoInfo *info = &state->info;
  GstQuery *query = NULL;
  GstVideoCodecFrame *frame;
  GstCaps *prevcaps;
  gchar *colorimetry;

  g_return_val_if_fail (state->caps != NULL, FALSE);

  /* Only (re)fill the caps fields when the output state changed since
   * the last negotiation */
  if (encoder->priv->output_state_changed) {
    state->caps = gst_caps_make_writable (state->caps);

    /* Fill caps */
    gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width,
        "height", G_TYPE_INT, info->height,
        "pixel-aspect-ratio", GST_TYPE_FRACTION,
        info->par_n, info->par_d, NULL);
    if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
      /* variable fps with a max-framerate */
      gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1,
          "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
    } else {
      /* no variable fps or no max-framerate */
      gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION,
          info->fps_n, info->fps_d, NULL);
    }
    if (state->codec_data)
      gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER,
          state->codec_data, NULL);

    gst_caps_set_simple (state->caps, "interlace-mode", G_TYPE_STRING,
        gst_video_interlace_mode_to_string (info->interlace_mode), NULL);
    /* field-order only makes sense for interleaved content */
    if (info->interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED &&
        GST_VIDEO_INFO_FIELD_ORDER (info) != GST_VIDEO_FIELD_ORDER_UNKNOWN)
      gst_caps_set_simple (state->caps, "field-order", G_TYPE_STRING,
          gst_video_field_order_to_string (GST_VIDEO_INFO_FIELD_ORDER (info)),
          NULL);

    colorimetry = gst_video_colorimetry_to_string (&info->colorimetry);
    if (colorimetry)
      gst_caps_set_simple (state->caps, "colorimetry", G_TYPE_STRING,
          colorimetry, NULL);
    g_free (colorimetry);

    if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN)
      gst_caps_set_simple (state->caps, "chroma-site", G_TYPE_STRING,
          gst_video_chroma_to_string (info->chroma_site), NULL);

    if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) {
      const gchar *caps_mview_mode =
          gst_video_multiview_mode_to_caps_string (GST_VIDEO_INFO_MULTIVIEW_MODE
          (info));

      gst_caps_set_simple (state->caps, "multiview-mode", G_TYPE_STRING,
          caps_mview_mode, "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
          GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), GST_FLAG_SET_MASK_EXACT, NULL);
    }
    encoder->priv->output_state_changed = FALSE;
  }

  /* Default the allocation caps to the output caps */
  if (state->allocation_caps == NULL)
    state->allocation_caps = gst_caps_ref (state->caps);

  /* Push all pending pre-caps events of the oldest frame before
   * setting caps */
  frame = encoder->priv->frames ? encoder->priv->frames->data : NULL;
  if (frame || encoder->priv->current_frame_events) {
    GList **events, *l;

    if (frame) {
      events = &frame->events;
    } else {
      events = &encoder->priv->current_frame_events;
    }

    /* Events are stored in prepend order, so walk from the tail; only
     * events that sort before CAPS are pushed now */
    for (l = g_list_last (*events); l;) {
      GstEvent *event = GST_EVENT (l->data);
      GList *tmp;

      if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
        gst_video_encoder_push_event (encoder, event);
        tmp = l;
        l = l->prev;
        *events = g_list_delete_link (*events, tmp);
      } else {
        l = l->prev;
      }
    }
  }

  /* Avoid re-setting identical caps on the src pad */
  prevcaps = gst_pad_get_current_caps (encoder->srcpad);
  if (!prevcaps || !gst_caps_is_equal (prevcaps, state->caps))
    ret = gst_pad_set_caps (encoder->srcpad, state->caps);
  else
    ret = TRUE;
  if (prevcaps)
    gst_caps_unref (prevcaps);

  if (!ret)
    goto done;

  /* Query downstream for allocation hints; a failed query just means we
   * decide without hints */
  query = gst_query_new_allocation (state->allocation_caps, TRUE);
  if (!gst_pad_peer_query (encoder->srcpad, query)) {
    GST_DEBUG_OBJECT (encoder, "didn't get downstream ALLOCATION hints");
  }

  g_assert (klass->decide_allocation != NULL);
  ret = klass->decide_allocation (encoder, query);

  GST_DEBUG_OBJECT (encoder, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, ret,
      query);

  if (!ret)
    goto no_decide_allocation;

  /* we got configuration from our peer or the decide_allocation method,
   * parse them */
  if (gst_query_get_n_allocation_params (query) > 0) {
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
  }

  /* Replace the cached allocator/params used for output buffers */
  if (encoder->priv->allocator)
    gst_object_unref (encoder->priv->allocator);
  encoder->priv->allocator = allocator;
  encoder->priv->params = params;

done:
  if (query)
    gst_query_unref (query);

  return ret;

  /* Errors */
no_decide_allocation:
  {
    GST_WARNING_OBJECT (encoder, "Subclass failed to decide allocation");
    goto done;
  }
}
1812 
1813 static gboolean
gst_video_encoder_negotiate_unlocked(GstVideoEncoder * encoder)1814 gst_video_encoder_negotiate_unlocked (GstVideoEncoder * encoder)
1815 {
1816   GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1817   gboolean ret = TRUE;
1818 
1819   if (G_LIKELY (klass->negotiate))
1820     ret = klass->negotiate (encoder);
1821 
1822   return ret;
1823 }
1824 
1825 /**
1826  * gst_video_encoder_negotiate:
1827  * @encoder: a #GstVideoEncoder
1828  *
1829  * Negotiate with downstream elements to currently configured #GstVideoCodecState.
1830  * Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
1831  * negotiate fails.
1832  *
1833  * Returns: %TRUE if the negotiation succeeded, else %FALSE.
1834  */
1835 gboolean
gst_video_encoder_negotiate(GstVideoEncoder * encoder)1836 gst_video_encoder_negotiate (GstVideoEncoder * encoder)
1837 {
1838   GstVideoEncoderClass *klass;
1839   gboolean ret = TRUE;
1840 
1841   g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
1842   g_return_val_if_fail (encoder->priv->output_state, FALSE);
1843 
1844   klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1845 
1846   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1847   gst_pad_check_reconfigure (encoder->srcpad);
1848   if (klass->negotiate) {
1849     ret = klass->negotiate (encoder);
1850     if (!ret)
1851       gst_pad_mark_reconfigure (encoder->srcpad);
1852   }
1853   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1854 
1855   return ret;
1856 }
1857 
1858 /**
1859  * gst_video_encoder_allocate_output_buffer:
1860  * @encoder: a #GstVideoEncoder
1861  * @size: size of the buffer
1862  *
1863  * Helper function that allocates a buffer to hold an encoded video frame
1864  * for @encoder's current #GstVideoCodecState.
1865  *
1866  * Returns: (transfer full): allocated buffer
1867  */
1868 GstBuffer *
gst_video_encoder_allocate_output_buffer(GstVideoEncoder * encoder,gsize size)1869 gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder, gsize size)
1870 {
1871   GstBuffer *buffer;
1872   gboolean needs_reconfigure = FALSE;
1873 
1874   g_return_val_if_fail (size > 0, NULL);
1875 
1876   GST_DEBUG ("alloc src buffer");
1877 
1878   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1879   needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
1880   if (G_UNLIKELY (encoder->priv->output_state_changed
1881           || (encoder->priv->output_state && needs_reconfigure))) {
1882     if (!gst_video_encoder_negotiate_unlocked (encoder)) {
1883       GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
1884       gst_pad_mark_reconfigure (encoder->srcpad);
1885       goto fallback;
1886     }
1887   }
1888 
1889   buffer =
1890       gst_buffer_new_allocate (encoder->priv->allocator, size,
1891       &encoder->priv->params);
1892   if (!buffer) {
1893     GST_INFO_OBJECT (encoder, "couldn't allocate output buffer");
1894     goto fallback;
1895   }
1896 
1897   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1898 
1899   return buffer;
1900 
1901 fallback:
1902   buffer = gst_buffer_new_allocate (NULL, size, NULL);
1903 
1904   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1905 
1906   return buffer;
1907 }
1908 
1909 /**
1910  * gst_video_encoder_allocate_output_frame:
1911  * @encoder: a #GstVideoEncoder
1912  * @frame: a #GstVideoCodecFrame
1913  * @size: size of the buffer
1914  *
1915  * Helper function that allocates a buffer to hold an encoded video frame for @encoder's
1916  * current #GstVideoCodecState.  Subclass should already have configured video
1917  * state and set src pad caps.
1918  *
1919  * The buffer allocated here is owned by the frame and you should only
1920  * keep references to the frame, not the buffer.
1921  *
1922  * Returns: %GST_FLOW_OK if an output buffer could be allocated
1923  */
1924 GstFlowReturn
gst_video_encoder_allocate_output_frame(GstVideoEncoder * encoder,GstVideoCodecFrame * frame,gsize size)1925 gst_video_encoder_allocate_output_frame (GstVideoEncoder *
1926     encoder, GstVideoCodecFrame * frame, gsize size)
1927 {
1928   gboolean needs_reconfigure = FALSE;
1929 
1930   g_return_val_if_fail (frame->output_buffer == NULL, GST_FLOW_ERROR);
1931 
1932   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1933   needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
1934   if (G_UNLIKELY (encoder->priv->output_state_changed
1935           || (encoder->priv->output_state && needs_reconfigure))) {
1936     if (!gst_video_encoder_negotiate_unlocked (encoder)) {
1937       GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
1938       gst_pad_mark_reconfigure (encoder->srcpad);
1939     }
1940   }
1941 
1942   GST_LOG_OBJECT (encoder, "alloc buffer size %" G_GSIZE_FORMAT, size);
1943 
1944   frame->output_buffer =
1945       gst_buffer_new_allocate (encoder->priv->allocator, size,
1946       &encoder->priv->params);
1947 
1948   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1949 
1950   return frame->output_buffer ? GST_FLOW_OK : GST_FLOW_ERROR;
1951 }
1952 
1953 static void
gst_video_encoder_release_frame(GstVideoEncoder * enc,GstVideoCodecFrame * frame)1954 gst_video_encoder_release_frame (GstVideoEncoder * enc,
1955     GstVideoCodecFrame * frame)
1956 {
1957   GList *link;
1958 
1959   /* unref once from the list */
1960   link = g_list_find (enc->priv->frames, frame);
1961   if (link) {
1962     gst_video_codec_frame_unref (frame);
1963     enc->priv->frames = g_list_delete_link (enc->priv->frames, link);
1964   }
1965   /* unref because this function takes ownership */
1966   gst_video_codec_frame_unref (frame);
1967 }
1968 
1969 static gboolean
gst_video_encoder_transform_meta_default(GstVideoEncoder * encoder,GstVideoCodecFrame * frame,GstMeta * meta)1970 gst_video_encoder_transform_meta_default (GstVideoEncoder *
1971     encoder, GstVideoCodecFrame * frame, GstMeta * meta)
1972 {
1973   const GstMetaInfo *info = meta->info;
1974   const gchar *const *tags;
1975 
1976   tags = gst_meta_api_type_get_tags (info->api);
1977 
1978   if (!tags || (g_strv_length ((gchar **) tags) == 1
1979           && gst_meta_api_type_has_tag (info->api,
1980               g_quark_from_string (GST_META_TAG_VIDEO_STR))))
1981     return TRUE;
1982 
1983   return FALSE;
1984 }
1985 
/* Context handed to foreach_metadata() while copying metadata from the
 * input buffer onto a frame's output buffer. */
typedef struct
{
  GstVideoEncoder *encoder;     /* encoder whose transform_meta vmethod decides what to copy */
  GstVideoCodecFrame *frame;    /* frame owning the destination output_buffer */
} CopyMetaData;
1991 
1992 static gboolean
foreach_metadata(GstBuffer * inbuf,GstMeta ** meta,gpointer user_data)1993 foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
1994 {
1995   CopyMetaData *data = user_data;
1996   GstVideoEncoder *encoder = data->encoder;
1997   GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1998   GstVideoCodecFrame *frame = data->frame;
1999   const GstMetaInfo *info = (*meta)->info;
2000   gboolean do_copy = FALSE;
2001 
2002   if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) {
2003     /* never call the transform_meta with memory specific metadata */
2004     GST_DEBUG_OBJECT (encoder, "not copying memory specific metadata %s",
2005         g_type_name (info->api));
2006     do_copy = FALSE;
2007   } else if (klass->transform_meta) {
2008     do_copy = klass->transform_meta (encoder, frame, *meta);
2009     GST_DEBUG_OBJECT (encoder, "transformed metadata %s: copy: %d",
2010         g_type_name (info->api), do_copy);
2011   }
2012 
2013   /* we only copy metadata when the subclass implemented a transform_meta
2014    * function and when it returns %TRUE */
2015   if (do_copy && info->transform_func) {
2016     GstMetaTransformCopy copy_data = { FALSE, 0, -1 };
2017     GST_DEBUG_OBJECT (encoder, "copy metadata %s", g_type_name (info->api));
2018     /* simply copy then */
2019     info->transform_func (frame->output_buffer, *meta, inbuf,
2020         _gst_meta_transform_copy, &copy_data);
2021   }
2022   return TRUE;
2023 }
2024 
2025 static void
gst_video_encoder_drop_frame(GstVideoEncoder * enc,GstVideoCodecFrame * frame)2026 gst_video_encoder_drop_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
2027 {
2028   GstVideoEncoderPrivate *priv = enc->priv;
2029   GstClockTime stream_time, jitter, earliest_time, qostime, timestamp;
2030   GstSegment *segment;
2031   GstMessage *qos_msg;
2032   gdouble proportion;
2033 
2034   GST_DEBUG_OBJECT (enc, "dropping frame %" GST_TIME_FORMAT,
2035       GST_TIME_ARGS (frame->pts));
2036 
2037   priv->dropped++;
2038 
2039   /* post QoS message */
2040   GST_OBJECT_LOCK (enc);
2041   proportion = priv->proportion;
2042   earliest_time = priv->earliest_time;
2043   GST_OBJECT_UNLOCK (enc);
2044 
2045   timestamp = frame->pts;
2046   segment = &enc->output_segment;
2047   if (G_UNLIKELY (segment->format == GST_FORMAT_UNDEFINED))
2048     segment = &enc->input_segment;
2049   stream_time =
2050       gst_segment_to_stream_time (segment, GST_FORMAT_TIME, timestamp);
2051   qostime = gst_segment_to_running_time (segment, GST_FORMAT_TIME, timestamp);
2052   jitter = GST_CLOCK_DIFF (qostime, earliest_time);
2053   qos_msg =
2054       gst_message_new_qos (GST_OBJECT_CAST (enc), FALSE, qostime, stream_time,
2055       timestamp, GST_CLOCK_TIME_NONE);
2056   gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
2057   gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
2058       priv->processed, priv->dropped);
2059   gst_element_post_message (GST_ELEMENT_CAST (enc), qos_msg);
2060 }
2061 
2062 /**
2063  * gst_video_encoder_finish_frame:
2064  * @encoder: a #GstVideoEncoder
2065  * @frame: (transfer full): an encoded #GstVideoCodecFrame
2066  *
2067  * @frame must have a valid encoded data buffer, whose metadata fields
2068  * are then appropriately set according to frame data or no buffer at
2069  * all if the frame should be dropped.
2070  * It is subsequently pushed downstream or provided to @pre_push.
2071  * In any case, the frame is considered finished and released.
2072  *
2073  * After calling this function the output buffer of the frame is to be
2074  * considered read-only. This function will also change the metadata
2075  * of the buffer.
2076  *
2077  * Returns: a #GstFlowReturn resulting from sending data downstream
2078  */
GstFlowReturn
gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoEncoderClass *encoder_class;
  GList *l;
  gboolean send_headers = FALSE;
  /* very first frame downstream needs the DISCONT flag */
  gboolean discont = (frame->presentation_frame_number == 0);
  GstBuffer *buffer;
  gboolean needs_reconfigure = FALSE;

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_LOG_OBJECT (encoder,
      "finish frame fpn %d", frame->presentation_frame_number);

  GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
      ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
      GST_TIME_ARGS (frame->dts));

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  /* (Re)negotiate if the output state changed or downstream requested a
   * reconfigure; failure maps to FLUSHING (pad flushing) or NOT_NEGOTIATED */
  needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
  if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
              && needs_reconfigure))) {
    if (!gst_video_encoder_negotiate_unlocked (encoder)) {
      gst_pad_mark_reconfigure (encoder->srcpad);
      if (GST_PAD_IS_FLUSHING (encoder->srcpad))
        ret = GST_FLOW_FLUSHING;
      else
        ret = GST_FLOW_NOT_NEGOTIATED;
      goto done;
    }
  }

  if (G_UNLIKELY (priv->output_state == NULL))
    goto no_output_state;

  /* Push all pending events that arrived before this frame */
  for (l = priv->frames; l; l = l->next) {
    GstVideoCodecFrame *tmp = l->data;

    if (tmp->events) {
      GList *k;

      /* events are stored newest-first; push in arrival order */
      for (k = g_list_last (tmp->events); k; k = k->prev)
        gst_video_encoder_push_event (encoder, k->data);
      g_list_free (tmp->events);
      tmp->events = NULL;
    }

    if (tmp == frame)
      break;
  }

  gst_video_encoder_check_and_push_tags (encoder);

  /* no buffer data means this frame is skipped/dropped */
  if (!frame->output_buffer) {
    gst_video_encoder_drop_frame (encoder, frame);
    goto done;
  }

  priv->processed++;

  /* A sync point may satisfy one of the pending force-key-unit requests */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) {
    GstClockTime stream_time, running_time;
    GstEvent *ev;
    ForcedKeyUnitEvent *fevt = NULL;
    GList *l;

    running_time =
        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
        frame->pts);

    GST_OBJECT_LOCK (encoder);
    for (l = priv->force_key_unit; l; l = l->next) {
      ForcedKeyUnitEvent *tmp = l->data;

      /* Skip non-pending keyunits */
      if (!tmp->pending)
        continue;

      /* Exact match using the frame id */
      if (frame->system_frame_number == tmp->frame_id) {
        fevt = tmp;
        break;
      }

      /* Simple case, keyunit ASAP */
      if (tmp->running_time == GST_CLOCK_TIME_NONE) {
        fevt = tmp;
        break;
      }

      /* Event for before this frame */
      if (tmp->running_time <= running_time) {
        fevt = tmp;
        break;
      }
    }

    if (fevt) {
      priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);
    }
    GST_OBJECT_UNLOCK (encoder);

    if (fevt) {
      stream_time =
          gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
          frame->pts);

      ev = gst_video_event_new_downstream_force_key_unit
          (frame->pts, stream_time, running_time,
          fevt->all_headers, fevt->count);

      gst_video_encoder_push_event (encoder, ev);

      if (fevt->all_headers)
        send_headers = TRUE;

      GST_DEBUG_OBJECT (encoder,
          "Forced key unit: running-time %" GST_TIME_FORMAT
          ", all_headers %d, count %u",
          GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
      forced_key_unit_event_free (fevt);
    }
  }

  /* Mark the delta-unit flag according to sync-point status */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
    priv->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
    if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
      frame->dts = frame->pts;
    }
  } else {
    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  /* DTS is expected monotone ascending,
   * so a good guess is the lowest unsent PTS (all being OK) */
  {
    GstClockTime min_ts = GST_CLOCK_TIME_NONE;
    GstVideoCodecFrame *oframe = NULL;
    gboolean seen_none = FALSE;

    /* some maintenance regardless */
    for (l = priv->frames; l; l = l->next) {
      GstVideoCodecFrame *tmp = l->data;

      if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
        seen_none = TRUE;
        continue;
      }

      if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
        min_ts = tmp->abidata.ABI.ts;
        oframe = tmp;
      }
    }
    /* save a ts if needed */
    if (oframe && oframe != frame) {
      oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
    }

    /* and set if needed */
    if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
      frame->dts = min_ts;
      /* NOTE(review): the message prints frame->pts, but the value actually
       * applied to frame->dts is min_ts -- confirm intended log content */
      GST_DEBUG_OBJECT (encoder,
          "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
          GST_TIME_ARGS (frame->pts));
    }
  }

  frame->distance_from_sync = priv->distance_from_sync;
  priv->distance_from_sync++;

  /* Stamp the outgoing buffer from the frame's timing fields */
  GST_BUFFER_PTS (frame->output_buffer) = frame->pts;
  GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
  GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;

  GST_OBJECT_LOCK (encoder);
  /* update rate estimate */
  priv->bytes += gst_buffer_get_size (frame->output_buffer);
  if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
    priv->time += frame->duration;
  } else {
    /* better none than nothing valid */
    priv->time = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (encoder);

  /* Push stored header buffers first when (re)requested */
  if (G_UNLIKELY (send_headers || priv->new_headers)) {
    GList *tmp, *copy = NULL;

    GST_DEBUG_OBJECT (encoder, "Sending headers");

    /* First make all buffers metadata-writable */
    for (tmp = priv->headers; tmp; tmp = tmp->next) {
      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);

      copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
    }
    g_list_free (priv->headers);
    priv->headers = copy;

    for (tmp = priv->headers; tmp; tmp = tmp->next) {
      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);

      GST_OBJECT_LOCK (encoder);
      priv->bytes += gst_buffer_get_size (tmpbuf);
      GST_OBJECT_UNLOCK (encoder);
      if (G_UNLIKELY (discont)) {
        GST_LOG_OBJECT (encoder, "marking discont");
        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
        discont = FALSE;
      }

      /* drop the stream lock while pushing to avoid deadlocks downstream */
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
      gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
    }
    priv->new_headers = FALSE;
  }

  if (G_UNLIKELY (discont)) {
    GST_LOG_OBJECT (encoder, "marking discont");
    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
  }

  if (encoder_class->pre_push)
    ret = encoder_class->pre_push (encoder, frame);

  /* Let the subclass decide which input-buffer metas carry over */
  if (encoder_class->transform_meta) {
    if (G_LIKELY (frame->input_buffer)) {
      CopyMetaData data;

      data.encoder = encoder;
      data.frame = frame;
      gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
    } else {
      GST_WARNING_OBJECT (encoder,
          "Can't copy metadata because input frame disappeared");
    }
  }

  /* Get an additional ref to the buffer, which is going to be pushed
   * downstream, the original ref is owned by the frame */
  if (ret == GST_FLOW_OK)
    buffer = gst_buffer_ref (frame->output_buffer);

  /* Release frame so the buffer is writable when we push it downstream
   * if possible, i.e. if the subclass does not hold additional references
   * to the frame
   */
  gst_video_encoder_release_frame (encoder, frame);
  frame = NULL;

  if (ret == GST_FLOW_OK) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    ret = gst_pad_push (encoder->srcpad, buffer);
    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  }

done:
  /* handed out */
  if (frame)
    gst_video_encoder_release_frame (encoder, frame);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return ret;

  /* ERRORS */
no_output_state:
  {
    gst_video_encoder_release_frame (encoder, frame);
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    GST_ERROR_OBJECT (encoder, "Output state was not configured");
    return GST_FLOW_ERROR;
  }
}
2364 
2365 /**
2366  * gst_video_encoder_get_output_state:
2367  * @encoder: a #GstVideoEncoder
2368  *
2369  * Get the current #GstVideoCodecState
2370  *
2371  * Returns: (transfer full): #GstVideoCodecState describing format of video data.
2372  */
2373 GstVideoCodecState *
gst_video_encoder_get_output_state(GstVideoEncoder * encoder)2374 gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
2375 {
2376   GstVideoCodecState *state;
2377 
2378   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2379   state = gst_video_codec_state_ref (encoder->priv->output_state);
2380   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2381 
2382   return state;
2383 }
2384 
2385 /**
2386  * gst_video_encoder_set_output_state:
2387  * @encoder: a #GstVideoEncoder
2388  * @caps: (transfer full): the #GstCaps to use for the output
2389  * @reference: (allow-none) (transfer none): An optional reference @GstVideoCodecState
2390  *
2391  * Creates a new #GstVideoCodecState with the specified caps as the output state
2392  * for the encoder.
2393  * Any previously set output state on @encoder will be replaced by the newly
2394  * created one.
2395  *
2396  * The specified @caps should not contain any resolution, pixel-aspect-ratio,
2397  * framerate, codec-data, .... Those should be specified instead in the returned
2398  * #GstVideoCodecState.
2399  *
2400  * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
2401  * or framerate) from an existing #GstVideoCodecState, it can be provided as a
2402  * @reference.
2403  *
2404  * If the subclass wishes to override some fields from the output state (like
2405  * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
2406  *
2407  * The new output state will only take effect (set on pads and buffers) starting
2408  * from the next call to #gst_video_encoder_finish_frame().
2409  *
2410  * Returns: (transfer full): the newly configured output state.
2411  */
2412 GstVideoCodecState *
gst_video_encoder_set_output_state(GstVideoEncoder * encoder,GstCaps * caps,GstVideoCodecState * reference)2413 gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
2414     GstVideoCodecState * reference)
2415 {
2416   GstVideoEncoderPrivate *priv = encoder->priv;
2417   GstVideoCodecState *state;
2418 
2419   g_return_val_if_fail (caps != NULL, NULL);
2420 
2421   state = _new_output_state (caps, reference);
2422   if (!state)
2423     return NULL;
2424 
2425   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2426   if (priv->output_state)
2427     gst_video_codec_state_unref (priv->output_state);
2428   priv->output_state = gst_video_codec_state_ref (state);
2429 
2430   if (priv->output_state != NULL && priv->output_state->info.fps_n > 0) {
2431     priv->qos_frame_duration =
2432         gst_util_uint64_scale (GST_SECOND, priv->output_state->info.fps_d,
2433         priv->output_state->info.fps_n);
2434   } else {
2435     priv->qos_frame_duration = 0;
2436   }
2437 
2438   priv->output_state_changed = TRUE;
2439   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2440 
2441   return state;
2442 }
2443 
2444 /**
2445  * gst_video_encoder_set_latency:
2446  * @encoder: a #GstVideoEncoder
2447  * @min_latency: minimum latency
2448  * @max_latency: maximum latency
2449  *
2450  * Informs baseclass of encoding latency.
2451  */
void
gst_video_encoder_set_latency (GstVideoEncoder * encoder,
    GstClockTime min_latency, GstClockTime max_latency)
{
  /* A valid minimum and a maximum no smaller than it are required */
  g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
  g_return_if_fail (max_latency >= min_latency);

  /* Latency fields are protected by the object lock, not the stream lock */
  GST_OBJECT_LOCK (encoder);
  encoder->priv->min_latency = min_latency;
  encoder->priv->max_latency = max_latency;
  GST_OBJECT_UNLOCK (encoder);

  /* Tell the pipeline to requery and redistribute latency */
  gst_element_post_message (GST_ELEMENT_CAST (encoder),
      gst_message_new_latency (GST_OBJECT_CAST (encoder)));
}
2467 
2468 /**
2469  * gst_video_encoder_get_latency:
2470  * @encoder: a #GstVideoEncoder
2471  * @min_latency: (out) (allow-none): address of variable in which to store the
2472  *     configured minimum latency, or %NULL
2473  * @max_latency: (out) (allow-none): address of variable in which to store the
2474  *     configured maximum latency, or %NULL
2475  *
2476  * Query the configured encoding latency. Results will be returned via
2477  * @min_latency and @max_latency.
2478  */
void
gst_video_encoder_get_latency (GstVideoEncoder * encoder,
    GstClockTime * min_latency, GstClockTime * max_latency)
{
  /* Read both values under the object lock so they are consistent with
   * a concurrent gst_video_encoder_set_latency() */
  GST_OBJECT_LOCK (encoder);
  if (min_latency)
    *min_latency = encoder->priv->min_latency;
  if (max_latency)
    *max_latency = encoder->priv->max_latency;
  GST_OBJECT_UNLOCK (encoder);
}
2490 
2491 /**
2492  * gst_video_encoder_get_oldest_frame:
2493  * @encoder: a #GstVideoEncoder
2494  *
2495  * Get the oldest unfinished pending #GstVideoCodecFrame
2496  *
2497  * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame
2498  */
2499 GstVideoCodecFrame *
gst_video_encoder_get_oldest_frame(GstVideoEncoder * encoder)2500 gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
2501 {
2502   GstVideoCodecFrame *frame = NULL;
2503 
2504   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2505   if (encoder->priv->frames)
2506     frame = gst_video_codec_frame_ref (encoder->priv->frames->data);
2507   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2508 
2509   return (GstVideoCodecFrame *) frame;
2510 }
2511 
2512 /**
2513  * gst_video_encoder_get_frame:
2514  * @encoder: a #GstVideoEncoder
2515  * @frame_number: system_frame_number of a frame
2516  *
2517  * Get a pending unfinished #GstVideoCodecFrame
2518  *
2519  * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
2520  */
2521 GstVideoCodecFrame *
gst_video_encoder_get_frame(GstVideoEncoder * encoder,int frame_number)2522 gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
2523 {
2524   GList *g;
2525   GstVideoCodecFrame *frame = NULL;
2526 
2527   GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number);
2528 
2529   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2530   for (g = encoder->priv->frames; g; g = g->next) {
2531     GstVideoCodecFrame *tmp = g->data;
2532 
2533     if (tmp->system_frame_number == frame_number) {
2534       frame = gst_video_codec_frame_ref (tmp);
2535       break;
2536     }
2537   }
2538   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2539 
2540   return frame;
2541 }
2542 
2543 /**
2544  * gst_video_encoder_get_frames:
2545  * @encoder: a #GstVideoEncoder
2546  *
2547  * Get all pending unfinished #GstVideoCodecFrame
2548  *
2549  * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
2550  */
2551 GList *
gst_video_encoder_get_frames(GstVideoEncoder * encoder)2552 gst_video_encoder_get_frames (GstVideoEncoder * encoder)
2553 {
2554   GList *frames;
2555 
2556   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2557   frames = g_list_copy (encoder->priv->frames);
2558   g_list_foreach (frames, (GFunc) gst_video_codec_frame_ref, NULL);
2559   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2560 
2561   return frames;
2562 }
2563 
2564 /**
2565  * gst_video_encoder_merge_tags:
2566  * @encoder: a #GstVideoEncoder
2567  * @tags: (allow-none): a #GstTagList to merge, or NULL to unset
2568  *     previously-set tags
2569  * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
2570  *
2571  * Sets the video encoder tags and how they should be merged with any
2572  * upstream stream tags. This will override any tags previously-set
2573  * with gst_video_encoder_merge_tags().
2574  *
2575  * Note that this is provided for convenience, and the subclass is
2576  * not required to use this and can still do tag handling on its own.
2577  *
2578  * MT safe.
2579  */
2580 void
gst_video_encoder_merge_tags(GstVideoEncoder * encoder,const GstTagList * tags,GstTagMergeMode mode)2581 gst_video_encoder_merge_tags (GstVideoEncoder * encoder,
2582     const GstTagList * tags, GstTagMergeMode mode)
2583 {
2584   g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
2585   g_return_if_fail (tags == NULL || GST_IS_TAG_LIST (tags));
2586   g_return_if_fail (tags == NULL || mode != GST_TAG_MERGE_UNDEFINED);
2587 
2588   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2589   if (encoder->priv->tags != tags) {
2590     if (encoder->priv->tags) {
2591       gst_tag_list_unref (encoder->priv->tags);
2592       encoder->priv->tags = NULL;
2593       encoder->priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
2594     }
2595     if (tags) {
2596       encoder->priv->tags = gst_tag_list_ref ((GstTagList *) tags);
2597       encoder->priv->tags_merge_mode = mode;
2598     }
2599 
2600     GST_DEBUG_OBJECT (encoder, "setting encoder tags to %" GST_PTR_FORMAT,
2601         tags);
2602     encoder->priv->tags_changed = TRUE;
2603   }
2604   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2605 }
2606 
2607 /**
2608  * gst_video_encoder_get_allocator:
2609  * @encoder: a #GstVideoEncoder
2610  * @allocator: (out) (allow-none) (transfer full): the #GstAllocator
2611  * used
2612  * @params: (out) (allow-none) (transfer full): the
2613  * #GstAllocationParams of @allocator
2614  *
2615  * Lets #GstVideoEncoder sub-classes to know the memory @allocator
2616  * used by the base class and its @params.
2617  *
2618  * Unref the @allocator after use it.
2619  */
2620 void
gst_video_encoder_get_allocator(GstVideoEncoder * encoder,GstAllocator ** allocator,GstAllocationParams * params)2621 gst_video_encoder_get_allocator (GstVideoEncoder * encoder,
2622     GstAllocator ** allocator, GstAllocationParams * params)
2623 {
2624   g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
2625 
2626   if (allocator)
2627     *allocator = encoder->priv->allocator ?
2628         gst_object_ref (encoder->priv->allocator) : NULL;
2629 
2630   if (params)
2631     *params = encoder->priv->params;
2632 }
2633 
2634 /**
2635  * gst_video_encoder_set_min_pts:
2636  * @encoder: a #GstVideoEncoder
2637  * @min_pts: minimal PTS that will be passed to handle_frame
2638  *
2639  * Request minimal value for PTS passed to handle_frame.
2640  *
2641  * For streams with reordered frames this can be used to ensure that there
 * is enough time to accommodate the first DTS, which may be less than the first PTS
2643  *
2644  * Since: 1.6
2645  */
void
gst_video_encoder_set_min_pts (GstVideoEncoder * encoder, GstClockTime min_pts)
{
  g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
  encoder->priv->min_pts = min_pts;
  /* Invalidate the cached adjustment so it is recomputed for the next
   * input frame.  NOTE(review): these fields are written without taking
   * the stream lock -- presumably only called from set_format/start
   * context; confirm against callers. */
  encoder->priv->time_adjustment = GST_CLOCK_TIME_NONE;
}
2653 
2654 /**
2655  * gst_video_encoder_get_max_encode_time:
2656  * @encoder: a #GstVideoEncoder
2657  * @frame: a #GstVideoCodecFrame
2658  *
2659  * Determines maximum possible encoding time for @frame that will
2660  * allow it to encode and arrive in time (as determined by QoS events).
2661  * In particular, a negative result means encoding in time is no longer possible
2662  * and should therefore occur as soon/skippy as possible.
2663  *
2664  * If no QoS events have been received from downstream, or if
2665  * #GstVideoEncoder:qos is disabled this function returns #G_MAXINT64.
2666  *
 * Returns: max encoding time.
2668  * Since: 1.14
2669  */
2670 GstClockTimeDiff
gst_video_encoder_get_max_encode_time(GstVideoEncoder * encoder,GstVideoCodecFrame * frame)2671 gst_video_encoder_get_max_encode_time (GstVideoEncoder *
2672     encoder, GstVideoCodecFrame * frame)
2673 {
2674   GstClockTimeDiff deadline;
2675   GstClockTime earliest_time;
2676 
2677   if (!g_atomic_int_get (&encoder->priv->qos_enabled))
2678     return G_MAXINT64;
2679 
2680   GST_OBJECT_LOCK (encoder);
2681   earliest_time = encoder->priv->earliest_time;
2682   if (GST_CLOCK_TIME_IS_VALID (earliest_time)
2683       && GST_CLOCK_TIME_IS_VALID (frame->deadline))
2684     deadline = GST_CLOCK_DIFF (earliest_time, frame->deadline);
2685   else
2686     deadline = G_MAXINT64;
2687 
2688   GST_LOG_OBJECT (encoder, "earliest %" GST_TIME_FORMAT
2689       ", frame deadline %" GST_TIME_FORMAT ", deadline %" GST_STIME_FORMAT,
2690       GST_TIME_ARGS (earliest_time), GST_TIME_ARGS (frame->deadline),
2691       GST_STIME_ARGS (deadline));
2692 
2693   GST_OBJECT_UNLOCK (encoder);
2694 
2695   return deadline;
2696 }
2697 
2698 /**
2699  * gst_video_encoder_set_qos_enabled:
2700  * @encoder: the encoder
2701  * @enabled: the new qos value.
2702  *
2703  * Configures @encoder to handle Quality-of-Service events from downstream.
2704  * Since: 1.14
2705  */
void
gst_video_encoder_set_qos_enabled (GstVideoEncoder * encoder, gboolean enabled)
{
  g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));

  /* Atomic store: may be flipped while streaming without extra locking */
  g_atomic_int_set (&encoder->priv->qos_enabled, enabled);
}
2713 
2714 /**
2715  * gst_video_encoder_is_qos_enabled:
2716  * @encoder: the encoder
2717  *
2718  * Checks if @encoder is currently configured to handle Quality-of-Service
2719  * events from downstream.
2720  *
2721  * Returns: %TRUE if the encoder is configured to perform Quality-of-Service.
2722  * Since: 1.14
2723  */
2724 gboolean
gst_video_encoder_is_qos_enabled(GstVideoEncoder * encoder)2725 gst_video_encoder_is_qos_enabled (GstVideoEncoder * encoder)
2726 {
2727   gboolean res;
2728 
2729   g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
2730 
2731   res = g_atomic_int_get (&encoder->priv->qos_enabled);
2732 
2733   return res;
2734 }
2735