• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* VP8
2  * Copyright (C) 2006 David Schleef <ds@schleef.org>
3  * Copyright (C) 2010 Entropy Wave Inc
4  * Copyright (C) 2010-2012 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Library General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Library General Public License for more details.
15  *
16  * You should have received a copy of the GNU Library General Public
17  * License along with this library; if not, write to the
18  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
19  * Boston, MA 02110-1301, USA.
20  *
21  */
22 /**
23  * SECTION:element-vp8enc
24  * @title: vp8enc
25  * @see_also: vp8dec, webmmux, oggmux
26  *
27  * This element encodes raw video into a VP8 stream.
28  * [VP8](http://www.webmproject.org) is a royalty-free video codec maintained by
29  * [Google](http://www.google.com/). It's the successor of On2 VP3, which was
30  * the base of the Theora video codec.
31  *
32  * To control the quality of the encoding, the #GstVPXEnc:target-bitrate,
33  * #GstVPXEnc:min-quantizer, #GstVPXEnc:max-quantizer or #GstVPXEnc:cq-level
34  * properties can be used. Which one is used depends on the mode selected by
35  * the #GstVPXEnc:end-usage property.
36  * See [Encoder Parameters](http://www.webmproject.org/docs/encoder-parameters/)
37  * for explanation, examples for useful encoding parameters and more details
38  * on the encoding parameters.
39  *
40  * ## Example pipeline
41  * |[
42  * gst-launch-1.0 -v videotestsrc num-buffers=1000 ! vp8enc ! webmmux ! filesink location=videotestsrc.webm
43  * ]| This example pipeline will encode a test video source to VP8 muxed in an
44  * WebM container.
45  *
46  */
47 
48 #ifdef HAVE_CONFIG_H
49 #include "config.h"
50 #endif
51 
52 #ifdef HAVE_VP8_ENCODER
53 
/* glib decided in 2.32 it would be a great idea to deprecate GValueArray
 * without providing an alternative
 *
 * See https://bugzilla.gnome.org/show_bug.cgi?id=667228
 * */
59 #define GLIB_DISABLE_DEPRECATION_WARNINGS
60 
61 #include <gst/tag/tag.h>
62 #include <gst/video/video.h>
63 #include <string.h>
64 
65 #include "gstvpxelements.h"
66 #include "gstvp8utils.h"
67 #include "gstvp8enc.h"
68 
69 GST_DEBUG_CATEGORY_STATIC (gst_vp8enc_debug);
70 #define GST_CAT_DEFAULT gst_vp8enc_debug
71 
72 typedef struct
73 {
74   vpx_image_t *image;
75   GList *invisible;
76   guint layer_id;
77   guint8 tl0picidx;
78   gboolean layer_sync;
79 } GstVP8EncUserData;
80 
81 static void
_gst_mini_object_unref0(GstMiniObject * obj)82 _gst_mini_object_unref0 (GstMiniObject * obj)
83 {
84   if (obj)
85     gst_mini_object_unref (obj);
86 }
87 
88 static void
gst_vp8_enc_user_data_free(GstVP8EncUserData * user_data)89 gst_vp8_enc_user_data_free (GstVP8EncUserData * user_data)
90 {
91   if (user_data->image)
92     g_slice_free (vpx_image_t, user_data->image);
93 
94   g_list_foreach (user_data->invisible, (GFunc) _gst_mini_object_unref0, NULL);
95   g_list_free (user_data->invisible);
96   g_slice_free (GstVP8EncUserData, user_data);
97 }
98 
99 static vpx_codec_iface_t *gst_vp8_enc_get_algo (GstVPXEnc * enc);
100 static gboolean gst_vp8_enc_enable_scaling (GstVPXEnc * enc);
101 static void gst_vp8_enc_set_image_format (GstVPXEnc * enc, vpx_image_t * image);
102 static GstCaps *gst_vp8_enc_get_new_simple_caps (GstVPXEnc * enc);
103 static void gst_vp8_enc_set_stream_info (GstVPXEnc * enc, GstCaps * caps,
104     GstVideoInfo * info);
105 static void *gst_vp8_enc_process_frame_user_data (GstVPXEnc * enc,
106     GstVideoCodecFrame * frame);
107 static GstFlowReturn gst_vp8_enc_handle_invisible_frame_buffer (GstVPXEnc * enc,
108     void *user_data, GstBuffer * buffer);
109 static void gst_vp8_enc_set_frame_user_data (GstVPXEnc * enc,
110     GstVideoCodecFrame * frame, vpx_image_t * image);
111 static void gst_vp8_enc_apply_frame_temporal_settings (GstVPXEnc * enc,
112     GstVideoCodecFrame * frame, guint layer_id, guint8 tl0picidx,
113     gboolean layer_sync);
114 static void gst_vp8_enc_get_frame_temporal_settings (GstVPXEnc * enc,
115     GstVideoCodecFrame * frame, guint * layer_id, guint8 * tl0picidx,
116     gboolean * layer_sync);
117 static void gst_vp8_enc_preflight_buffer (GstVPXEnc * enc,
118     GstVideoCodecFrame * frame, GstBuffer * buffer,
119     gboolean layer_sync, guint layer_id, guint8 tl0picidx);
120 
121 static GstFlowReturn gst_vp8_enc_pre_push (GstVideoEncoder * encoder,
122     GstVideoCodecFrame * frame);
123 
124 static GstStaticPadTemplate gst_vp8_enc_sink_template =
125 GST_STATIC_PAD_TEMPLATE ("sink",
126     GST_PAD_SINK,
127     GST_PAD_ALWAYS,
128     GST_STATIC_CAPS ("video/x-raw, "
129         "format = (string) \"I420\", "
130         "width = (int) [1, 16383], "
131         "height = (int) [1, 16383], framerate = (fraction) [ 0/1, MAX ]")
132     );
133 
134 static GstStaticPadTemplate gst_vp8_enc_src_template =
135 GST_STATIC_PAD_TEMPLATE ("src",
136     GST_PAD_SRC,
137     GST_PAD_ALWAYS,
138     GST_STATIC_CAPS ("video/x-vp8, " "profile = (string) {0, 1, 2, 3}")
139     );
140 
141 #define parent_class gst_vp8_enc_parent_class
142 G_DEFINE_TYPE (GstVP8Enc, gst_vp8_enc, GST_TYPE_VPX_ENC);
143 GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (vp8enc, "vp8enc", GST_RANK_PRIMARY,
144     gst_vp8_enc_get_type (), vpx_element_init (plugin));
145 
146 static void
gst_vp8_enc_class_init(GstVP8EncClass * klass)147 gst_vp8_enc_class_init (GstVP8EncClass * klass)
148 {
149   GstElementClass *element_class;
150   GstVideoEncoderClass *video_encoder_class;
151   GstVPXEncClass *vpx_encoder_class;
152 
153   element_class = GST_ELEMENT_CLASS (klass);
154   video_encoder_class = GST_VIDEO_ENCODER_CLASS (klass);
155   vpx_encoder_class = GST_VPX_ENC_CLASS (klass);
156 
157 
158   gst_element_class_add_static_pad_template (element_class,
159       &gst_vp8_enc_src_template);
160   gst_element_class_add_static_pad_template (element_class,
161       &gst_vp8_enc_sink_template);
162 
163   gst_element_class_set_static_metadata (element_class,
164       "On2 VP8 Encoder",
165       "Codec/Encoder/Video",
166       "Encode VP8 video streams", "David Schleef <ds@entropywave.com>, "
167       "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
168 
169   video_encoder_class->pre_push = gst_vp8_enc_pre_push;
170 
171   vpx_encoder_class->get_algo = gst_vp8_enc_get_algo;
172   vpx_encoder_class->enable_scaling = gst_vp8_enc_enable_scaling;
173   vpx_encoder_class->set_image_format = gst_vp8_enc_set_image_format;
174   vpx_encoder_class->get_new_vpx_caps = gst_vp8_enc_get_new_simple_caps;
175   vpx_encoder_class->set_stream_info = gst_vp8_enc_set_stream_info;
176   vpx_encoder_class->process_frame_user_data =
177       gst_vp8_enc_process_frame_user_data;
178   vpx_encoder_class->handle_invisible_frame_buffer =
179       gst_vp8_enc_handle_invisible_frame_buffer;
180   vpx_encoder_class->set_frame_user_data = gst_vp8_enc_set_frame_user_data;
181   vpx_encoder_class->apply_frame_temporal_settings =
182       gst_vp8_enc_apply_frame_temporal_settings;
183   vpx_encoder_class->get_frame_temporal_settings =
184       gst_vp8_enc_get_frame_temporal_settings;
185   vpx_encoder_class->preflight_buffer = gst_vp8_enc_preflight_buffer;
186 
187   GST_DEBUG_CATEGORY_INIT (gst_vp8enc_debug, "vp8enc", 0, "VP8 Encoder");
188 }
189 
190 static void
gst_vp8_enc_init(GstVP8Enc * gst_vp8_enc)191 gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc)
192 {
193   vpx_codec_err_t status;
194   GstVPXEnc *gst_vpx_enc = GST_VPX_ENC (gst_vp8_enc);
195   GST_DEBUG_OBJECT (gst_vp8_enc, "gst_vp8_enc_init");
196   status =
197       vpx_codec_enc_config_default (gst_vp8_enc_get_algo (gst_vpx_enc),
198       &gst_vpx_enc->cfg, 0);
199   if (status != VPX_CODEC_OK) {
200     GST_ERROR_OBJECT (gst_vpx_enc,
201         "Failed to get default encoder configuration: %s",
202         gst_vpx_error_name (status));
203     gst_vpx_enc->have_default_config = FALSE;
204   } else {
205     gst_vpx_enc->have_default_config = TRUE;
206   }
207 }
208 
209 static vpx_codec_iface_t *
gst_vp8_enc_get_algo(GstVPXEnc * enc)210 gst_vp8_enc_get_algo (GstVPXEnc * enc)
211 {
212   return &vpx_codec_vp8_cx_algo;
213 }
214 
215 static gboolean
gst_vp8_enc_enable_scaling(GstVPXEnc * enc)216 gst_vp8_enc_enable_scaling (GstVPXEnc * enc)
217 {
218   return TRUE;
219 }
220 
221 static void
gst_vp8_enc_set_image_format(GstVPXEnc * enc,vpx_image_t * image)222 gst_vp8_enc_set_image_format (GstVPXEnc * enc, vpx_image_t * image)
223 {
224   image->fmt = VPX_IMG_FMT_I420;
225   image->bps = 12;
226   image->x_chroma_shift = image->y_chroma_shift = 1;
227 }
228 
229 static GstCaps *
gst_vp8_enc_get_new_simple_caps(GstVPXEnc * enc)230 gst_vp8_enc_get_new_simple_caps (GstVPXEnc * enc)
231 {
232   GstCaps *caps;
233   gchar *profile_str = g_strdup_printf ("%d", enc->cfg.g_profile);
234   caps = gst_caps_new_simple ("video/x-vp8",
235       "profile", G_TYPE_STRING, profile_str, NULL);
236   g_free (profile_str);
237   return caps;
238 }
239 
240 static void
gst_vp8_enc_set_stream_info(GstVPXEnc * enc,GstCaps * caps,GstVideoInfo * info)241 gst_vp8_enc_set_stream_info (GstVPXEnc * enc, GstCaps * caps,
242     GstVideoInfo * info)
243 {
244   GstStructure *s;
245   GstVideoEncoder *video_encoder;
246   GstBuffer *stream_hdr, *vorbiscomment;
247   const GstTagList *iface_tags;
248   GValue array = { 0, };
249   GValue value = { 0, };
250   guint8 *data = NULL;
251   GstMapInfo map;
252 
253   video_encoder = GST_VIDEO_ENCODER (enc);
254   s = gst_caps_get_structure (caps, 0);
255 
256   /* put buffers in a fixed list */
257   g_value_init (&array, GST_TYPE_ARRAY);
258   g_value_init (&value, GST_TYPE_BUFFER);
259 
260   /* Create Ogg stream-info */
261   stream_hdr = gst_buffer_new_and_alloc (26);
262   gst_buffer_map (stream_hdr, &map, GST_MAP_WRITE);
263   data = map.data;
264 
265   GST_WRITE_UINT8 (data, 0x4F);
266   GST_WRITE_UINT32_BE (data + 1, 0x56503830);   /* "VP80" */
267   GST_WRITE_UINT8 (data + 5, 0x01);     /* stream info header */
268   GST_WRITE_UINT8 (data + 6, 1);        /* Major version 1 */
269   GST_WRITE_UINT8 (data + 7, 0);        /* Minor version 0 */
270   GST_WRITE_UINT16_BE (data + 8, GST_VIDEO_INFO_WIDTH (info));
271   GST_WRITE_UINT16_BE (data + 10, GST_VIDEO_INFO_HEIGHT (info));
272   GST_WRITE_UINT24_BE (data + 12, GST_VIDEO_INFO_PAR_N (info));
273   GST_WRITE_UINT24_BE (data + 15, GST_VIDEO_INFO_PAR_D (info));
274   GST_WRITE_UINT32_BE (data + 18, GST_VIDEO_INFO_FPS_N (info));
275   GST_WRITE_UINT32_BE (data + 22, GST_VIDEO_INFO_FPS_D (info));
276 
277   gst_buffer_unmap (stream_hdr, &map);
278 
279   GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_HEADER);
280   gst_value_set_buffer (&value, stream_hdr);
281   gst_value_array_append_value (&array, &value);
282   g_value_unset (&value);
283   gst_buffer_unref (stream_hdr);
284 
285   iface_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (video_encoder));
286   if (iface_tags) {
287     vorbiscomment =
288         gst_tag_list_to_vorbiscomment_buffer (iface_tags,
289         (const guint8 *) "OVP80\2 ", 7,
290         "Encoded with GStreamer vp8enc " PACKAGE_VERSION);
291 
292     GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_HEADER);
293 
294     g_value_init (&value, GST_TYPE_BUFFER);
295     gst_value_set_buffer (&value, vorbiscomment);
296     gst_value_array_append_value (&array, &value);
297     g_value_unset (&value);
298     gst_buffer_unref (vorbiscomment);
299   }
300 
301   gst_structure_set_value (s, "streamheader", &array);
302   g_value_unset (&array);
303 
304 }
305 
306 static void *
gst_vp8_enc_process_frame_user_data(GstVPXEnc * enc,GstVideoCodecFrame * frame)307 gst_vp8_enc_process_frame_user_data (GstVPXEnc * enc,
308     GstVideoCodecFrame * frame)
309 {
310   GstVP8EncUserData *user_data;
311 
312   user_data = gst_video_codec_frame_get_user_data (frame);
313 
314   if (!user_data) {
315     GST_ERROR_OBJECT (enc, "Have no frame user data");
316     return NULL;
317   }
318 
319   if (user_data->image)
320     g_slice_free (vpx_image_t, user_data->image);
321   user_data->image = NULL;
322   return user_data;
323 }
324 
325 static GstFlowReturn
gst_vp8_enc_handle_invisible_frame_buffer(GstVPXEnc * enc,void * user_data,GstBuffer * buffer)326 gst_vp8_enc_handle_invisible_frame_buffer (GstVPXEnc * enc, void *user_data,
327     GstBuffer * buffer)
328 {
329   GstVP8EncUserData *vp8_user_data = (GstVP8EncUserData *) user_data;
330 
331   if (!vp8_user_data) {
332     GST_ERROR_OBJECT (enc, "Have no frame user data");
333     return GST_FLOW_ERROR;
334   }
335 
336   vp8_user_data->invisible = g_list_append (vp8_user_data->invisible, buffer);
337 
338   return GST_FLOW_OK;
339 }
340 
341 static void
gst_vp8_enc_set_frame_user_data(GstVPXEnc * enc,GstVideoCodecFrame * frame,vpx_image_t * image)342 gst_vp8_enc_set_frame_user_data (GstVPXEnc * enc, GstVideoCodecFrame * frame,
343     vpx_image_t * image)
344 {
345   GstVP8EncUserData *user_data;
346   user_data = g_slice_new0 (GstVP8EncUserData);
347   user_data->image = image;
348   gst_video_codec_frame_set_user_data (frame, user_data,
349       (GDestroyNotify) gst_vp8_enc_user_data_free);
350   return;
351 }
352 
353 static void
gst_vp8_enc_apply_frame_temporal_settings(GstVPXEnc * enc,GstVideoCodecFrame * frame,guint layer_id,guint8 tl0picidx,gboolean layer_sync)354 gst_vp8_enc_apply_frame_temporal_settings (GstVPXEnc * enc,
355     GstVideoCodecFrame * frame, guint layer_id, guint8 tl0picidx,
356     gboolean layer_sync)
357 {
358   GstVP8EncUserData *user_data;
359 
360   user_data = gst_video_codec_frame_get_user_data (frame);
361 
362   if (!user_data) {
363     GST_ERROR_OBJECT (enc, "Have no frame user data");
364     return;
365   }
366 
367   vpx_codec_control (&enc->encoder, VP8E_SET_TEMPORAL_LAYER_ID, layer_id);
368   user_data->layer_id = layer_id;
369   user_data->tl0picidx = tl0picidx;
370   user_data->layer_sync = layer_sync;
371 
372   return;
373 }
374 
375 static void
gst_vp8_enc_get_frame_temporal_settings(GstVPXEnc * enc,GstVideoCodecFrame * frame,guint * layer_id,guint8 * tl0picidx,gboolean * layer_sync)376 gst_vp8_enc_get_frame_temporal_settings (GstVPXEnc * enc,
377     GstVideoCodecFrame * frame, guint * layer_id, guint8 * tl0picidx,
378     gboolean * layer_sync)
379 {
380   GstVP8EncUserData *user_data;
381 
382   user_data = gst_video_codec_frame_get_user_data (frame);
383 
384   if (!user_data) {
385     GST_ERROR_OBJECT (enc, "Have no frame user data");
386     *layer_id = 0;
387     *tl0picidx = 0;
388     *layer_sync = FALSE;
389     return;
390   }
391 
392   *layer_id = user_data->layer_id;
393   *tl0picidx = user_data->tl0picidx;
394   *layer_sync = user_data->layer_sync;
395 
396   return;
397 }
398 
399 static void
gst_vp8_enc_preflight_buffer(GstVPXEnc * enc,GstVideoCodecFrame * frame,GstBuffer * buffer,gboolean layer_sync,guint layer_id,guint8 tl0picidx)400 gst_vp8_enc_preflight_buffer (GstVPXEnc * enc,
401     GstVideoCodecFrame * frame, GstBuffer * buffer,
402     gboolean layer_sync, guint layer_id, guint8 tl0picidx)
403 {
404   GstCustomMeta *meta = gst_buffer_add_custom_meta (buffer, "GstVP8Meta");
405   GstStructure *s = gst_custom_meta_get_structure (meta);
406 
407   gst_structure_set (s,
408       "use-temporal-scaling", G_TYPE_BOOLEAN, (enc->cfg.ts_periodicity != 0),
409       "layer-sync", G_TYPE_BOOLEAN, layer_sync,
410       "layer-id", G_TYPE_UINT, layer_id,
411       "tl0picidx", G_TYPE_UINT, tl0picidx, NULL);
412 }
413 
414 static guint64
_to_granulepos(guint64 frame_end_number,guint inv_count,guint keyframe_dist)415 _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
416 {
417   guint64 granulepos;
418   guint inv;
419 
420   inv = (inv_count == 0) ? 0x3 : inv_count - 1;
421 
422   granulepos = (frame_end_number << 32) | (inv << 30) | (keyframe_dist << 3);
423   return granulepos;
424 }
425 
426 static GstFlowReturn
gst_vp8_enc_pre_push(GstVideoEncoder * video_encoder,GstVideoCodecFrame * frame)427 gst_vp8_enc_pre_push (GstVideoEncoder * video_encoder,
428     GstVideoCodecFrame * frame)
429 {
430   GstVP8Enc *encoder;
431   GstVPXEnc *vpx_enc;
432   GstBuffer *buf;
433   GstFlowReturn ret = GST_FLOW_OK;
434   GstVP8EncUserData *user_data = gst_video_codec_frame_get_user_data (frame);
435   GList *l;
436   gint inv_count;
437   GstVideoInfo *info;
438 
439   GST_DEBUG_OBJECT (video_encoder, "pre_push");
440 
441   encoder = GST_VP8_ENC (video_encoder);
442   vpx_enc = GST_VPX_ENC (encoder);
443 
444   info = &vpx_enc->input_state->info;
445 
446   g_assert (user_data != NULL);
447 
448   for (inv_count = 0, l = user_data->invisible; l; inv_count++, l = l->next) {
449     buf = l->data;
450     l->data = NULL;
451 
452     /* FIXME : All of this should have already been handled by base classes, no ? */
453     if (l == user_data->invisible
454         && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
455       GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
456       encoder->keyframe_distance = 0;
457     } else {
458       GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
459       encoder->keyframe_distance++;
460     }
461 
462     GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY);
463     GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->output_buffer);
464     GST_BUFFER_DURATION (buf) = 0;
465     if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
466       GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
467       GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
468     } else {
469       GST_BUFFER_OFFSET_END (buf) =
470           _to_granulepos (frame->presentation_frame_number + 1,
471           inv_count, encoder->keyframe_distance);
472       GST_BUFFER_OFFSET (buf) =
473           gst_util_uint64_scale (frame->presentation_frame_number + 1,
474           GST_SECOND * GST_VIDEO_INFO_FPS_D (info),
475           GST_VIDEO_INFO_FPS_N (info));
476     }
477 
478     ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (video_encoder), buf);
479 
480     if (ret != GST_FLOW_OK) {
481       GST_WARNING_OBJECT (encoder, "flow error %d", ret);
482       goto done;
483     }
484   }
485 
486   buf = frame->output_buffer;
487 
488   /* FIXME : All of this should have already been handled by base classes, no ? */
489   if (!user_data->invisible && GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
490     GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
491     encoder->keyframe_distance = 0;
492   } else {
493     GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
494     encoder->keyframe_distance++;
495   }
496 
497   if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) {
498     GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
499     GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
500   } else {
501     GST_BUFFER_OFFSET_END (buf) =
502         _to_granulepos (frame->presentation_frame_number + 1, 0,
503         encoder->keyframe_distance);
504     GST_BUFFER_OFFSET (buf) =
505         gst_util_uint64_scale (frame->presentation_frame_number + 1,
506         GST_SECOND * GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));
507   }
508 
509   GST_LOG_OBJECT (video_encoder, "src ts: %" GST_TIME_FORMAT,
510       GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
511 
512 done:
513   return ret;
514 }
515 
516 #endif /* HAVE_VP8_ENCODER */
517