/* Small helper element for format conversion
 * Copyright (C) 2005 Tim-Philipp Müller <tim centricular net>
 * Copyright (C) 2010 Brandon Lewis <brandon.lewis@collabora.co.uk>
 * Copyright (C) 2010 Edward Hervey <edward.hervey@collabora.co.uk>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <string.h>
#include "video.h"

static gboolean
caps_are_raw (const GstCaps * caps)
{
  guint i, len;

  len = gst_caps_get_size (caps);

  for (i = 0; i < len; i++) {
    GstStructure *st = gst_caps_get_structure (caps, i);
    if (gst_structure_has_name (st, "video/x-raw"))
      return TRUE;
  }

  return FALSE;
}

static gboolean
create_element (const gchar * factory_name, GstElement ** element,
    GError ** err)
{
  *element = gst_element_factory_make (factory_name, NULL);
  if (*element)
    return TRUE;

  if (err && *err == NULL) {
    *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN,
        "cannot create element '%s' - please check your GStreamer installation",
        factory_name);
  }

  return FALSE;
}

static GstElement *
get_encoder (const GstCaps * caps, GError ** err)
{
  GList *encoders = NULL;
  GList *filtered = NULL;
  GstElementFactory *factory = NULL;
  GstElement *encoder = NULL;

  encoders =
      gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_ENCODER |
      GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE, GST_RANK_NONE);

  if (encoders == NULL) {
    *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN,
        "Cannot find any image encoder");
    goto fail;
  }

  GST_INFO ("got factory list %p", encoders);
  gst_plugin_feature_list_debug (encoders);

  filtered =
      gst_element_factory_list_filter (encoders, caps, GST_PAD_SRC, FALSE);
  GST_INFO ("got filtered list %p", filtered);

  if (filtered == NULL) {
    gchar *tmp = gst_caps_to_string (caps);
    *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN,
        "Cannot find any image encoder for caps %s", tmp);
    g_free (tmp);
    goto fail;
  }

  gst_plugin_feature_list_debug (filtered);

  factory = (GstElementFactory *) filtered->data;

  GST_INFO ("got factory %p", factory);
  encoder = gst_element_factory_create (factory, NULL);

  GST_INFO ("created encoder element %p, %s", encoder,
      GST_ELEMENT_NAME (encoder));

fail:
  if (encoders)
    gst_plugin_feature_list_free (encoders);
  if (filtered)
    gst_plugin_feature_list_free (filtered);

  return encoder;
}

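/* For reference, build_convert_frame_pipeline() below assembles (roughly):
 *
 *   appsrc ! videoconvert ! videocrop ! videoconvert ! videoscale ! appsink
 *
 * when the input buffer carries crop metadata, and
 *
 *   appsrc ! videoconvert ! videoscale ! appsink
 *
 * otherwise. If the requested output caps are not raw video, an image
 * encoder picked by get_encoder() is linked between videoscale and appsink.
 */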
static GstElement *
build_convert_frame_pipeline (GstElement ** src_element,
    GstElement ** sink_element, const GstCaps * from_caps,
    GstVideoCropMeta * cmeta, const GstCaps * to_caps, GError ** err)
{
  GstElement *vcrop = NULL, *csp = NULL, *csp2 = NULL, *vscale = NULL;
  GstElement *src = NULL, *sink = NULL, *encoder = NULL, *pipeline;
  GstVideoInfo info;
  GError *error = NULL;

  if (cmeta) {
    if (!create_element ("videocrop", &vcrop, &error)) {
      /* clear (not just free) the error so later create_element() calls
       * can still set it if they fail as well */
      g_clear_error (&error);
      g_warning
          ("build_convert_frame_pipeline: Buffer has crop metadata but videocrop element is not found. Cropping will be disabled");
    } else {
      if (!create_element ("videoconvert", &csp2, &error))
        goto no_elements;
    }
  }

  /* videoscale is here to correct for the pixel-aspect-ratio for us */
  GST_DEBUG ("creating elements");
  if (!create_element ("appsrc", &src, &error) ||
      !create_element ("videoconvert", &csp, &error) ||
      !create_element ("videoscale", &vscale, &error) ||
      !create_element ("appsink", &sink, &error))
    goto no_elements;

  pipeline = gst_pipeline_new ("videoconvert-pipeline");
  if (pipeline == NULL)
    goto no_pipeline;

  /* Add black borders if necessary to keep the DAR */
  g_object_set (vscale, "add-borders", TRUE, NULL);

  GST_DEBUG ("adding elements");
  gst_bin_add_many (GST_BIN (pipeline), src, csp, vscale, sink, NULL);
  if (vcrop)
    gst_bin_add_many (GST_BIN (pipeline), vcrop, csp2, NULL);

  /* set caps */
  g_object_set (src, "caps", from_caps, NULL);
  if (vcrop) {
    gst_video_info_from_caps (&info, from_caps);
    g_object_set (vcrop, "left", cmeta->x, NULL);
    g_object_set (vcrop, "top", cmeta->y, NULL);
    g_object_set (vcrop, "right", GST_VIDEO_INFO_WIDTH (&info) - cmeta->width,
        NULL);
    g_object_set (vcrop, "bottom",
        GST_VIDEO_INFO_HEIGHT (&info) - cmeta->height, NULL);
    GST_DEBUG ("crop meta [x,y,width,height]: %d %d %d %d", cmeta->x, cmeta->y,
        cmeta->width, cmeta->height);
  }
  g_object_set (sink, "caps", to_caps, NULL);

  /* FIXME: linking is still way too expensive, profile this properly */
  if (vcrop) {
    GST_DEBUG ("linking src->csp2");
    if (!gst_element_link_pads (src, "src", csp2, "sink"))
      goto link_failed;

    GST_DEBUG ("linking csp2->vcrop");
    if (!gst_element_link_pads (csp2, "src", vcrop, "sink"))
      goto link_failed;

    GST_DEBUG ("linking vcrop->csp");
    if (!gst_element_link_pads (vcrop, "src", csp, "sink"))
      goto link_failed;
  } else {
    GST_DEBUG ("linking src->csp");
    if (!gst_element_link_pads (src, "src", csp, "sink"))
      goto link_failed;
  }

  GST_DEBUG ("linking csp->vscale");
  if (!gst_element_link_pads_full (csp, "src", vscale, "sink",
          GST_PAD_LINK_CHECK_NOTHING))
    goto link_failed;

  if (caps_are_raw (to_caps)) {
    GST_DEBUG ("linking vscale->sink");

    if (!gst_element_link_pads_full (vscale, "src", sink, "sink",
            GST_PAD_LINK_CHECK_NOTHING))
      goto link_failed;
  } else {
    encoder = get_encoder (to_caps, &error);
    if (!encoder)
      goto no_encoder;
    gst_bin_add (GST_BIN (pipeline), encoder);

    GST_DEBUG ("linking vscale->encoder");
    if (!gst_element_link (vscale, encoder))
      goto link_failed;

    GST_DEBUG ("linking encoder->sink");
    if (!gst_element_link_pads (encoder, "src", sink, "sink"))
      goto link_failed;
  }

  g_object_set (src, "emit-signals", TRUE, NULL);
  g_object_set (sink, "emit-signals", TRUE, NULL);

  *src_element = src;
  *sink_element = sink;

  return pipeline;
  /* ERRORS */
no_encoder:
  {
    gst_object_unref (pipeline);

    GST_ERROR ("could not find an encoder for provided caps");
    if (err)
      *err = error;
    else
      g_error_free (error);

    return NULL;
  }
no_elements:
  {
    if (src)
      gst_object_unref (src);
    if (vcrop)
      gst_object_unref (vcrop);
    if (csp)
      gst_object_unref (csp);
    if (csp2)
      gst_object_unref (csp2);
    if (vscale)
      gst_object_unref (vscale);
    if (sink)
      gst_object_unref (sink);
    GST_ERROR ("Could not convert video frame: %s", error->message);
    if (err)
      *err = error;
    else
      g_error_free (error);
    return NULL;
  }
no_pipeline:
  {
    gst_object_unref (src);
    if (vcrop)
      gst_object_unref (vcrop);
    gst_object_unref (csp);
    if (csp2)
      gst_object_unref (csp2);
    gst_object_unref (vscale);
    gst_object_unref (sink);

    GST_ERROR ("Could not convert video frame: no pipeline (unknown error)");
    if (err)
      *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
          "Could not convert video frame: no pipeline (unknown error)");
    return NULL;
  }
link_failed:
  {
    gst_object_unref (pipeline);

    GST_ERROR ("Could not convert video frame: failed to link elements");
    if (err)
      *err = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
          "Could not convert video frame: failed to link elements");
    return NULL;
  }
}

/**
 * gst_video_convert_sample:
 * @sample: a #GstSample
 * @to_caps: the #GstCaps to convert to
 * @timeout: the maximum amount of time allowed for the processing.
 * @error: pointer to a #GError. Can be %NULL.
 *
 * Converts a raw video buffer into the specified output caps.
 *
 * The output caps can be any raw video formats or any image formats (jpeg, png, ...).
 *
 * The width, height and pixel-aspect-ratio can also be specified in the output caps.
 *
 * Returns: The converted #GstSample, or %NULL if an error happened (in which case
 * @error will point to the #GError).
 */
GstSample *
gst_video_convert_sample (GstSample * sample, const GstCaps * to_caps,
    GstClockTime timeout, GError ** error)
{
  GstMessage *msg;
  GstBuffer *buf;
  GstSample *result = NULL;
  GError *err = NULL;
  GstBus *bus;
  GstCaps *from_caps, *to_caps_copy = NULL;
  GstFlowReturn ret;
  GstElement *pipeline, *src, *sink;
  guint i, n;

  g_return_val_if_fail (sample != NULL, NULL);
  g_return_val_if_fail (to_caps != NULL, NULL);

  buf = gst_sample_get_buffer (sample);
  g_return_val_if_fail (buf != NULL, NULL);

  from_caps = gst_sample_get_caps (sample);
  g_return_val_if_fail (from_caps != NULL, NULL);

  to_caps_copy = gst_caps_new_empty ();
  n = gst_caps_get_size (to_caps);
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (to_caps, i);

    s = gst_structure_copy (s);
    gst_structure_remove_field (s, "framerate");
    gst_caps_append_structure (to_caps_copy, s);
  }

  pipeline =
      build_convert_frame_pipeline (&src, &sink, from_caps,
      gst_buffer_get_video_crop_meta (buf), to_caps_copy, &err);
  if (!pipeline)
    goto no_pipeline;

  /* now set the pipeline to the paused state, after we push the buffer into
   * appsrc, this should preroll the converted buffer in appsink */
  GST_DEBUG ("running conversion pipeline to caps %" GST_PTR_FORMAT,
      to_caps_copy);
  if (gst_element_set_state (pipeline,
          GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
    goto state_change_failed;

  /* feed buffer in appsrc */
  GST_DEBUG ("feeding buffer %p, size %" G_GSIZE_FORMAT ", caps %"
      GST_PTR_FORMAT, buf, gst_buffer_get_size (buf), from_caps);
  g_signal_emit_by_name (src, "push-buffer", buf, &ret);

  /* now see what happens. We either got an error somewhere or the pipeline
   * prerolled */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus,
      timeout, GST_MESSAGE_ERROR | GST_MESSAGE_ASYNC_DONE);

  if (msg) {
    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ASYNC_DONE:
      {
        /* we're prerolled, get the frame from appsink */
        g_signal_emit_by_name (sink, "pull-preroll", &result);

        if (result) {
          GST_DEBUG ("conversion successful: result = %p", result);
        } else {
          GST_ERROR ("prerolled but no result frame?!");
        }
        break;
      }
      case GST_MESSAGE_ERROR:{
        gchar *dbg = NULL;

        gst_message_parse_error (msg, &err, &dbg);
        if (err) {
          GST_ERROR ("Could not convert video frame: %s", err->message);
          GST_DEBUG ("%s [debug: %s]", err->message, GST_STR_NULL (dbg));
          if (error)
            *error = err;
          else
            g_error_free (err);
        }
        g_free (dbg);
        break;
      }
      default:{
        g_return_val_if_reached (NULL);
      }
    }
    gst_message_unref (msg);
  } else {
    GST_ERROR ("Could not convert video frame: timeout during conversion");
    if (error)
      *error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
          "Could not convert video frame: timeout during conversion");
  }

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (bus);
  gst_object_unref (pipeline);
  gst_caps_unref (to_caps_copy);

  return result;

  /* ERRORS */
no_pipeline:
state_change_failed:
  {
    gst_caps_unref (to_caps_copy);

    if (error)
      *error = err;
    else
      g_error_free (err);

    return NULL;
  }
}
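
/* Example of synchronous use (illustrative sketch; the "sample" variable and
 * the PNG output caps below are assumptions made for the example, not taken
 * from this file):
 *
 *   GError *err = NULL;
 *   GstCaps *out_caps = gst_caps_new_empty_simple ("image/png");
 *   GstSample *snapshot =
 *       gst_video_convert_sample (sample, out_caps, GST_SECOND, &err);
 *
 *   if (snapshot != NULL)
 *     gst_sample_unref (snapshot);
 *   else
 *     g_warning ("conversion failed: %s", err->message);
 *
 *   g_clear_error (&err);
 *   gst_caps_unref (out_caps);
 */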

typedef struct
{
  gint ref_count;
  GMutex mutex;
  GstElement *pipeline;
  GstVideoConvertSampleCallback callback;
  gpointer user_data;
  GDestroyNotify destroy_notify;
  GMainContext *context;
  GstSample *sample;
  GSource *timeout_source;
  gboolean finished;

  /* Results */
  GstSample *converted_sample;
  GError *error;
} GstVideoConvertSampleContext;

static GstVideoConvertSampleContext *
gst_video_convert_frame_context_ref (GstVideoConvertSampleContext * ctx)
{
  g_atomic_int_inc (&ctx->ref_count);

  return ctx;
}

static void
gst_video_convert_frame_context_unref (GstVideoConvertSampleContext * ctx)
{
  if (!g_atomic_int_dec_and_test (&ctx->ref_count))
    return;

  g_mutex_clear (&ctx->mutex);
  if (ctx->timeout_source)
    g_source_destroy (ctx->timeout_source);
  if (ctx->sample)
    gst_sample_unref (ctx->sample);
  if (ctx->converted_sample)
    gst_sample_unref (ctx->converted_sample);
  g_clear_error (&ctx->error);
  g_main_context_unref (ctx->context);

  /* The pipeline was already destroyed in finish() earlier and we
   * must not end up here without finish() being called */
  g_warn_if_fail (ctx->pipeline == NULL);

  g_slice_free (GstVideoConvertSampleContext, ctx);
}

static gboolean
convert_frame_dispatch_callback (GstVideoConvertSampleContext * ctx)
{
  GstSample *sample;
  GError *error;

  g_return_val_if_fail (ctx->converted_sample != NULL
      || ctx->error != NULL, FALSE);

  sample = ctx->converted_sample;
  error = ctx->error;
  ctx->converted_sample = NULL;
  ctx->error = NULL;

  ctx->callback (sample, error, ctx->user_data);

  if (ctx->destroy_notify)
    ctx->destroy_notify (ctx->user_data);

  return FALSE;
}

static void
convert_frame_stop_pipeline (GstElement * element, gpointer user_data)
{
  gst_element_set_state (element, GST_STATE_NULL);
}

static void
convert_frame_finish (GstVideoConvertSampleContext * context,
    GstSample * sample, GError * error)
{
  GSource *source;

  g_return_if_fail (!context->finished);
  g_return_if_fail (sample != NULL || error != NULL);

  context->finished = TRUE;
  context->converted_sample = sample;
  context->error = error;

  if (context->timeout_source)
    g_source_destroy (context->timeout_source);
  context->timeout_source = NULL;

  source = g_timeout_source_new (0);
  g_source_set_callback (source,
      (GSourceFunc) convert_frame_dispatch_callback,
      gst_video_convert_frame_context_ref (context),
      (GDestroyNotify) gst_video_convert_frame_context_unref);
  g_source_attach (source, context->context);
  g_source_unref (source);

  /* Asynchronously stop the pipeline here: this will set its
   * state to NULL and get rid of its last reference, which in turn
   * will get rid of all remaining references to our context and free
   * it too. We can't do this directly here, as we might be called from
   * a streaming thread.
   *
   * We don't use the main loop here because the user might shut it down
   * immediately after getting the result of the conversion above.
   */
  if (context->pipeline) {
    gst_element_call_async (context->pipeline, convert_frame_stop_pipeline,
        NULL, NULL);
    gst_object_unref (context->pipeline);
    context->pipeline = NULL;
  }
}

static gboolean
convert_frame_timeout_callback (GstVideoConvertSampleContext * context)
{
  GError *error;

  g_mutex_lock (&context->mutex);

  if (context->finished)
    goto done;

  GST_ERROR ("Could not convert video frame: timeout");

  error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
      "Could not convert video frame: timeout");

  convert_frame_finish (context, NULL, error);

done:
  g_mutex_unlock (&context->mutex);
  return FALSE;
}

static gboolean
convert_frame_bus_callback (GstBus * bus, GstMessage * message,
    GstVideoConvertSampleContext * context)
{
  g_mutex_lock (&context->mutex);

  if (context->finished)
    goto done;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *error;
      gchar *dbg = NULL;

      gst_message_parse_error (message, &error, &dbg);

      GST_ERROR ("Could not convert video frame: %s", error->message);
      GST_DEBUG ("%s [debug: %s]", error->message, GST_STR_NULL (dbg));

      convert_frame_finish (context, NULL, error);

      g_free (dbg);
      break;
    }
    default:
      break;
  }

done:
  g_mutex_unlock (&context->mutex);

  return FALSE;
}

static void
convert_frame_need_data_callback (GstElement * src, guint size,
    GstVideoConvertSampleContext * context)
{
  GstFlowReturn ret = GST_FLOW_ERROR;
  GError *error;
  GstBuffer *buffer;

  g_mutex_lock (&context->mutex);

  if (context->finished)
    goto done;

  buffer = gst_sample_get_buffer (context->sample);
  g_signal_emit_by_name (src, "push-buffer", buffer, &ret);
  gst_sample_unref (context->sample);
  context->sample = NULL;

  if (ret != GST_FLOW_OK) {
    GST_ERROR ("Could not push video frame: %s", gst_flow_get_name (ret));

    error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
        "Could not push video frame: %s", gst_flow_get_name (ret));

    convert_frame_finish (context, NULL, error);
  }

done:
  g_mutex_unlock (&context->mutex);

  g_signal_handlers_disconnect_by_func (src, convert_frame_need_data_callback,
      context);
}

static GstFlowReturn
convert_frame_new_preroll_callback (GstElement * sink,
    GstVideoConvertSampleContext * context)
{
  GstSample *sample = NULL;
  GError *error = NULL;

  g_mutex_lock (&context->mutex);

  if (context->finished)
    goto done;

  g_signal_emit_by_name (sink, "pull-preroll", &sample);

  if (!sample) {
    error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_FAILED,
        "Could not get converted video sample");
  }
  convert_frame_finish (context, sample, error);

done:
  g_mutex_unlock (&context->mutex);

  /* disconnect our own handler from the sink (the original code disconnected
   * the need-data handler here, which was never connected to the sink) */
  g_signal_handlers_disconnect_by_func (sink,
      convert_frame_new_preroll_callback, context);

  return GST_FLOW_OK;
}

/**
 * gst_video_convert_sample_async:
 * @sample: a #GstSample
 * @to_caps: the #GstCaps to convert to
 * @timeout: the maximum amount of time allowed for the processing.
 * @callback: #GstVideoConvertSampleCallback that will be called after conversion.
 * @user_data: extra data that will be passed to the @callback
 * @destroy_notify: #GDestroyNotify to be called after @user_data is not needed anymore
 *
 * Converts a raw video buffer into the specified output caps.
 *
 * The output caps can be any raw video formats or any image formats (jpeg, png, ...).
 *
 * The width, height and pixel-aspect-ratio can also be specified in the output caps.
 *
 * @callback will be called after conversion, when an error occurs, or if conversion
 * doesn't finish within @timeout. @callback will always be called from the
 * thread-default #GMainContext, see g_main_context_get_thread_default(). If GLib
 * before 2.22 is used, this will always be the global default main context.
 *
 * @destroy_notify will be called after @callback has been called and @user_data is
 * not needed anymore.
 */
void
gst_video_convert_sample_async (GstSample * sample,
    const GstCaps * to_caps, GstClockTime timeout,
    GstVideoConvertSampleCallback callback, gpointer user_data,
    GDestroyNotify destroy_notify)
{
  GMainContext *context = NULL;
  GError *error = NULL;
  GstBus *bus;
  GstBuffer *buf;
  GstCaps *from_caps, *to_caps_copy = NULL;
  GstElement *pipeline, *src, *sink;
  guint i, n;
  GSource *source;
  GstVideoConvertSampleContext *ctx;

  g_return_if_fail (sample != NULL);
  buf = gst_sample_get_buffer (sample);
  g_return_if_fail (buf != NULL);

  g_return_if_fail (to_caps != NULL);

  from_caps = gst_sample_get_caps (sample);
  g_return_if_fail (from_caps != NULL);
  g_return_if_fail (callback != NULL);

  context = g_main_context_get_thread_default ();

  if (!context)
    context = g_main_context_default ();

  to_caps_copy = gst_caps_new_empty ();
  n = gst_caps_get_size (to_caps);
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (to_caps, i);

    s = gst_structure_copy (s);
    gst_structure_remove_field (s, "framerate");
    gst_caps_append_structure (to_caps_copy, s);
  }

  /* There's a reference cycle between the context and the pipeline, which is
   * broken up once finish() is called on the context. At the latest, the
   * context will be freed when the timeout triggers. */
  ctx = g_slice_new0 (GstVideoConvertSampleContext);
  ctx->ref_count = 1;
  g_mutex_init (&ctx->mutex);
  ctx->sample = gst_sample_ref (sample);
  ctx->callback = callback;
  ctx->user_data = user_data;
  ctx->destroy_notify = destroy_notify;
  ctx->context = g_main_context_ref (context);
  ctx->finished = FALSE;

  pipeline =
      build_convert_frame_pipeline (&src, &sink, from_caps,
      gst_buffer_get_video_crop_meta (buf), to_caps_copy, &error);
  if (!pipeline)
    goto no_pipeline;
  ctx->pipeline = pipeline;

  bus = gst_element_get_bus (pipeline);

  if (timeout != GST_CLOCK_TIME_NONE) {
    ctx->timeout_source = g_timeout_source_new (timeout / GST_MSECOND);
    g_source_set_callback (ctx->timeout_source,
        (GSourceFunc) convert_frame_timeout_callback,
        gst_video_convert_frame_context_ref (ctx),
        (GDestroyNotify) gst_video_convert_frame_context_unref);
    g_source_attach (ctx->timeout_source, context);
  }

  g_signal_connect_data (src, "need-data",
      G_CALLBACK (convert_frame_need_data_callback),
      gst_video_convert_frame_context_ref (ctx),
      (GClosureNotify) gst_video_convert_frame_context_unref, 0);
  g_signal_connect_data (sink, "new-preroll",
      G_CALLBACK (convert_frame_new_preroll_callback),
      gst_video_convert_frame_context_ref (ctx),
      (GClosureNotify) gst_video_convert_frame_context_unref, 0);

  source = gst_bus_create_watch (bus);
  g_source_set_callback (source, (GSourceFunc) convert_frame_bus_callback,
      gst_video_convert_frame_context_ref (ctx),
      (GDestroyNotify) gst_video_convert_frame_context_unref);
  g_source_attach (source, context);
  g_source_unref (source);
  gst_object_unref (bus);

  if (gst_element_set_state (pipeline,
          GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
    goto state_change_failed;

  gst_caps_unref (to_caps_copy);

  gst_video_convert_frame_context_unref (ctx);

  return;
  /* ERRORS */
no_pipeline:
  {
    gst_caps_unref (to_caps_copy);

    g_mutex_lock (&ctx->mutex);
    convert_frame_finish (ctx, NULL, error);
    g_mutex_unlock (&ctx->mutex);
    gst_video_convert_frame_context_unref (ctx);

    return;
  }
state_change_failed:
  {
    gst_caps_unref (to_caps_copy);

    error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_STATE_CHANGE,
        "failed to change state to PAUSED");

    g_mutex_lock (&ctx->mutex);
    convert_frame_finish (ctx, NULL, error);
    g_mutex_unlock (&ctx->mutex);
    gst_video_convert_frame_context_unref (ctx);

    return;
  }
}
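
/* Example of asynchronous use (illustrative sketch; got_snapshot() and the
 * JPEG output caps are hypothetical names chosen for the example). Judging
 * by convert_frame_dispatch_callback() above, ownership of both the
 * converted sample and the error passes to the callback:
 *
 *   static void
 *   got_snapshot (GstSample * res, GError * err, gpointer user_data)
 *   {
 *     if (res != NULL)
 *       gst_sample_unref (res);
 *     else
 *       g_warning ("conversion failed: %s", err->message);
 *     g_clear_error (&err);
 *   }
 *
 *   ...
 *   GstCaps *out_caps = gst_caps_new_empty_simple ("image/jpeg");
 *   gst_video_convert_sample_async (sample, out_caps, GST_SECOND,
 *       got_snapshot, NULL, NULL);
 *   gst_caps_unref (out_caps);
 */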