• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1/*
2 * GStreamer
3 * Copyright (C) 2015 Matthew Waters <matthew@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21/**
22 * SECTION:element-avsamplebufferlayersink
23 *
 * avsamplebufferlayersink renders video frames to a CALayer that can be
 * placed inside a Core Animation render tree.
26 */
27
28#ifdef HAVE_CONFIG_H
29#include "config.h"
30#endif
31
32#include "avsamplevideosink.h"
33
34GST_DEBUG_CATEGORY (gst_debug_av_sink);
35#define GST_CAT_DEFAULT gst_debug_av_sink
36
37static void gst_av_sample_video_sink_finalize (GObject * object);
38static void gst_av_sample_video_sink_set_property (GObject * object, guint prop_id,
39    const GValue * value, GParamSpec * param_spec);
40static void gst_av_sample_video_sink_get_property (GObject * object, guint prop_id,
41    GValue * value, GParamSpec * param_spec);
42
43static gboolean gst_av_sample_video_sink_start (GstBaseSink * bsink);
44static gboolean gst_av_sample_video_sink_stop (GstBaseSink * bsink);
45
46static void gst_av_sample_video_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
47    GstClockTime * start, GstClockTime * end);
48static gboolean gst_av_sample_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps);
49static GstCaps * gst_av_sample_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter);
50static GstFlowReturn gst_av_sample_video_sink_prepare (GstBaseSink * bsink,
51    GstBuffer * buf);
52static GstFlowReturn gst_av_sample_video_sink_show_frame (GstVideoSink * bsink,
53    GstBuffer * buf);
54static gboolean gst_av_sample_video_sink_propose_allocation (GstBaseSink * bsink,
55    GstQuery * query);
56
/* Sink pad template: raw video formats this sink can hand to Core Video
 * (see _cv_pixel_format_type_from_video_format below for the mapping). */
static GstStaticPadTemplate gst_av_sample_video_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGB, BGR, ARGB, BGRA, ABGR, RGBA, YUY2, UYVY, NV12, I420 }"))
    );
63
/* GObject property ids */
enum
{
  PROP_0,                       /* reserved: valid property ids start at 1 */
  PROP_FORCE_ASPECT_RATIO,
  PROP_LAYER,
};
70
#define gst_av_sample_video_sink_parent_class parent_class
/* Registers GstAVSampleVideoSink (parent: GstVideoSink) and initialises the
 * debug category used through GST_CAT_DEFAULT above. */
G_DEFINE_TYPE_WITH_CODE (GstAVSampleVideoSink, gst_av_sample_video_sink,
    GST_TYPE_VIDEO_SINK, GST_DEBUG_CATEGORY_INIT (gst_debug_av_sink, "avsamplevideosink", 0,
        "AV Sample Video Sink"));
75
/* Class initialisation: installs properties, metadata, the pad template and
 * the GObject/GstBaseSink/GstVideoSink vfunc overrides. */
static void
gst_av_sample_video_sink_class_init (GstAVSampleVideoSinkClass * klass)
{
  /* Previously a separate, unused 'gstelement_class' cast was also kept
   * alongside 'element_class'; the duplicate has been removed. */
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstBaseSinkClass *gstbasesink_class = (GstBaseSinkClass *) klass;
  GstVideoSinkClass *gstvideosink_class = (GstVideoSinkClass *) klass;
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gobject_class->set_property = gst_av_sample_video_sink_set_property;
  gobject_class->get_property = gst_av_sample_video_sink_get_property;

  g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
      g_param_spec_boolean ("force-aspect-ratio",
          "Force aspect ratio",
          "When enabled, scaling will respect original aspect ratio", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* read-only: the layer is created by the sink in start() */
  g_object_class_install_property (gobject_class, PROP_LAYER,
      g_param_spec_pointer ("layer", "CALayer",
          "The CoreAnimation layer that can be placed in the render tree",
          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  gst_element_class_set_metadata (element_class, "AV Sample video sink",
      "Sink/Video", "A videosink based on AVSampleBuffers",
      "Matthew Waters <matthew@centricular.com>");

  gst_element_class_add_static_pad_template (element_class, &gst_av_sample_video_sink_template);

  gobject_class->finalize = gst_av_sample_video_sink_finalize;

  gstbasesink_class->get_caps = gst_av_sample_video_sink_get_caps;
  gstbasesink_class->set_caps = gst_av_sample_video_sink_set_caps;
  gstbasesink_class->get_times = gst_av_sample_video_sink_get_times;
  gstbasesink_class->prepare = gst_av_sample_video_sink_prepare;
  gstbasesink_class->propose_allocation = gst_av_sample_video_sink_propose_allocation;
  gstbasesink_class->stop = gst_av_sample_video_sink_stop;
  gstbasesink_class->start = gst_av_sample_video_sink_start;

  gstvideosink_class->show_frame =
      GST_DEBUG_FUNCPTR (gst_av_sample_video_sink_show_frame);
}
124
/* Instance initialisation: set up the lock protecting the render state and
 * mirror the "force-aspect-ratio" property default. */
static void
gst_av_sample_video_sink_init (GstAVSampleVideoSink * av_sink)
{
  g_mutex_init (&av_sink->render_lock);

  /* matches the TRUE default of the "force-aspect-ratio" property */
  av_sink->keep_aspect_ratio = TRUE;
}
132
/* GObject::set_property — only "force-aspect-ratio" is writable. */
static void
gst_av_sample_video_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAVSampleVideoSink *sink;

  g_return_if_fail (GST_IS_AV_SAMPLE_VIDEO_SINK (object));
  sink = GST_AV_SAMPLE_VIDEO_SINK (object);

  switch (prop_id) {
    case PROP_FORCE_ASPECT_RATIO:
      /* picked up by the next _enqueue_sample () / start () */
      sink->keep_aspect_ratio = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
154
/* GObject::finalize — releases the display layer and tears down the lock. */
static void
gst_av_sample_video_sink_finalize (GObject * object)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);
  /* __block copy of the pointer: the block below may run after the sink
   * object itself has been freed, so it must not touch av_sink */
  __block gpointer layer = av_sink->layer;

  if (layer) {
    /* the reference was taken with __bridge_retained in start(); hand it
     * back to ARC on the main queue (CALayer work belongs there) */
    dispatch_async (dispatch_get_main_queue (), ^{
      CFBridgingRelease(layer);
    });
  }

  g_mutex_clear (&av_sink->render_lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
171
/* GObject::get_property — exposes the aspect-ratio flag and the CALayer. */
static void
gst_av_sample_video_sink_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstAVSampleVideoSink *sink;

  g_return_if_fail (GST_IS_AV_SAMPLE_VIDEO_SINK (object));
  sink = GST_AV_SAMPLE_VIDEO_SINK (object);

  switch (prop_id) {
    case PROP_FORCE_ASPECT_RATIO:
      g_value_set_boolean (value, sink->keep_aspect_ratio);
      break;
    case PROP_LAYER:
      /* raw pointer to the AVSampleBufferDisplayLayer created in start();
       * NULL until the sink has been started */
      g_value_set_pointer (value, sink->layer);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
194
/* GstBaseSink::start — creates the AVSampleBufferDisplayLayer.
 *
 * The layer must be created on the main thread; the previous version
 * duplicated the creation code verbatim in the main-thread and
 * dispatch_sync branches, which is now factored into a single block. */
static gboolean
gst_av_sample_video_sink_start (GstBaseSink * bsink)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  void (^create_layer) (void) = ^{
    AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];

    /* keep a manually-retained reference; released in finalize() */
    av_sink->layer = (__bridge_retained gpointer) layer;
    if (av_sink->keep_aspect_ratio)
      layer.videoGravity = AVLayerVideoGravityResizeAspect;
    else
      layer.videoGravity = AVLayerVideoGravityResize;
    /* let applications pick up the "layer" property */
    g_object_notify (G_OBJECT (av_sink), "layer");
  };

  if ([NSThread isMainThread])
    create_layer ();
  else
    dispatch_sync (dispatch_get_main_queue (), create_layer);

  return TRUE;
}
222
/* Stops the layer's media-data callback. Must be called with render_lock
 * held. */
static void
_stop_requesting_data (GstAVSampleVideoSink * av_sink)
{
  if (av_sink->layer) {
    /* only balance a previous requestMediaDataWhenReadyOnQueue: call */
    if (av_sink->layer_requesting_data)
      [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) stopRequestingMediaData];
    av_sink->layer_requesting_data = FALSE;
  }
}
233
/* GstBaseSink::stop — stops the layer's data requests, flushes any queued
 * image and drops our reference to the negotiated buffer pool. */
static gboolean
gst_av_sample_video_sink_stop (GstBaseSink * bsink)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  if (av_sink->layer != NULL) {
    /* _stop_requesting_data() requires the render lock */
    g_mutex_lock (&av_sink->render_lock);
    _stop_requesting_data (av_sink);
    g_mutex_unlock (&av_sink->render_lock);

    /* discard anything still queued for display */
    [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage];
  }

  if (av_sink->pool != NULL) {
    gst_object_unref (av_sink->pool);
    av_sink->pool = NULL;
  }

  return TRUE;
}
253
/* GstBaseSink::get_times — derives the running-time window of @buf.
 * Falls back to one frame duration from the negotiated framerate when the
 * buffer carries no duration. Outputs are left untouched when the buffer
 * has no valid timestamp. */
static void
gst_av_sample_video_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf))
    return;

  *start = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_DURATION_IS_VALID (buf)) {
    *end = *start + GST_BUFFER_DURATION (buf);
  } else if (GST_VIDEO_INFO_FPS_N (&av_sink->info) > 0) {
    /* one frame at the negotiated rate: GST_SECOND * fps_d / fps_n */
    *end = *start + gst_util_uint64_scale_int (GST_SECOND,
        GST_VIDEO_INFO_FPS_D (&av_sink->info),
        GST_VIDEO_INFO_FPS_N (&av_sink->info));
  }
}
276
/* Maps a GstVideoFormat to the corresponding Core Video pixel format type.
 * Returns 0 for unsupported formats. Must stay in sync with the pad
 * template caps and _pixel_format_description_to_video_format() below. */
static unsigned int
_cv_pixel_format_type_from_video_format (GstVideoFormat format)
{
  switch (format) {
    case GST_VIDEO_FORMAT_BGRA:
      return kCVPixelFormatType_32BGRA;
    case GST_VIDEO_FORMAT_ARGB:
      return kCVPixelFormatType_32ARGB;
    case GST_VIDEO_FORMAT_ABGR:
      return kCVPixelFormatType_32ABGR;
    case GST_VIDEO_FORMAT_RGBA:
      return kCVPixelFormatType_32RGBA;
    case GST_VIDEO_FORMAT_RGB:
      return kCVPixelFormatType_24RGB;
    case GST_VIDEO_FORMAT_BGR:
      return kCVPixelFormatType_24BGR;
#if 0
    /* FIXME doesn't seem to work */
    case GST_VIDEO_FORMAT_NV12:
      return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
#endif
    case GST_VIDEO_FORMAT_I420:
      return kCVPixelFormatType_420YpCbCr8Planar;
    case GST_VIDEO_FORMAT_YUY2:
      return kCVPixelFormatType_422YpCbCr8_yuvs;
    case GST_VIDEO_FORMAT_UYVY:
      return kCVPixelFormatType_422YpCbCr8;
    default:
      /* no Core Video equivalent */
      return 0;
  }
}
308
/* Maps a CVPixelFormatDescription dictionary to a GstVideoFormat.
 * Returns GST_VIDEO_FORMAT_UNKNOWN for formats we do not handle. */
static GstVideoFormat
_pixel_format_description_to_video_format (CFDictionaryRef attrs)
{
  CFNumberRef id_ref;
  unsigned int id = 0;

  /* CFDictionaryGetValue follows the CF "Get" rule: the returned value is
   * owned by @attrs and must NOT be released here (the previous CFRelease
   * was an over-release) */
  id_ref = (CFNumberRef) CFDictionaryGetValue (attrs, kCVPixelFormatConstant);
  if (id_ref == NULL)
    return GST_VIDEO_FORMAT_UNKNOWN;
  CFNumberGetValue (id_ref, kCFNumberIntType, &id);

  GST_TRACE ("pixel format description id %u", id);

  switch (id) {
    case kCVPixelFormatType_32BGRA:
      return GST_VIDEO_FORMAT_BGRA;
    case kCVPixelFormatType_32ARGB:
      return GST_VIDEO_FORMAT_ARGB;
    case kCVPixelFormatType_32ABGR:
      return GST_VIDEO_FORMAT_ABGR;
    case kCVPixelFormatType_32RGBA:
      return GST_VIDEO_FORMAT_RGBA;
    case kCVPixelFormatType_24RGB:
      return GST_VIDEO_FORMAT_RGB;
    case kCVPixelFormatType_24BGR:
      return GST_VIDEO_FORMAT_BGR;
#if 0
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
      return GST_VIDEO_FORMAT_NV12;
#endif
    case kCVPixelFormatType_420YpCbCr8Planar:
      return GST_VIDEO_FORMAT_I420;
    case kCVPixelFormatType_422YpCbCr8_yuvs:
      return GST_VIDEO_FORMAT_YUY2;
    case kCVPixelFormatType_422YpCbCr8:
      return GST_VIDEO_FORMAT_UYVY;
    default:
      return GST_VIDEO_FORMAT_UNKNOWN;
  }
}
349
/* GstBaseSink::get_caps — builds caps from every pixel format Core Video
 * advertises that we can map to a GstVideoFormat, then intersects with
 * @filter when provided. Returns a caps reference owned by the caller. */
static GstCaps *
gst_av_sample_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
  CFArrayRef formats;
  GstCaps *ret, *tmp;
  int i, n;

  formats =
      CVPixelFormatDescriptionArrayCreateWithAllPixelFormatTypes
      (kCFAllocatorDefault);

  ret = gst_caps_new_empty ();

  n = CFArrayGetCount (formats);
  for (i = 0; i < n; i++) {
    CFDictionaryRef attrs;
    CFNumberRef fourcc;
    unsigned int pixel_format;
    GstVideoFormat v_format;
    const char *format_str;
    char *caps_str;

    /* CFArrayGetValueAtIndex follows the CF "Get" rule: the value is owned
     * by @formats and must NOT be released here (the previous CFRelease
     * was an over-release) */
    fourcc = (CFNumberRef) CFArrayGetValueAtIndex (formats, i);
    CFNumberGetValue (fourcc, kCFNumberIntType, &pixel_format);

    /* this one we do own (CF "Create" rule) and release below */
    attrs = CVPixelFormatDescriptionCreateWithPixelFormatType (kCFAllocatorDefault,
        pixel_format);
    if (attrs == NULL)
      continue;

    v_format = _pixel_format_description_to_video_format (attrs);
    if (v_format != GST_VIDEO_FORMAT_UNKNOWN) {
      format_str = gst_video_format_to_string (v_format);

      caps_str = g_strdup_printf ("video/x-raw, format=%s", format_str);

      ret = gst_caps_merge (ret, gst_caps_from_string (caps_str));

      g_free (caps_str);
    }

    CFRelease (attrs);
  }

  ret = gst_caps_simplify (ret);

  /* any size/framerate: the layer scales for us */
  gst_caps_set_simple (ret, "width", GST_TYPE_INT_RANGE, 0, G_MAXINT, "height",
      GST_TYPE_INT_RANGE, 0, G_MAXINT, "framerate", GST_TYPE_FRACTION_RANGE, 0,
      1, G_MAXINT, 1, NULL);
  GST_DEBUG_OBJECT (av_sink, "returning caps %" GST_PTR_FORMAT, ret);

  if (filter) {
    tmp = gst_caps_intersect_full (ret, filter, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (ret);
    ret = tmp;
  }

  CFRelease (formats);

  return ret;
}
411
/* GstBaseSink::set_caps — stores the negotiated video info, computes the
 * display size from the pixel-aspect-ratio and (re)creates the internal
 * buffer pool. Returns FALSE on unusable caps. */
static gboolean
gst_av_sample_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstAVSampleVideoSink *av_sink;
  gint width;
  gint height;
  gboolean ok;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoInfo vinfo;
  GstStructure *structure;
  GstBufferPool *newpool, *oldpool;

  GST_DEBUG_OBJECT (bsink, "set caps with %" GST_PTR_FORMAT, caps);

  av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  ok = gst_video_info_from_caps (&vinfo, caps);
  if (!ok)
    return FALSE;

  width = GST_VIDEO_INFO_WIDTH (&vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&vinfo);

  par_n = GST_VIDEO_INFO_PAR_N (&vinfo);
  par_d = GST_VIDEO_INFO_PAR_D (&vinfo);

  /* guard against caps with a 0 PAR numerator */
  if (!par_n)
    par_n = 1;

  /* assume a square-pixel display */
  display_par_n = 1;
  display_par_d = 1;

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  GST_TRACE_OBJECT (bsink, "PAR: %u/%u DAR:%u/%u", par_n, par_d, display_par_n,
      display_par_d);

  /* prefer whichever dimension scales without rounding error */
  if (height % display_ratio_den == 0) {
    GST_DEBUG_OBJECT (bsink, "keeping video height");
    GST_VIDEO_SINK_WIDTH (av_sink) = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    GST_VIDEO_SINK_HEIGHT (av_sink) = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG_OBJECT (bsink, "keeping video width");
    GST_VIDEO_SINK_WIDTH (av_sink) = width;
    GST_VIDEO_SINK_HEIGHT (av_sink) = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG_OBJECT (bsink, "approximating while keeping video height");
    GST_VIDEO_SINK_WIDTH (av_sink) = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    GST_VIDEO_SINK_HEIGHT (av_sink) = height;
  }
  GST_DEBUG_OBJECT (bsink, "scaling to %dx%d", GST_VIDEO_SINK_WIDTH (av_sink),
      GST_VIDEO_SINK_HEIGHT (av_sink));

  av_sink->info = vinfo;

  newpool = gst_video_buffer_pool_new ();
  structure = gst_buffer_pool_get_config (newpool);
  /* min 2 buffers: we hold on to the last rendered one */
  gst_buffer_pool_config_set_params (structure, caps, vinfo.size, 2, 0);
  gst_buffer_pool_set_config (newpool, structure);

  oldpool = av_sink->pool;
  /* we don't activate the pool yet, this will be done by downstream after it
   * has configured the pool. If downstream does not want our pool we will
   * activate it when we render into it */
  av_sink->pool = newpool;

  /* unref the old pool */
  if (oldpool) {
    /* we don't deactivate, some elements might still be using it, it will
     * be deactivated when the last ref is gone */
    gst_object_unref (oldpool);
  }

  return TRUE;
}
499
/* CVPixelBufferReleasePlanarBytesCallback: frees the GstVideoFrame backing a
 * planar CVPixelBuffer once Core Video is done with it. The extra parameters
 * are required by the callback signature and unused here. */
static void
_unmap_planar_frame (GstVideoFrame * v_frame, const void * data, gsize dataSize,
    gsize numberOfPlanes, const void *planeAddressed[])
{
  GST_TRACE ("freeing video frame %p", v_frame);

  gst_video_frame_unmap (v_frame);
  g_free (v_frame);
}
509
/* CVPixelBufferReleaseBytesCallback: frees the GstVideoFrame backing a
 * single-plane CVPixelBuffer once Core Video is done with it. */
static void
_unmap_frame (GstVideoFrame * v_frame, const void * data)
{
  GST_TRACE ("freeing video frame %p", v_frame);

  gst_video_frame_unmap (v_frame);
  g_free (v_frame);
}
518
519/* with render lock */
/* Wraps @buf in a CMSampleBuffer (zero-copy via CVPixelBuffer*CreateWithBytes)
 * and enqueues it on the display layer. Must be called with render_lock held.
 * The mapped GstVideoFrame is owned by the CVPixelBuffer and released through
 * the _unmap_*frame callbacks. Returns FALSE on any failure. */
static gboolean
_enqueue_sample (GstAVSampleVideoSink * av_sink, GstBuffer *buf)
{
  CVPixelBufferRef pbuf;
  CMVideoFormatDescriptionRef v_format_desc;
  GstVideoFrame *v_frame;
  CMSampleTimingInfo sample_time;
  __block CMSampleBufferRef sample_buf;
  CFArrayRef sample_attachments;
  gsize l, r, t, b;
  gint i;

  GST_TRACE_OBJECT (av_sink, "redisplay of size:%ux%u, window size:%ux%u",
      GST_VIDEO_INFO_WIDTH (&av_sink->info),
      GST_VIDEO_INFO_HEIGHT (&av_sink->info),
      GST_VIDEO_SINK_WIDTH (av_sink),
      GST_VIDEO_SINK_HEIGHT (av_sink));

  /* heap-allocated so it can outlive this call inside the CVPixelBuffer */
  v_frame = g_new0 (GstVideoFrame, 1);

  if (!gst_video_frame_map (v_frame, &av_sink->info, buf, GST_MAP_READ)) {
    GST_ERROR_OBJECT (av_sink, "Failed to map input video frame");
    g_free (v_frame);
    return FALSE;
  }

  if (GST_VIDEO_INFO_N_PLANES (&v_frame->info) == 1) {
    /* single plane */
    if (kCVReturnSuccess != CVPixelBufferCreateWithBytes (NULL,
        GST_VIDEO_INFO_WIDTH (&v_frame->info),
        GST_VIDEO_INFO_HEIGHT (&v_frame->info),
        _cv_pixel_format_type_from_video_format (GST_VIDEO_INFO_FORMAT (&v_frame->info)),
        v_frame->data[0], v_frame->info.stride[0],
        (CVPixelBufferReleaseBytesCallback) _unmap_frame, v_frame, NULL,
        &pbuf)) {
      GST_ERROR_OBJECT (av_sink, "Error creating Core Video pixel buffer");
      gst_video_frame_unmap (v_frame);
      g_free (v_frame);
      return FALSE;
    }
  } else {
    /* multi-planar */
    gsize widths[GST_VIDEO_MAX_PLANES] = { 0, };
    gsize heights[GST_VIDEO_MAX_PLANES] = { 0, };
    gsize strides[GST_VIDEO_MAX_PLANES] = { 0, };
    gint i;

    /* per-plane geometry taken from the mapped frame's video info */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&v_frame->info); i++) {
      widths[i] = GST_VIDEO_INFO_COMP_WIDTH (&v_frame->info, i);
      heights[i] = GST_VIDEO_INFO_COMP_HEIGHT (&v_frame->info, i);
      strides[i] = GST_VIDEO_INFO_COMP_STRIDE (&v_frame->info, i);
    }

    if (kCVReturnSuccess != CVPixelBufferCreateWithPlanarBytes (NULL,
        GST_VIDEO_INFO_WIDTH (&v_frame->info),
        GST_VIDEO_INFO_HEIGHT (&v_frame->info),
        _cv_pixel_format_type_from_video_format (GST_VIDEO_INFO_FORMAT (&v_frame->info)),
         /* have to put something for these two parameters otherwise
          * the callback is not called resulting in a big leak */
        v_frame, v_frame->info.size,
        GST_VIDEO_INFO_N_PLANES (&v_frame->info), v_frame->data,
        widths, heights, strides,
        (CVPixelBufferReleasePlanarBytesCallback) _unmap_planar_frame,
        v_frame, NULL, &pbuf)) {
      GST_ERROR_OBJECT (av_sink, "Error creating Core Video pixel buffer");
      gst_video_frame_unmap (v_frame);
      g_free (v_frame);
      return FALSE;
    }
  }

  /* the following traces compare what Core Video sees with what GStreamer
   * mapped, to diagnose stride/size mismatches */
  CVPixelBufferLockBaseAddress (pbuf, kCVPixelBufferLock_ReadOnly);

  CVPixelBufferGetExtendedPixels (pbuf, &l, &r, &t, &b);

  GST_TRACE_OBJECT (av_sink, "CVPixelBuffer n_planes %u width %u height %u"
      " data size %" G_GSIZE_FORMAT " extra pixels l %u r %u t %u b %u",
      (guint) CVPixelBufferGetPlaneCount (pbuf),
      (guint) CVPixelBufferGetWidth (pbuf),
      (guint) CVPixelBufferGetHeight (pbuf),
      CVPixelBufferGetDataSize (pbuf),
      (guint) l, (guint) r, (guint) t, (guint) b);

  GST_TRACE_OBJECT (av_sink, "GstVideoFrame n_planes %u width %u height %u"
      " data size %"G_GSIZE_FORMAT " extra pixels l %u r %u t %u b %u",
      GST_VIDEO_INFO_N_PLANES (&v_frame->info),
      GST_VIDEO_INFO_WIDTH (&v_frame->info),
      GST_VIDEO_INFO_HEIGHT (&v_frame->info),
      v_frame->info.size, 0, 0, 0, 0);

  if (GST_VIDEO_INFO_N_PLANES (&v_frame->info) > 1) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&v_frame->info); i++) {
      GST_TRACE_OBJECT (av_sink, "plane %i CVPixelBuffer width %u height %u "
          "stride %u data %p", i,
          (guint) CVPixelBufferGetWidthOfPlane (pbuf, i),
          (guint) CVPixelBufferGetHeightOfPlane (pbuf, i),
          (guint) CVPixelBufferGetBytesPerRowOfPlane (pbuf, i),
          CVPixelBufferGetBaseAddressOfPlane (pbuf, i));
      GST_TRACE_OBJECT (av_sink, "plane %i GstVideoFrame width %u height %u "
          "stride %u data %p", i,
          GST_VIDEO_INFO_COMP_WIDTH (&v_frame->info, i),
          GST_VIDEO_INFO_COMP_HEIGHT (&v_frame->info, i),
          GST_VIDEO_INFO_COMP_STRIDE (&v_frame->info, i),
          CVPixelBufferGetBaseAddressOfPlane (pbuf, i));
    }
  } else {
    GST_TRACE_OBJECT (av_sink, "CVPixelBuffer attrs stride %u data %p",
      (guint) CVPixelBufferGetBytesPerRow (pbuf),
      CVPixelBufferGetBaseAddress (pbuf));
    GST_TRACE_OBJECT (av_sink, "GstVideoFrame attrs stride %u data %p",
        v_frame->info.stride[0], v_frame->data[0]);
  }

  CVPixelBufferUnlockBaseAddress (pbuf, kCVPixelBufferLock_ReadOnly);

  if (0 != CMVideoFormatDescriptionCreateForImageBuffer (kCFAllocatorDefault,
        pbuf, &v_format_desc)) {
    GST_ERROR_OBJECT (av_sink, "Failed to retrieve video format from "
        "pixel buffer");
    /* releasing pbuf triggers the unmap callback, freeing v_frame */
    CFRelease (pbuf);
    return FALSE;
  }

  sample_time.duration = CMTimeMake (GST_BUFFER_DURATION (buf), GST_SECOND);
  sample_time.presentationTimeStamp = CMTimeMake (GST_BUFFER_PTS (buf), GST_SECOND);
  sample_time.decodeTimeStamp = kCMTimeInvalid;

  if (0 != CMSampleBufferCreateForImageBuffer (kCFAllocatorDefault, pbuf, TRUE,
        NULL, NULL, v_format_desc, &sample_time, &sample_buf)) {
    GST_ERROR_OBJECT (av_sink, "Failed to create CMSampleBuffer from "
        "CVImageBuffer");
    CFRelease (v_format_desc);
    CFRelease (pbuf);
    return FALSE;
  }
  CFRelease (v_format_desc);

  sample_attachments = CMSampleBufferGetSampleAttachmentsArray (sample_buf, TRUE);
  for (i = 0; i < CFArrayGetCount (sample_attachments); i++) {
    CFMutableDictionaryRef attachments =
       (CFMutableDictionaryRef) CFArrayGetValueAtIndex (sample_attachments, i);
    /* Until we slave the CoreMedia clock, just display everything ASAP */
    CFDictionarySetValue (attachments, kCMSampleAttachmentKey_DisplayImmediately,
        kCFBooleanTrue);
  }

  /* re-apply the gravity each time so "force-aspect-ratio" changes made
   * after start() take effect */
  AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
  if (av_sink->keep_aspect_ratio)
    layer.videoGravity = AVLayerVideoGravityResizeAspect;
  else
    layer.videoGravity = AVLayerVideoGravityResize;
  [layer enqueueSampleBuffer:sample_buf];

  CFRelease (pbuf);
  CFRelease (sample_buf);

  return TRUE;
}
678
/* Installs the layer's media-data callback which pulls buffers stored by
 * show_frame(). Called with render_lock held; the block itself re-takes the
 * lock on every iteration. */
static void
_request_data (GstAVSampleVideoSink * av_sink)
{
  av_sink->layer_requesting_data = TRUE;

  AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
  [layer requestMediaDataWhenReadyOnQueue:
        dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
        usingBlock:^{
    while (TRUE) {
      /* don't needlessly fill up avsamplebufferdisplaylayer's queue.
       * This also allows us to skip displaying late frames */
      if (!layer.readyForMoreMediaData)
        break;

      g_mutex_lock (&av_sink->render_lock);

      /* no pending buffer, or a previous failure: stop requesting data
       * until show_frame() restarts us */
      if (!av_sink->buffer || av_sink->render_flow_return != GST_FLOW_OK) {
        _stop_requesting_data (av_sink);
        g_mutex_unlock (&av_sink->render_lock);
        break;
      }

      if (!_enqueue_sample (av_sink, av_sink->buffer)) {
        /* report the failure to the streaming thread via
         * render_flow_return, picked up by the next show_frame() */
        gst_buffer_unref (av_sink->buffer);
        av_sink->buffer = NULL;
        av_sink->render_flow_return = GST_FLOW_ERROR;
        g_mutex_unlock (&av_sink->render_lock);
        break;
      }

      gst_buffer_unref (av_sink->buffer);
      av_sink->buffer = NULL;
      av_sink->render_flow_return = GST_FLOW_OK;
      g_mutex_unlock (&av_sink->render_lock);
    }
  }];
}
717
/* GstBaseSink::prepare — sanity-checks that caps negotiation produced a
 * usable display size before show_frame() is invoked. */
static GstFlowReturn
gst_av_sample_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buf)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  GST_LOG_OBJECT (bsink, "preparing buffer:%p", buf);

  /* width/height are only set by a successful set_caps() */
  if (GST_VIDEO_SINK_WIDTH (av_sink) >= 1 &&
      GST_VIDEO_SINK_HEIGHT (av_sink) >= 1)
    return GST_FLOW_OK;

  return GST_FLOW_NOT_NEGOTIATED;
}
734
/* GstVideoSink::show_frame — stashes @buf for the layer's media-data
 * callback (see _request_data) and reports the result of the previous
 * asynchronous enqueue attempt. */
static GstFlowReturn
gst_av_sample_video_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  GstAVSampleVideoSink *av_sink;
  GstFlowReturn ret;

  GST_TRACE_OBJECT (vsink, "rendering buffer:%p", buf);

  av_sink = GST_AV_SAMPLE_VIDEO_SINK (vsink);

  g_mutex_lock (&av_sink->render_lock);
  /* replace any not-yet-displayed buffer: late frames are simply dropped */
  if (av_sink->buffer)
    gst_buffer_unref (av_sink->buffer);
  av_sink->buffer = gst_buffer_ref (buf);
  /* errors from the async enqueue of a *previous* buffer surface here */
  ret = av_sink->render_flow_return;

  if (!av_sink->layer_requesting_data)
    _request_data (av_sink);
  g_mutex_unlock (&av_sink->render_lock);

/* NOTE(review): the MIN_REQUIRED <= 10_4 clause makes this status check
 * dead on typical deployment targets — looks like it should be a plain
 * availability check for -[AVSampleBufferDisplayLayer status]; verify
 * against the intended deployment targets before changing */
#if defined(MAC_OS_X_VERSION_MAX_ALLOWED) && \
    MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \
    defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \
    MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
    AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
  if ([layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
    GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s",
        [[[layer error] description] UTF8String]);
    return GST_FLOW_ERROR;
  }
#endif

  return ret;
}
769
/* GstBaseSink::propose_allocation — offers our buffer pool (when its caps
 * still match) or a fresh one to upstream, plus GstVideoMeta support. */
static gboolean
gst_av_sample_video_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
  GstBufferPool *pool;
  GstStructure *config;
  GstCaps *caps;
  guint size;
  gboolean need_pool;

  gst_query_parse_allocation (query, &caps, &need_pool);

  if (caps == NULL)
    goto no_caps;

  /* FIXME re-using buffer pool breaks renegotiation */
  if ((pool = av_sink->pool))
    gst_object_ref (pool);

  if (pool != NULL) {
    GstCaps *pcaps;

    /* we had a pool, check caps */
    GST_DEBUG_OBJECT (av_sink, "check existing pool caps");
    config = gst_buffer_pool_get_config (pool);
    /* also yields the pool's buffer size for the query answer below */
    gst_buffer_pool_config_get_params (config, &pcaps, &size, NULL, NULL);

    if (!gst_caps_is_equal (caps, pcaps)) {
      GST_DEBUG_OBJECT (av_sink, "pool has different caps");
      /* different caps, we can't use this pool */
      gst_object_unref (pool);
      pool = NULL;
    }
    gst_structure_free (config);
  } else {
    GstVideoInfo info;

    if (!gst_video_info_from_caps (&info, caps))
      goto invalid_caps;

    /* the normal size of a frame */
    size = info.size;
  }

  if (pool == NULL && need_pool) {
    GST_DEBUG_OBJECT (av_sink, "create new pool");
    pool = gst_video_buffer_pool_new ();

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, 0, 0);
    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }
  /* we need at least 2 buffer because we hold on to the last one */
  gst_query_add_allocation_pool (query, pool, size, 2, 0);
  if (pool)
    gst_object_unref (pool);

  /* we also support various metadata */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, 0);

  return TRUE;

  /* ERRORS */
no_caps:
  {
    GST_DEBUG_OBJECT (bsink, "no caps specified");
    return FALSE;
  }
invalid_caps:
  {
    GST_DEBUG_OBJECT (bsink, "invalid caps specified");
    return FALSE;
  }
config_failed:
  {
    GST_DEBUG_OBJECT (bsink, "failed setting config");
    return FALSE;
  }
}
850