• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1/*
2 * GStreamer
3 * Copyright (C) 2015 Matthew Waters <matthew@centricular.com>
4 *
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
9 *
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 * Library General Public License for more details.
14 *
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
19 */
20
21/**
22 * SECTION:element-avsamplebufferlayersink
23 *
 * avsamplebufferlayersink renders video frames to a CALayer that can be placed
25 * inside a Core Animation render tree.
26 */
27
28#ifdef HAVE_CONFIG_H
29#include "config.h"
30#endif
31
32#include "avsamplevideosink.h"
33
34GST_DEBUG_CATEGORY (gst_debug_av_sink);
35#define GST_CAT_DEFAULT gst_debug_av_sink
36
/* Forward declarations for the vfuncs installed in class_init below. */

/* GObject vfuncs */
static void gst_av_sample_video_sink_finalize (GObject * object);
static void gst_av_sample_video_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * param_spec);
static void gst_av_sample_video_sink_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * param_spec);

/* GstBaseSink vfuncs */
static gboolean gst_av_sample_video_sink_start (GstBaseSink * bsink);
static gboolean gst_av_sample_video_sink_stop (GstBaseSink * bsink);

static void gst_av_sample_video_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end);
static gboolean gst_av_sample_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps);
static GstCaps * gst_av_sample_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter);
static GstFlowReturn gst_av_sample_video_sink_prepare (GstBaseSink * bsink,
    GstBuffer * buf);
/* GstVideoSink vfunc */
static GstFlowReturn gst_av_sample_video_sink_show_frame (GstVideoSink * bsink,
    GstBuffer * buf);
static gboolean gst_av_sample_video_sink_propose_allocation (GstBaseSink * bsink,
    GstQuery * query);
56
/* Sink pad template: the raw video formats we can map to Core Video pixel
 * format types (see _cv_pixel_format_type_from_video_format below).
 * NOTE(review): NV12 is listed here but its CV mapping is disabled with
 * "#if 0  FIXME" in the conversion functions — confirm it actually works. */
static GstStaticPadTemplate gst_av_sample_video_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGB, BGR, ARGB, BGRA, ABGR, RGBA, YUY2, UYVY, NV12, I420 }"))
    );
63
/* Property IDs. The first entry is the conventional GObject placeholder
 * PROP_0 (property id 0 is reserved and never installed); the original
 * "PROR_0" was a typo and the identifier is referenced nowhere else. */
enum
{
  PROP_0,
  PROP_FORCE_ASPECT_RATIO,
  PROP_LAYER,
};
70
#define gst_av_sample_video_sink_parent_class parent_class
/* Register GstAVSampleVideoSink as a GstVideoSink subclass; the debug
 * category declared above is initialised the first time the type is used. */
G_DEFINE_TYPE_WITH_CODE (GstAVSampleVideoSink, gst_av_sample_video_sink,
    GST_TYPE_VIDEO_SINK, GST_DEBUG_CATEGORY_INIT (gst_debug_av_sink, "avsamplevideosink", 0,
        "AV Sample Video Sink"));
75
/* Class initialisation: install properties, element metadata, the sink pad
 * template and all GObject/GstBaseSink/GstVideoSink vfuncs. */
static void
gst_av_sample_video_sink_class_init (GstAVSampleVideoSinkClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstBaseSinkClass *gstbasesink_class = (GstBaseSinkClass *) klass;
  GstVideoSinkClass *gstvideosink_class = (GstVideoSinkClass *) klass;
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  /* GObject vfuncs */
  gobject_class->set_property = gst_av_sample_video_sink_set_property;
  gobject_class->get_property = gst_av_sample_video_sink_get_property;
  gobject_class->finalize = gst_av_sample_video_sink_finalize;

  g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
      g_param_spec_boolean ("force-aspect-ratio",
          "Force aspect ratio",
          "When enabled, scaling will respect original aspect ratio", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* read-only: the layer is created in start() for the application to embed */
  g_object_class_install_property (gobject_class, PROP_LAYER,
      g_param_spec_pointer ("layer", "CALayer",
          "The CoreAnimation layer that can be placed in the render tree",
          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  /* element metadata and pad template */
  gst_element_class_set_metadata (element_class, "AV Sample video sink",
      "Sink/Video", "A videosink based on AVSampleBuffers",
      "Matthew Waters <matthew@centricular.com>");
  gst_element_class_add_static_pad_template (element_class,
      &gst_av_sample_video_sink_template);

  /* GstBaseSink vfuncs */
  gstbasesink_class->start = gst_av_sample_video_sink_start;
  gstbasesink_class->stop = gst_av_sample_video_sink_stop;
  gstbasesink_class->get_caps = gst_av_sample_video_sink_get_caps;
  gstbasesink_class->set_caps = gst_av_sample_video_sink_set_caps;
  gstbasesink_class->get_times = gst_av_sample_video_sink_get_times;
  gstbasesink_class->prepare = gst_av_sample_video_sink_prepare;
  gstbasesink_class->propose_allocation =
      gst_av_sample_video_sink_propose_allocation;

  /* GstVideoSink vfunc */
  gstvideosink_class->show_frame =
      GST_DEBUG_FUNCPTR (gst_av_sample_video_sink_show_frame);
}
122
static void
gst_av_sample_video_sink_init (GstAVSampleVideoSink * av_sink)
{
  /* matches the "force-aspect-ratio" property default (TRUE) */
  av_sink->keep_aspect_ratio = TRUE;

  /* protects buffer, render_flow_return and layer_requesting_data, which are
   * shared with the dispatch-queue callback installed in _request_data() */
  g_mutex_init (&av_sink->render_lock);
}
130
/* GObject set_property implementation; only "force-aspect-ratio" is
 * writable ("layer" is read-only). */
static void
gst_av_sample_video_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAVSampleVideoSink *av_sink;

  g_return_if_fail (GST_IS_AV_SAMPLE_VIDEO_SINK (object));
  av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);

  switch (prop_id) {
    case PROP_FORCE_ASPECT_RATIO:
      /* picked up on the next enqueued sample and at start() */
      av_sink->keep_aspect_ratio = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
152
static void
gst_av_sample_video_sink_finalize (GObject * object)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);
  /* grab the ownership reference taken with __bridge_retained in start();
   * captured by the dispatch block below */
  __block gpointer layer = av_sink->layer;

  if (layer) {
    /* drop the layer reference on the main queue — Core Animation objects
     * are created on the main thread in start(), so release them there too.
     * The block holds the only remaining reference, so it is fine that it
     * outlives this object. */
    dispatch_async (dispatch_get_main_queue (), ^{
      CFBridgingRelease(layer);
    });
  }

  g_mutex_clear (&av_sink->render_lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
169
/* GObject get_property implementation. Note the raw pointer handed out for
 * "layer" is not an extra reference; the sink retains ownership. */
static void
gst_av_sample_video_sink_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstAVSampleVideoSink *av_sink;

  g_return_if_fail (GST_IS_AV_SAMPLE_VIDEO_SINK (object));
  av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);

  switch (prop_id) {
    case PROP_FORCE_ASPECT_RATIO:
      g_value_set_boolean (value, av_sink->keep_aspect_ratio);
      break;
    case PROP_LAYER:
      /* NULL until start() has run */
      g_value_set_pointer (value, av_sink->layer);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
192
/* GstBaseSink::start — create the AVSampleBufferDisplayLayer and publish it
 * via the "layer" property.  The original duplicated the creation code in
 * both the main-thread and non-main-thread branches (with inconsistent
 * indentation); it is now shared in a single block.
 *
 * Returns: TRUE (layer creation itself has no failure path here). */
static gboolean
gst_av_sample_video_sink_start (GstBaseSink * bsink)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  /* AVSampleBufferDisplayLayer is a CALayer and must be created on the
   * main thread */
  dispatch_block_t create_layer = ^{
    AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];

    /* ownership transfers to av_sink->layer; released again from finalize()
     * on the main queue */
    av_sink->layer = (__bridge_retained gpointer)layer;
    if (av_sink->keep_aspect_ratio)
      layer.videoGravity = AVLayerVideoGravityResizeAspect;
    else
      layer.videoGravity = AVLayerVideoGravityResize;
    g_object_notify (G_OBJECT (av_sink), "layer");
  };

  if ([NSThread isMainThread])
    create_layer ();
  else
    dispatch_sync (dispatch_get_main_queue (), create_layer);

  return TRUE;
}
220
/* with render lock */
/* Cancel the requestMediaDataWhenReady callback installed by _request_data()
 * (if any) and mark the layer as no longer requesting data. */
static void
_stop_requesting_data (GstAVSampleVideoSink * av_sink)
{
  if (av_sink->layer) {
    if (av_sink->layer_requesting_data)
      [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) stopRequestingMediaData];
    av_sink->layer_requesting_data = FALSE;
  }
}
231
/* GstBaseSink::stop — drop the buffer pool, stop the layer's media-data
 * callback and clear any image still displayed.  The layer itself stays
 * alive until finalize(). */
static gboolean
gst_av_sample_video_sink_stop (GstBaseSink * bsink)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  if (av_sink->pool != NULL) {
    gst_object_unref (av_sink->pool);
    av_sink->pool = NULL;
  }

  if (av_sink->layer != NULL) {
    /* the requesting flag is protected by render_lock */
    g_mutex_lock (&av_sink->render_lock);
    _stop_requesting_data (av_sink);
    g_mutex_unlock (&av_sink->render_lock);

    [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage];
  }

  return TRUE;
}
251
/* GstBaseSink::get_times — derive the running-time window of @buf.  The end
 * time comes from the buffer duration when valid, otherwise from one frame
 * period of the negotiated framerate. */
static void
gst_av_sample_video_sink_get_times (GstBaseSink * bsink, GstBuffer * buf,
    GstClockTime * start, GstClockTime * end)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf))
    return;

  *start = GST_BUFFER_TIMESTAMP (buf);

  if (GST_BUFFER_DURATION_IS_VALID (buf)) {
    *end = *start + GST_BUFFER_DURATION (buf);
  } else if (GST_VIDEO_INFO_FPS_N (&av_sink->info) > 0) {
    /* one frame at fps_n/fps_d: GST_SECOND * fps_d / fps_n */
    *end = *start + gst_util_uint64_scale_int (GST_SECOND,
        GST_VIDEO_INFO_FPS_D (&av_sink->info),
        GST_VIDEO_INFO_FPS_N (&av_sink->info));
  }
}
274
/* Map a GstVideoFormat to the matching Core Video kCVPixelFormatType_*
 * constant.  Returns 0 for formats with no mapping; callers rely on the
 * corresponding CVPixelBufferCreate* call failing in that case. */
static unsigned int
_cv_pixel_format_type_from_video_format (GstVideoFormat format)
{
  switch (format) {
    case GST_VIDEO_FORMAT_BGRA:
      return kCVPixelFormatType_32BGRA;
    case GST_VIDEO_FORMAT_ARGB:
      return kCVPixelFormatType_32ARGB;
    case GST_VIDEO_FORMAT_ABGR:
      return kCVPixelFormatType_32ABGR;
    case GST_VIDEO_FORMAT_RGBA:
      return kCVPixelFormatType_32RGBA;
    case GST_VIDEO_FORMAT_RGB:
      return kCVPixelFormatType_24RGB;
    case GST_VIDEO_FORMAT_BGR:
      return kCVPixelFormatType_24BGR;
#if 0
    /* FIXME doesn't seem to work */
    case GST_VIDEO_FORMAT_NV12:
      return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
#endif
    case GST_VIDEO_FORMAT_I420:
      return kCVPixelFormatType_420YpCbCr8Planar;
    case GST_VIDEO_FORMAT_YUY2:
      return kCVPixelFormatType_422YpCbCr8_yuvs;
    case GST_VIDEO_FORMAT_UYVY:
      return kCVPixelFormatType_422YpCbCr8;
    default:
      return 0;
  }
}
306
/* Inverse of _cv_pixel_format_type_from_video_format: extract the
 * kCVPixelFormatConstant from a pixel format description dictionary and map
 * it to a GstVideoFormat.  Returns GST_VIDEO_FORMAT_UNKNOWN for formats we
 * do not handle. */
static GstVideoFormat
_pixel_format_description_to_video_format (CFDictionaryRef attrs)
{
  CFNumberRef id_ref;
  unsigned int id;

  /* CFDictionaryGetValue follows the CF "Get Rule": the dictionary owns the
   * returned CFNumber and we must NOT release it.  The previous
   * CFRelease (id_ref) here was an over-release. */
  id_ref = (CFNumberRef) CFDictionaryGetValue (attrs, kCVPixelFormatConstant);
  if (id_ref == NULL)
    return GST_VIDEO_FORMAT_UNKNOWN;
  CFNumberGetValue (id_ref, kCFNumberIntType, &id);

  GST_TRACE ("pixel format description id %u", id);

  switch (id) {
    case kCVPixelFormatType_32BGRA:
      return GST_VIDEO_FORMAT_BGRA;
    case kCVPixelFormatType_32ARGB:
      return GST_VIDEO_FORMAT_ARGB;
    case kCVPixelFormatType_32ABGR:
      return GST_VIDEO_FORMAT_ABGR;
    case kCVPixelFormatType_32RGBA:
      return GST_VIDEO_FORMAT_RGBA;
    case kCVPixelFormatType_24RGB:
      return GST_VIDEO_FORMAT_RGB;
    case kCVPixelFormatType_24BGR:
      return GST_VIDEO_FORMAT_BGR;
#if 0
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
      return GST_VIDEO_FORMAT_NV12;
#endif
    case kCVPixelFormatType_420YpCbCr8Planar:
      return GST_VIDEO_FORMAT_I420;
    case kCVPixelFormatType_422YpCbCr8_yuvs:
      return GST_VIDEO_FORMAT_YUY2;
    case kCVPixelFormatType_422YpCbCr8:
      return GST_VIDEO_FORMAT_UYVY;
    default:
      return GST_VIDEO_FORMAT_UNKNOWN;
  }
}
347
/* GstBaseSink::get_caps — enumerate every pixel format Core Video supports
 * and advertise the subset that maps to a GstVideoFormat, intersected with
 * @filter if one is supplied.  Returns a new caps reference. */
static GstCaps *
gst_av_sample_video_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
  CFArrayRef formats;
  GstCaps *ret, *tmp;
  int i, n;

  formats =
      CVPixelFormatDescriptionArrayCreateWithAllPixelFormatTypes
      (kCFAllocatorDefault);

  ret = gst_caps_new_empty ();

  n = CFArrayGetCount (formats);
  for (i = 0; i < n; i++) {
    CFDictionaryRef attrs;
    CFNumberRef fourcc;
    unsigned int pixel_format;
    GstVideoFormat v_format;
    const char *format_str;
    char *caps_str;

    /* CF "Get Rule": the array owns this CFNumber; do not release it.
     * The previous CFRelease (fourcc) here was an over-release. */
    fourcc = (CFNumberRef)CFArrayGetValueAtIndex(formats, i);
    CFNumberGetValue (fourcc, kCFNumberIntType, &pixel_format);
    /* CF "Create Rule": we own attrs and release it at the end of the loop */
    attrs = CVPixelFormatDescriptionCreateWithPixelFormatType (kCFAllocatorDefault,
        pixel_format);

    v_format = _pixel_format_description_to_video_format (attrs);
    if (v_format != GST_VIDEO_FORMAT_UNKNOWN) {
      format_str = gst_video_format_to_string (v_format);

      caps_str = g_strdup_printf ("video/x-raw, format=%s", format_str);

      ret = gst_caps_merge (ret, gst_caps_from_string (caps_str));

      g_free (caps_str);
    }

    CFRelease (attrs);
  }

  ret = gst_caps_simplify (ret);

  /* any size/framerate: scaling is done by the layer */
  gst_caps_set_simple (ret, "width", GST_TYPE_INT_RANGE, 0, G_MAXINT, "height",
      GST_TYPE_INT_RANGE, 0, G_MAXINT, "framerate", GST_TYPE_FRACTION_RANGE, 0,
      1, G_MAXINT, 1, NULL);
  GST_DEBUG_OBJECT (av_sink, "returning caps %" GST_PTR_FORMAT, ret);

  if (filter) {
    tmp = gst_caps_intersect_full (ret, filter, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (ret);
    ret = tmp;
  }

  CFRelease (formats);

  return ret;
}
409
/* GstBaseSink::set_caps — parse the negotiated caps, compute the display
 * size from the pixel aspect ratio, and (re)create the internal buffer
 * pool.  Returns FALSE on unparsable caps or an impossible aspect ratio. */
static gboolean
gst_av_sample_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstAVSampleVideoSink *av_sink;
  gint width;
  gint height;
  gboolean ok;
  gint par_n, par_d;
  gint display_par_n, display_par_d;
  guint display_ratio_num, display_ratio_den;
  GstVideoInfo vinfo;
  GstStructure *structure;
  GstBufferPool *newpool, *oldpool;

  GST_DEBUG_OBJECT (bsink, "set caps with %" GST_PTR_FORMAT, caps);

  av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  ok = gst_video_info_from_caps (&vinfo, caps);
  if (!ok)
    return FALSE;

  width = GST_VIDEO_INFO_WIDTH (&vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&vinfo);

  par_n = GST_VIDEO_INFO_PAR_N (&vinfo);
  par_d = GST_VIDEO_INFO_PAR_D (&vinfo);

  /* guard against a 0 PAR numerator so the display-ratio math below
   * cannot divide by zero */
  if (!par_n)
    par_n = 1;

  /* we have no physical display to query, so assume square display pixels */
  display_par_n = 1;
  display_par_d = 1;

  ok = gst_video_calculate_display_ratio (&display_ratio_num,
      &display_ratio_den, width, height, par_n, par_d, display_par_n,
      display_par_d);

  if (!ok)
    return FALSE;

  GST_TRACE_OBJECT (bsink, "PAR: %u/%u DAR:%u/%u", par_n, par_d, display_par_n,
      display_par_d);

  /* pick whichever dimension divides evenly so the scaled size is exact;
   * otherwise approximate while keeping the video height */
  if (height % display_ratio_den == 0) {
    GST_DEBUG_OBJECT (bsink, "keeping video height");
    GST_VIDEO_SINK_WIDTH (av_sink) = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    GST_VIDEO_SINK_HEIGHT (av_sink) = height;
  } else if (width % display_ratio_num == 0) {
    GST_DEBUG_OBJECT (bsink, "keeping video width");
    GST_VIDEO_SINK_WIDTH (av_sink) = width;
    GST_VIDEO_SINK_HEIGHT (av_sink) = (guint)
        gst_util_uint64_scale_int (width, display_ratio_den, display_ratio_num);
  } else {
    GST_DEBUG_OBJECT (bsink, "approximating while keeping video height");
    GST_VIDEO_SINK_WIDTH (av_sink) = (guint)
        gst_util_uint64_scale_int (height, display_ratio_num,
        display_ratio_den);
    GST_VIDEO_SINK_HEIGHT (av_sink) = height;
  }
  GST_DEBUG_OBJECT (bsink, "scaling to %dx%d", GST_VIDEO_SINK_WIDTH (av_sink),
      GST_VIDEO_SINK_HEIGHT (av_sink));

  av_sink->info = vinfo;

  newpool = gst_video_buffer_pool_new ();
  structure = gst_buffer_pool_get_config (newpool);
  /* min 2 buffers because show_frame holds on to the last one */
  gst_buffer_pool_config_set_params (structure, caps, vinfo.size, 2, 0);
  gst_buffer_pool_set_config (newpool, structure);

  oldpool = av_sink->pool;
  /* we don't activate the pool yet, this will be done by downstream after it
   * has configured the pool. If downstream does not want our pool we will
   * activate it when we render into it */
  av_sink->pool = newpool;

  /* unref the old sink */
  if (oldpool) {
    /* we don't deactivate, some elements might still be using it, it will
     * be deactivated when the last ref is gone */
    gst_object_unref (oldpool);
  }

  return TRUE;
}
497
/* CVPixelBufferReleasePlanarBytesCallback: invoked by Core Video once it is
 * done with the planar pixel data.  Unmaps and frees the GstVideoFrame that
 * _enqueue_sample() allocated, which also releases the underlying buffer. */
static void
_unmap_planar_frame (GstVideoFrame * v_frame, const void * data, gsize dataSize,
    gsize numberOfPlanes, const void *planeAddressed[])
{
  GST_TRACE ("freeing video frame %p", v_frame);

  gst_video_frame_unmap (v_frame);
  g_free (v_frame);
}
507
/* CVPixelBufferReleaseBytesCallback: single-plane counterpart of
 * _unmap_planar_frame(); frees the mapped GstVideoFrame when Core Video is
 * done with the bytes. */
static void
_unmap_frame (GstVideoFrame * v_frame, const void * data)
{
  GST_TRACE ("freeing video frame %p", v_frame);

  gst_video_frame_unmap (v_frame);
  g_free (v_frame);
}
516
/* with render lock */
/* Wrap @buf zero-copy in a CVPixelBuffer + CMSampleBuffer and enqueue it on
 * the AVSampleBufferDisplayLayer.  The mapped GstVideoFrame stays alive
 * until Core Video invokes the release callback (_unmap_frame /
 * _unmap_planar_frame).  Returns FALSE on any mapping / Core Video /
 * Core Media failure. */
static gboolean
_enqueue_sample (GstAVSampleVideoSink * av_sink, GstBuffer *buf)
{
  CVPixelBufferRef pbuf;
  CMVideoFormatDescriptionRef v_format_desc;
  GstVideoFrame *v_frame;
  CMSampleTimingInfo sample_time;
  __block CMSampleBufferRef sample_buf;
  CFArrayRef sample_attachments;
  gsize l, r, t, b;
  gint i;

  GST_TRACE_OBJECT (av_sink, "redisplay of size:%ux%u, window size:%ux%u",
      GST_VIDEO_INFO_WIDTH (&av_sink->info),
      GST_VIDEO_INFO_HEIGHT (&av_sink->info),
      GST_VIDEO_SINK_WIDTH (av_sink),
      GST_VIDEO_SINK_HEIGHT (av_sink));

  /* heap-allocated because it must outlive this function: freed in the
   * CV release callback */
  v_frame = g_new0 (GstVideoFrame, 1);

  if (!gst_video_frame_map (v_frame, &av_sink->info, buf, GST_MAP_READ)) {
    GST_ERROR_OBJECT (av_sink, "Failed to map input video frame");
    g_free (v_frame);
    return FALSE;
  }

  if (GST_VIDEO_INFO_N_PLANES (&v_frame->info) == 1) {
    /* single plane */
    if (kCVReturnSuccess != CVPixelBufferCreateWithBytes (NULL,
        GST_VIDEO_INFO_WIDTH (&v_frame->info),
        GST_VIDEO_INFO_HEIGHT (&v_frame->info),
        _cv_pixel_format_type_from_video_format (GST_VIDEO_INFO_FORMAT (&v_frame->info)),
        v_frame->data[0], v_frame->info.stride[0],
        (CVPixelBufferReleaseBytesCallback) _unmap_frame, v_frame, NULL,
        &pbuf)) {
      GST_ERROR_OBJECT (av_sink, "Error creating Core Video pixel buffer");
      gst_video_frame_unmap (v_frame);
      g_free (v_frame);
      return FALSE;
    }
  } else {
    /* multi-planar */
    gsize widths[GST_VIDEO_MAX_PLANES] = { 0, };
    gsize heights[GST_VIDEO_MAX_PLANES] = { 0, };
    gsize strides[GST_VIDEO_MAX_PLANES] = { 0, };
    gint i;

    /* per-plane geometry as Core Video expects it */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&v_frame->info); i++) {
      widths[i] = GST_VIDEO_INFO_COMP_WIDTH (&v_frame->info, i);
      heights[i] = GST_VIDEO_INFO_COMP_HEIGHT (&v_frame->info, i);
      strides[i] = GST_VIDEO_INFO_COMP_STRIDE (&v_frame->info, i);
    }

    if (kCVReturnSuccess != CVPixelBufferCreateWithPlanarBytes (NULL,
        GST_VIDEO_INFO_WIDTH (&v_frame->info),
        GST_VIDEO_INFO_HEIGHT (&v_frame->info),
        _cv_pixel_format_type_from_video_format (GST_VIDEO_INFO_FORMAT (&v_frame->info)),
         /* have to put something for these two parameters otherwise
          * the callback is not called resulting in a big leak */
        v_frame, v_frame->info.size,
        GST_VIDEO_INFO_N_PLANES (&v_frame->info), v_frame->data,
        widths, heights, strides,
        (CVPixelBufferReleasePlanarBytesCallback) _unmap_planar_frame,
        v_frame, NULL, &pbuf)) {
      GST_ERROR_OBJECT (av_sink, "Error creating Core Video pixel buffer");
      gst_video_frame_unmap (v_frame);
      g_free (v_frame);
      return FALSE;
    }
  }

  /* the lock/unlock pair below only brackets debug introspection of the
   * freshly created pixel buffer */
  CVPixelBufferLockBaseAddress (pbuf, kCVPixelBufferLock_ReadOnly);

  CVPixelBufferGetExtendedPixels (pbuf, &l, &r, &t, &b);

  GST_TRACE_OBJECT (av_sink, "CVPixelBuffer n_planes %u width %u height %u"
      " data size %" G_GSIZE_FORMAT " extra pixels l %u r %u t %u b %u",
      (guint) CVPixelBufferGetPlaneCount (pbuf),
      (guint) CVPixelBufferGetWidth (pbuf),
      (guint) CVPixelBufferGetHeight (pbuf),
      CVPixelBufferGetDataSize (pbuf),
      (guint) l, (guint) r, (guint) t, (guint) b);

  GST_TRACE_OBJECT (av_sink, "GstVideoFrame n_planes %u width %u height %u"
      " data size %"G_GSIZE_FORMAT " extra pixels l %u r %u t %u b %u",
      GST_VIDEO_INFO_N_PLANES (&v_frame->info),
      GST_VIDEO_INFO_WIDTH (&v_frame->info),
      GST_VIDEO_INFO_HEIGHT (&v_frame->info),
      v_frame->info.size, 0, 0, 0, 0);

  if (GST_VIDEO_INFO_N_PLANES (&v_frame->info) > 1) {
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&v_frame->info); i++) {
      GST_TRACE_OBJECT (av_sink, "plane %i CVPixelBuffer width %u height %u "
          "stride %u data %p", i,
          (guint) CVPixelBufferGetWidthOfPlane (pbuf, i),
          (guint) CVPixelBufferGetHeightOfPlane (pbuf, i),
          (guint) CVPixelBufferGetBytesPerRowOfPlane (pbuf, i),
          CVPixelBufferGetBaseAddressOfPlane (pbuf, i));
      GST_TRACE_OBJECT (av_sink, "plane %i GstVideoFrame width %u height %u "
          "stride %u data %p", i,
          GST_VIDEO_INFO_COMP_WIDTH (&v_frame->info, i),
          GST_VIDEO_INFO_COMP_HEIGHT (&v_frame->info, i),
          GST_VIDEO_INFO_COMP_STRIDE (&v_frame->info, i),
          CVPixelBufferGetBaseAddressOfPlane (pbuf, i));
    }
  } else {
    GST_TRACE_OBJECT (av_sink, "CVPixelBuffer attrs stride %u data %p",
      (guint) CVPixelBufferGetBytesPerRow (pbuf),
      CVPixelBufferGetBaseAddress (pbuf));
    GST_TRACE_OBJECT (av_sink, "GstVideoFrame attrs stride %u data %p",
        v_frame->info.stride[0], v_frame->data[0]);
  }

  CVPixelBufferUnlockBaseAddress (pbuf, kCVPixelBufferLock_ReadOnly);

  if (0 != CMVideoFormatDescriptionCreateForImageBuffer (kCFAllocatorDefault,
        pbuf, &v_format_desc)) {
    GST_ERROR_OBJECT (av_sink, "Failed to retrieve video format from "
        "pixel buffer");
    CFRelease (pbuf);
    return FALSE;
  }

  /* NOTE(review): CMTimeMake takes an int32 timescale; GST_SECOND (1e9)
   * fits, but an invalid PTS/duration (GST_CLOCK_TIME_NONE) would overflow
   * the value.  Harmless today because every sample is forced to display
   * immediately below — confirm before relying on these timestamps. */
  sample_time.duration = CMTimeMake (GST_BUFFER_DURATION (buf), GST_SECOND);
  sample_time.presentationTimeStamp = CMTimeMake (GST_BUFFER_PTS (buf), GST_SECOND);
  sample_time.decodeTimeStamp = kCMTimeInvalid;

  if (0 != CMSampleBufferCreateForImageBuffer (kCFAllocatorDefault, pbuf, TRUE,
        NULL, NULL, v_format_desc, &sample_time, &sample_buf)) {
    GST_ERROR_OBJECT (av_sink, "Failed to create CMSampleBuffer from "
        "CVImageBuffer");
    CFRelease (v_format_desc);
    CFRelease (pbuf);
    return FALSE;
  }
  CFRelease (v_format_desc);

  sample_attachments = CMSampleBufferGetSampleAttachmentsArray (sample_buf, TRUE);
  for (i = 0; i < CFArrayGetCount (sample_attachments); i++) {
    CFMutableDictionaryRef attachments =
       (CFMutableDictionaryRef) CFArrayGetValueAtIndex (sample_attachments, i);
    /* Until we slave the CoreMedia clock, just display everything ASAP */
    CFDictionarySetValue (attachments, kCMSampleAttachmentKey_DisplayImmediately,
        kCFBooleanTrue);
  }

  /* re-apply the gravity in case force-aspect-ratio changed since start() */
  AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
  if (av_sink->keep_aspect_ratio)
    layer.videoGravity = AVLayerVideoGravityResizeAspect;
  else
    layer.videoGravity = AVLayerVideoGravityResize;
  [layer enqueueSampleBuffer:sample_buf];

  /* the layer holds its own references now */
  CFRelease (pbuf);
  CFRelease (sample_buf);

  return TRUE;
}
676
/* Install the layer's requestMediaDataWhenReady callback on a global
 * dispatch queue.  Called with render_lock held (from show_frame).  The
 * callback repeatedly hands the pending av_sink->buffer to the layer until
 * the layer is satisfied, no buffer is pending, or an enqueue fails. */
static void
_request_data (GstAVSampleVideoSink * av_sink)
{
  av_sink->layer_requesting_data = TRUE;

  AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
  [layer requestMediaDataWhenReadyOnQueue:
        dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
        usingBlock:^{
    while (TRUE) {
      /* don't needlessly fill up avsamplebufferdisplaylayer's queue.
       * This also allows us to skip displaying late frames */
      if (!layer.readyForMoreMediaData)
        break;

      g_mutex_lock (&av_sink->render_lock);

      /* nothing pending, or an earlier error: stop the callback entirely */
      if (!av_sink->buffer || av_sink->render_flow_return != GST_FLOW_OK) {
        _stop_requesting_data (av_sink);
        g_mutex_unlock (&av_sink->render_lock);
        break;
      }

      if (!_enqueue_sample (av_sink, av_sink->buffer)) {
        gst_buffer_unref (av_sink->buffer);
        av_sink->buffer = NULL;
        /* surfaced upstream on the next show_frame() call */
        av_sink->render_flow_return = GST_FLOW_ERROR;
        g_mutex_unlock (&av_sink->render_lock);
        break;
      }

      /* buffer consumed successfully */
      gst_buffer_unref (av_sink->buffer);
      av_sink->buffer = NULL;
      av_sink->render_flow_return = GST_FLOW_OK;
      g_mutex_unlock (&av_sink->render_lock);
    }
  }];
}
715
/* GstBaseSink::prepare — sanity check that caps negotiation produced a
 * usable display size before a buffer is rendered. */
static GstFlowReturn
gst_av_sample_video_sink_prepare (GstBaseSink * bsink, GstBuffer * buf)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);

  GST_LOG_OBJECT (bsink, "preparing buffer:%p", buf);

  /* set_caps() has not (successfully) run yet */
  if (GST_VIDEO_SINK_WIDTH (av_sink) < 1 ||
      GST_VIDEO_SINK_HEIGHT (av_sink) < 1)
    return GST_FLOW_NOT_NEGOTIATED;

  return GST_FLOW_OK;
}
732
/* GstVideoSink::show_frame — stash @buf for the layer's asynchronous
 * media-data callback and return the result of the *previous* enqueue.
 * Rendering itself happens in _request_data()'s block. */
static GstFlowReturn
gst_av_sample_video_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
{
  GstAVSampleVideoSink *av_sink;
  GstFlowReturn ret;

  GST_TRACE_OBJECT (vsink, "rendering buffer:%p", buf);

  av_sink = GST_AV_SAMPLE_VIDEO_SINK (vsink);

  g_mutex_lock (&av_sink->render_lock);
  /* keep only the newest buffer; an unconsumed older one is dropped, which
   * is how late frames get skipped */
  if (av_sink->buffer)
    gst_buffer_unref (av_sink->buffer);
  av_sink->buffer = gst_buffer_ref (buf);
  /* result of the previous asynchronous enqueue, not of @buf */
  ret = av_sink->render_flow_return;

  if (!av_sink->layer_requesting_data)
    _request_data (av_sink);
  g_mutex_unlock (&av_sink->render_lock);

#if defined(MAC_OS_X_VERSION_MAX_ALLOWED) && \
    MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \
    defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \
    MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
    /* NOTE(review): the MIN_REQUIRED <= 10.4 part of this guard looks odd
     * for an API that appeared in 10.10 — confirm the intended condition
     * before relying on this error path being compiled in. */
    AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
  if ([layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
    GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s",
        [[[layer error] description] UTF8String]);
    return GST_FLOW_ERROR;
  }
#endif

  return ret;
}
767
/* GstBaseSink::propose_allocation — answer downstream ALLOCATION queries:
 * offer our existing pool (if its caps still match), or a fresh video
 * buffer pool when one is requested, and advertise GstVideoMeta support.
 * Returns FALSE when the query has no/invalid caps or pool config fails. */
static gboolean
gst_av_sample_video_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
  GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
  GstBufferPool *pool;
  GstStructure *config;
  GstCaps *caps;
  guint size;
  gboolean need_pool;

  gst_query_parse_allocation (query, &caps, &need_pool);

  if (caps == NULL)
    goto no_caps;

  /* FIXME re-using buffer pool breaks renegotiation */
  if ((pool = av_sink->pool))
    gst_object_ref (pool);

  if (pool != NULL) {
    GstCaps *pcaps;

    /* we had a pool, check caps */
    GST_DEBUG_OBJECT (av_sink, "check existing pool caps");
    config = gst_buffer_pool_get_config (pool);
    /* also fills 'size' from the existing pool's configuration */
    gst_buffer_pool_config_get_params (config, &pcaps, &size, NULL, NULL);

    if (!gst_caps_is_equal (caps, pcaps)) {
      GST_DEBUG_OBJECT (av_sink, "pool has different caps");
      /* different caps, we can't use this pool */
      gst_object_unref (pool);
      pool = NULL;
    }
    gst_structure_free (config);
  } else {
    GstVideoInfo info;

    if (!gst_video_info_from_caps (&info, caps))
      goto invalid_caps;

    /* the normal size of a frame */
    size = info.size;
  }

  if (pool == NULL && need_pool) {
    GST_DEBUG_OBJECT (av_sink, "create new pool");
    pool = gst_video_buffer_pool_new ();

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, 0, 0);
    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }
  /* we need at least 2 buffer because we hold on to the last one */
  gst_query_add_allocation_pool (query, pool, size, 2, 0);
  if (pool)
    gst_object_unref (pool);

  /* we also support various metadata */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, 0);

  return TRUE;

  /* ERRORS */
no_caps:
  {
    GST_DEBUG_OBJECT (bsink, "no caps specified");
    return FALSE;
  }
invalid_caps:
  {
    GST_DEBUG_OBJECT (bsink, "invalid caps specified");
    return FALSE;
  }
config_failed:
  {
    GST_DEBUG_OBJECT (bsink, "failed setting config");
    return FALSE;
  }
}
848