/*
 * Copyright (C) 2010 Ole André Vadla Ravnås <oleavr@soundrop.com>
 * Copyright (C) 2016 Alessandro Decina <twi@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
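
/**
 * SECTION:element-avfvideosrc
 *
 * avfvideosrc reads frames from AVFoundation capture devices: cameras on
 * iOS and macOS, and (on macOS) screens.
 *
 * An illustrative launch line (the downstream elements are just an example):
 * |[
 * gst-launch-1.0 avfvideosrc ! videoconvert ! autovideosink
 * ]|
 */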

#ifdef HAVE_CONFIG_H
#  include "config.h"
#endif

#include "avfvideosrc.h"
#include "glcontexthelper.h"

#import <AVFoundation/AVFoundation.h>
#if !HAVE_IOS
#import <AppKit/AppKit.h>
#endif
#include <gst/video/video.h>
#include <gst/gl/gstglcontext.h>
#include "coremediabuffer.h"
#include "videotexturecache-gl.h"

#define DEFAULT_DEVICE_INDEX  -1
#define DEFAULT_POSITION      GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT
#define DEFAULT_ORIENTATION   GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT
#define DEFAULT_DEVICE_TYPE   GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT
#define DEFAULT_DO_STATS      FALSE

#define DEVICE_FPS_N          25
#define DEVICE_FPS_D          1

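/* Maximum number of frames queued between the capture callback and create:.
 * When the queue is full, the oldest frame is dropped (see
 * captureOutput:didOutputSampleBuffer:fromConnection:). */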
#define BUFFER_QUEUE_SIZE     2

GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
#define GST_CAT_DEFAULT gst_avf_video_src_debug

static GstVideoFormat get_gst_video_format(NSNumber *pixel_format);
static CMVideoDimensions
get_oriented_dimensions(GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions);

static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (
#if !HAVE_IOS
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "UYVY") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_RECTANGLE_STR ";"
#else
        GST_VIDEO_CAPS_MAKE_WITH_FEATURES
        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
            "NV12") ", "
        "texture-target = " GST_GL_TEXTURE_TARGET_2D_STR "; "
#endif
        "video/x-raw, "
        "format = (string) { NV12, UYVY, YUY2 }, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "

        "video/x-raw, "
        "format = (string) BGRA, "
        "framerate = " GST_VIDEO_FPS_RANGE ", "
        "width = " GST_VIDEO_SIZE_RANGE ", "
        "height = " GST_VIDEO_SIZE_RANGE "; "
));

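/* Condition values for bufQueueLock (an NSConditionLock). The lock is left in
 * the HAS_BUFFER_OR_STOP_REQUEST state whenever the queue is non-empty or
 * unlock has been requested, which is what create: blocks on. */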
typedef enum _QueueState {
  NO_BUFFERS = 1,
  HAS_BUFFER_OR_STOP_REQUEST,
} QueueState;

#define gst_avf_video_src_parent_class parent_class
G_DEFINE_TYPE (GstAVFVideoSrc, gst_avf_video_src, GST_TYPE_PUSH_SRC);

#define GST_TYPE_AVF_VIDEO_SOURCE_POSITION (gst_avf_video_source_position_get_type ())
static GType
gst_avf_video_source_position_get_type (void)
{
  static GType avf_video_source_position_type = 0;

  if (!avf_video_source_position_type) {
    static GEnumValue position_types[] = {
      { GST_AVF_VIDEO_SOURCE_POSITION_FRONT, "Front-facing camera", "front" },
      { GST_AVF_VIDEO_SOURCE_POSITION_BACK,  "Back-facing camera", "back"  },
      { GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_position_type =
    g_enum_register_static ("GstAVFVideoSourcePosition",
                            position_types);
  }

  return avf_video_source_position_type;
}

#define GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION (gst_avf_video_source_orientation_get_type ())
static GType
gst_avf_video_source_orientation_get_type (void)
{
  static GType avf_video_source_orientation_type = 0;

  if (!avf_video_source_orientation_type) {
    static GEnumValue orientation_types[] = {
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT, "Indicates that video should be oriented vertically, top at the top.", "portrait" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN, "Indicates that video should be oriented vertically, top at the bottom.", "portrait-upside-down" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT, "Indicates that video should be oriented horizontally, top on the left.", "landscape-right" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT, "Indicates that video should be oriented horizontally, top on the right.", "landscape-left" },
      { GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_orientation_type =
    g_enum_register_static ("GstAVFVideoSourceOrientation",
                            orientation_types);
  }

  return avf_video_source_orientation_type;
}

#define GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE (gst_avf_video_source_device_type_get_type ())
static GType
gst_avf_video_source_device_type_get_type (void)
{
  static GType avf_video_source_device_type_type = 0;

  if (!avf_video_source_device_type_type) {
    static GEnumValue device_type_types[] = {
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA, "A built-in wide angle camera. These devices are suitable for general purpose use.", "wide-angle" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA, "A built-in camera device with a longer focal length than a wide-angle camera.", "telephoto" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA, "A dual camera device, combining built-in wide-angle and telephoto cameras that work together as a single capture device.", "dual" },
      { GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT, "Default", "default" },
      { 0, NULL, NULL },
    };

    avf_video_source_device_type_type =
    g_enum_register_static ("GstAVFVideoSourceDeviceType",
                            device_type_types);
  }

  return avf_video_source_device_type_type;
}

@interface GstAVFVideoSrcImpl : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
  GstElement *element;
  GstBaseSrc *baseSrc;
  GstPushSrc *pushSrc;

  gint deviceIndex;
  const gchar *deviceName;
  GstAVFVideoSourcePosition position;
  GstAVFVideoSourceOrientation orientation;
  GstAVFVideoSourceDeviceType deviceType;
  BOOL doStats;

  AVCaptureSession *session;
  AVCaptureInput *input;
  AVCaptureVideoDataOutput *output;
  AVCaptureDevice *device;
  AVCaptureConnection *connection;
  CMClockRef inputClock;

  dispatch_queue_t mainQueue;
  dispatch_queue_t workerQueue;
  NSConditionLock *bufQueueLock;
  NSMutableArray *bufQueue;
  BOOL stopRequest;

  GstCaps *caps;
  GstVideoFormat format;
  gint width, height;
  GstClockTime latency;
  guint64 offset;

  GstClockTime lastSampling;
  guint count;
  gint fps;
  BOOL captureScreen;
  BOOL captureScreenCursor;
  BOOL captureScreenMouseClicks;

  BOOL useVideoMeta;
  GstGLContextHelper *ctxh;
  GstVideoTextureCache *textureCache;
}

- (id)init;
- (id)initWithSrc:(GstPushSrc *)src;
- (void)finalize;

@property int deviceIndex;
@property const gchar *deviceName;
@property GstAVFVideoSourcePosition position;
@property GstAVFVideoSourceOrientation orientation;
@property GstAVFVideoSourceDeviceType deviceType;
@property BOOL doStats;
@property int fps;
@property BOOL captureScreen;
@property BOOL captureScreenCursor;
@property BOOL captureScreenMouseClicks;

- (BOOL)openScreenInput;
- (BOOL)openDeviceInput;
- (BOOL)openDevice;
- (void)closeDevice;
- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format;
#if !HAVE_IOS
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex;
- (float)getScaleFactorFromDeviceIndex;
#endif
- (GstCaps *)getDeviceCaps;
- (BOOL)setDeviceCaps:(GstVideoInfo *)info;
- (BOOL)getSessionPresetCaps:(GstCaps *)result;
- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info;
- (GstCaps *)getCaps;
- (BOOL)setCaps:(GstCaps *)new_caps;
- (BOOL)start;
- (BOOL)stop;
- (BOOL)unlock;
- (BOOL)unlockStop;
- (BOOL)query:(GstQuery *)query;
- (void)setContext:(GstContext *)context;
- (GstStateChangeReturn)changeState:(GstStateChange)transition;
- (GstFlowReturn)create:(GstBuffer **)buf;
- (GstCaps *)fixate:(GstCaps *)caps;
- (BOOL)decideAllocation:(GstQuery *)query;
- (void)updateStatistics;
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection;

@end

#if HAVE_IOS

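/* Helpers mapping this element's property enums to their AVFoundation
 * counterparts. The DEFAULT values never reach these functions: callers only
 * convert when a non-default value was set, hence the asserts below. */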
static AVCaptureDeviceType GstAVFVideoSourceDeviceType2AVCaptureDeviceType(GstAVFVideoSourceDeviceType deviceType) {
  switch (deviceType) {
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_WIDE_ANGLE_CAMERA:
      return AVCaptureDeviceTypeBuiltInWideAngleCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_TELEPHOTO_CAMERA:
      return AVCaptureDeviceTypeBuiltInTelephotoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_BUILT_IN_DUAL_CAMERA:
      return AVCaptureDeviceTypeBuiltInDuoCamera;
    case GST_AVF_VIDEO_SOURCE_DEVICE_TYPE_DEFAULT:
      g_assert_not_reached();
  }
}

static AVCaptureDevicePosition GstAVFVideoSourcePosition2AVCaptureDevicePosition(GstAVFVideoSourcePosition position) {
  switch (position) {
    case GST_AVF_VIDEO_SOURCE_POSITION_FRONT:
      return AVCaptureDevicePositionFront;
    case GST_AVF_VIDEO_SOURCE_POSITION_BACK:
      return AVCaptureDevicePositionBack;
    case GST_AVF_VIDEO_SOURCE_POSITION_DEFAULT:
      g_assert_not_reached();
  }
}

static AVCaptureVideoOrientation GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(GstAVFVideoSourceOrientation orientation) {
  switch (orientation) {
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT:
      return AVCaptureVideoOrientationPortrait;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN:
      return AVCaptureVideoOrientationPortraitUpsideDown;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_LEFT:
      return AVCaptureVideoOrientationLandscapeLeft;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_LANDSCAPE_RIGHT:
      return AVCaptureVideoOrientationLandscapeRight;
    case GST_AVF_VIDEO_SOURCE_ORIENTATION_DEFAULT:
      g_assert_not_reached();
  }
}

#endif

@implementation GstAVFVideoSrcImpl

@synthesize deviceIndex, deviceName, position, orientation, deviceType, doStats,
    fps, captureScreen, captureScreenCursor, captureScreenMouseClicks;

- (id)init
{
  return [self initWithSrc:NULL];
}

- (id)initWithSrc:(GstPushSrc *)src
{
  if ((self = [super init])) {
    element = GST_ELEMENT_CAST (src);
    baseSrc = GST_BASE_SRC_CAST (src);
    pushSrc = src;

    deviceIndex = DEFAULT_DEVICE_INDEX;
    deviceName = NULL;
    position = DEFAULT_POSITION;
    orientation = DEFAULT_ORIENTATION;
    deviceType = DEFAULT_DEVICE_TYPE;
    captureScreen = NO;
    captureScreenCursor = NO;
    captureScreenMouseClicks = NO;
    useVideoMeta = NO;
    textureCache = NULL;
    ctxh = gst_gl_context_helper_new (element);
    mainQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.main", NULL);
    workerQueue =
        dispatch_queue_create ("org.freedesktop.gstreamer.avfvideosrc.output", NULL);

    gst_base_src_set_live (baseSrc, TRUE);
    gst_base_src_set_format (baseSrc, GST_FORMAT_TIME);
  }

  return self;
}

- (void)finalize
{
  mainQueue = NULL;
  workerQueue = NULL;
}

- (BOOL)openDeviceInput
{
  NSString *mediaType = AVMediaTypeVideo;
  NSError *err;

  if (deviceIndex == DEFAULT_DEVICE_INDEX) {
#if HAVE_IOS
    if (deviceType != DEFAULT_DEVICE_TYPE && position != DEFAULT_POSITION) {
      device = [AVCaptureDevice
                defaultDeviceWithDeviceType:GstAVFVideoSourceDeviceType2AVCaptureDeviceType(deviceType)
                mediaType:mediaType
                position:GstAVFVideoSourcePosition2AVCaptureDevicePosition(position)];
    } else {
      device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
    }
#else
    device = [AVCaptureDevice defaultDeviceWithMediaType:mediaType];
#endif
    if (device == nil) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                          ("No video capture devices found"), (NULL));
      return NO;
    }
  } else { // deviceIndex takes priority over position and deviceType
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    if (deviceIndex >= [devices count]) {
      GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                          ("Invalid video capture device index"), (NULL));
      return NO;
    }
    device = [devices objectAtIndex:deviceIndex];
  }
  g_assert (device != nil);

  deviceName = [[device localizedName] UTF8String];
  GST_INFO ("Opening '%s'", deviceName);

  input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                error:&err];
  if (input == nil) {
    GST_ELEMENT_ERROR (element, RESOURCE, BUSY,
        ("Failed to open device: %s",
        [[err localizedDescription] UTF8String]),
        (NULL));
    device = nil;
    return NO;
  }
  return YES;
}

- (BOOL)openScreenInput
{
#if HAVE_IOS
  return NO;
#else
  CGDirectDisplayID displayId;

  GST_DEBUG_OBJECT (element, "Opening screen input");

  displayId = [self getDisplayIdFromDeviceIndex];
  if (displayId == 0)
    return NO;

  AVCaptureScreenInput *screenInput =
      [[AVCaptureScreenInput alloc] initWithDisplayID:displayId];

  @try {
    [screenInput setValue:[NSNumber numberWithBool:captureScreenCursor]
                   forKey:@"capturesCursor"];
  } @catch (NSException *exception) {
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexpected error occurred: %s",
                   [[exception reason] UTF8String]);
    }
    GST_WARNING ("Capturing cursor is only supported in OS X >= 10.8");
  }
  screenInput.capturesMouseClicks = captureScreenMouseClicks;
  input = screenInput;
  return YES;
#endif
}

- (BOOL)openDevice
{
  BOOL success = NO, *successPtr = &success;

  GST_DEBUG_OBJECT (element, "Opening device");

  // Since Mojave, permission must be explicitly granted
  // before a capture device can be used
  if (@available(macOS 10.14, *)) {
    if (captureScreen)
      goto checked;

    // Check if permission has already been granted (or denied)
    AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (authStatus) {
      case AVAuthorizationStatusDenied:
        // The user has explicitly denied permission for media capture.
        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
          ("Device video access permission has been explicitly denied before"), ("Authorization status: %d", (int)authStatus));
        return success;
      case AVAuthorizationStatusRestricted:
        // The user is not allowed to access media capture devices.
        GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
          ("Device video access permission cannot be granted by the user"), ("Authorization status: %d", (int)authStatus));
        return success;
      case AVAuthorizationStatusAuthorized:
        // The user has explicitly granted permission for media capture,
        // or explicit user permission is not necessary for the media type in question.
        GST_DEBUG_OBJECT (element, "Device video access permission has already been granted");
        break;
      case AVAuthorizationStatusNotDetermined: {
        // Explicit user permission is required for media capture,
        // but the user has not yet granted or denied such permission.
        dispatch_semaphore_t sema = dispatch_semaphore_create(0);
        dispatch_sync (mainQueue, ^{
          [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            GST_DEBUG_OBJECT (element, "Device video access permission %s", granted ? "granted" : "not granted");
            dispatch_semaphore_signal(sema);
          }];
        });
        // Block until the permission dialog has been answered
        if (![NSThread isMainThread]) {
          dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
        } else {
          while (dispatch_semaphore_wait(sema, DISPATCH_TIME_NOW)) {
            [[NSRunLoop currentRunLoop] runMode:NSDefaultRunLoopMode beforeDate:[NSDate dateWithTimeIntervalSinceNow:0]];
          }
        }
        // Check if permission has been granted
        AVAuthorizationStatus videoAuthorizationStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (videoAuthorizationStatus != AVAuthorizationStatusAuthorized) {
          GST_ELEMENT_ERROR (element, RESOURCE, NOT_AUTHORIZED,
            ("Device video access permission has just been denied"), ("Authorization status: %d", (int)videoAuthorizationStatus));
          return success;
        }
        break;
      }
    }
  }

checked:
  dispatch_sync (mainQueue, ^{
    BOOL ret;

    if (captureScreen)
      ret = [self openScreenInput];
    else
      ret = [self openDeviceInput];

    if (!ret)
      return;

    output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self
                              queue:workerQueue];
    output.alwaysDiscardsLateVideoFrames = YES;
    output.videoSettings = nil; /* device native format */

    session = [[AVCaptureSession alloc] init];
    [session addInput:input];
    [session addOutput:output];

    /* retained by session */
    connection = [[output connections] firstObject];
#if HAVE_IOS
    if (orientation != DEFAULT_ORIENTATION)
      connection.videoOrientation = GstAVFVideoSourceOrientation2AVCaptureVideoOrientation(orientation);
#endif
    inputClock = ((AVCaptureInputPort *)connection.inputPorts[0]).clock;

    *successPtr = YES;
  });

  GST_DEBUG_OBJECT (element, "Opening device %s", success ? "succeeded" : "failed");

  return success;
}

- (void)closeDevice
{
  GST_DEBUG_OBJECT (element, "Closing device");

  dispatch_sync (mainQueue, ^{
    g_assert (![session isRunning]);

    connection = nil;
    inputClock = nil;

    [session removeInput:input];
    [session removeOutput:output];

    session = nil;
    input = nil;
    output = nil;

    if (!captureScreen) {
      device = nil;
    }

    if (caps)
      gst_caps_unref (caps);
    caps = NULL;
  });
}

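/* Convenience macros building fixed-framerate and framerate-range
 * video/x-raw caps for a given format and size. */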
#define GST_AVF_CAPS_NEW(format, w, h, fps_n, fps_d)                  \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION, (fps_n), (fps_d),             \
        NULL))

#define GST_AVF_FPS_RANGE_CAPS_NEW(format, w, h, min_fps_n, min_fps_d, max_fps_n, max_fps_d) \
    (gst_caps_new_simple ("video/x-raw",                              \
        "width", G_TYPE_INT, w,                                       \
        "height", G_TYPE_INT, h,                                      \
        "format", G_TYPE_STRING, gst_video_format_to_string (format), \
        "framerate", GST_TYPE_FRACTION_RANGE, (min_fps_n), (min_fps_d), (max_fps_n), (max_fps_d), \
        NULL))

- (GstVideoFormat)getGstVideoFormat:(NSNumber *)pixel_format
{
  GstVideoFormat gst_format = get_gst_video_format(pixel_format);
  if (gst_format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_LOG_OBJECT (element, "Pixel format %s is not handled by avfvideosrc",
        [[pixel_format stringValue] UTF8String]);
  }
  return gst_format;
}

#if !HAVE_IOS
- (CGDirectDisplayID)getDisplayIdFromDeviceIndex
{
  NSDictionary *description;
  NSNumber *displayId;
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return kCGDirectMainDisplay;
  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                        ("Invalid screen capture device index"), (NULL));
    return 0;
  }
  description = [[screens objectAtIndex:deviceIndex] deviceDescription];
  displayId = [description objectForKey:@"NSScreenNumber"];
  return [displayId unsignedIntegerValue];
}

- (float)getScaleFactorFromDeviceIndex
{
  NSArray *screens = [NSScreen screens];

  if (deviceIndex == DEFAULT_DEVICE_INDEX)
    return [[NSScreen mainScreen] backingScaleFactor];
  if (deviceIndex >= [screens count]) {
    GST_ELEMENT_ERROR (element, RESOURCE, NOT_FOUND,
                        ("Invalid screen capture device index"), (NULL));
    return 1.0;
  }
  return [[screens objectAtIndex:deviceIndex] backingScaleFactor];
}
#endif

- (CMVideoDimensions)orientedDimensions:(CMVideoDimensions)dimensions
{
  return get_oriented_dimensions(orientation, dimensions);
}

- (GstCaps *)getDeviceCaps
{
  GST_DEBUG_OBJECT (element, "Getting device caps");
  GstCaps *device_caps = gst_av_capture_device_get_caps (device, output, orientation);
  GST_DEBUG_OBJECT (element, "Device returned the following caps %" GST_PTR_FORMAT, device_caps);

  return device_caps;
}

- (BOOL)setDeviceCaps:(GstVideoInfo *)info
{
  double framerate;
  gboolean found_format = FALSE, found_framerate = FALSE;
  NSArray *formats = [device valueForKey:@"formats"];
  gst_util_fraction_to_double (info->fps_n, info->fps_d, &framerate);

  GST_DEBUG_OBJECT (element, "Setting device caps");

  if ([device lockForConfiguration:NULL] == YES) {
    for (NSObject *f in formats) {
      CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
      CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
      dimensions = [self orientedDimensions:dimensions];
      if (dimensions.width == info->width && dimensions.height == info->height) {
        found_format = TRUE;
        [device setValue:f forKey:@"activeFormat"];
        for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
          gdouble min_frame_rate, max_frame_rate;

          [[rate valueForKey:@"minFrameRate"] getValue:&min_frame_rate];
          [[rate valueForKey:@"maxFrameRate"] getValue:&max_frame_rate];
          if ((framerate >= min_frame_rate - 0.00001) &&
              (framerate <= max_frame_rate + 0.00001)) {
            NSValue *frame_duration_value;
            found_framerate = TRUE;
            if (min_frame_rate == max_frame_rate) {
              /* on mac we get tight ranges and an exception is raised if the
               * frame duration doesn't match the one reported in the range to
               * the last decimal point
               */
              frame_duration_value = [rate valueForKey:@"minFrameDuration"];
            } else {
              // Invert fps_n and fps_d to get the frame duration value and timescale (or numerator and denominator)
              frame_duration_value = [NSValue valueWithCMTime:CMTimeMake (info->fps_d, info->fps_n)];
            }
            [device setValue:frame_duration_value forKey:@"activeVideoMinFrameDuration"];
            @try {
              /* Only available on OSX >= 10.8 and iOS >= 7.0 */
              [device setValue:frame_duration_value forKey:@"activeVideoMaxFrameDuration"];
            } @catch (NSException *exception) {
              if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                GST_WARNING ("An unexpected error occurred: %s",
                              [exception.reason UTF8String]);
              }
            }
            break;
          }
        }

        if (found_framerate) {
          break;
        }
      }
    }
    if (!found_format) {
      GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
      return NO;
    }
    if (!found_framerate) {
      GST_WARNING ("Unsupported capture framerate %d/%d", info->fps_n, info->fps_d);
      return NO;
    }
  } else {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }
  return YES;
}

- (BOOL)getSessionPresetCaps:(GstCaps *)result
{
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  for (NSNumber *pixel_format in pixel_formats) {
    GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
    if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

#if HAVE_IOS
    if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1920, 1080, DEVICE_FPS_N, DEVICE_FPS_D));
#endif
    if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 1280, 720, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 640, 480, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 480, 360, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 352, 288, DEVICE_FPS_N, DEVICE_FPS_D));
    if ([session canSetSessionPreset:AVCaptureSessionPresetLow])
      gst_caps_append (result, GST_AVF_CAPS_NEW (gst_format, 192, 144, DEVICE_FPS_N, DEVICE_FPS_D));
  }

  GST_LOG_OBJECT (element, "Session presets returned the following caps %" GST_PTR_FORMAT, result);

  return YES;
}

- (BOOL)setSessionPresetCaps:(GstVideoInfo *)info
{
  GST_DEBUG_OBJECT (element, "Setting session preset caps");

  if ([device lockForConfiguration:NULL] != YES) {
    GST_WARNING ("Couldn't lock device for configuration");
    return NO;
  }

  switch (info->width) {
  case 192:
    session.sessionPreset = AVCaptureSessionPresetLow;
    break;
  case 352:
    session.sessionPreset = AVCaptureSessionPreset352x288;
    break;
  case 480:
    session.sessionPreset = AVCaptureSessionPresetMedium;
    break;
  case 640:
    session.sessionPreset = AVCaptureSessionPreset640x480;
    break;
  case 1280:
    session.sessionPreset = AVCaptureSessionPreset1280x720;
    break;
#if HAVE_IOS
  case 1920:
    session.sessionPreset = AVCaptureSessionPreset1920x1080;
    break;
#endif
  default:
    GST_WARNING ("Unsupported capture dimensions %dx%d", info->width, info->height);
    return NO;
  }
  return YES;
}

- (GstCaps *)getCaps
{
  GstCaps *result;
  NSArray *pixel_formats;

  if (session == nil)
    return NULL; /* BaseSrc will return template caps */

  result = gst_caps_new_empty ();
  pixel_formats = output.availableVideoCVPixelFormatTypes;

  if (captureScreen) {
#if !HAVE_IOS
    CGRect rect = CGDisplayBounds ([self getDisplayIdFromDeviceIndex]);
    float scale = [self getScaleFactorFromDeviceIndex];
    for (NSNumber *pixel_format in pixel_formats) {
      GstVideoFormat gst_format = [self getGstVideoFormat:pixel_format];
      if (gst_format != GST_VIDEO_FORMAT_UNKNOWN)
        gst_caps_append (result, gst_caps_new_simple ("video/x-raw",
            "width", G_TYPE_INT, (int)(rect.size.width * scale),
            "height", G_TYPE_INT, (int)(rect.size.height * scale),
            "format", G_TYPE_STRING, gst_video_format_to_string (gst_format),
            NULL));
    }
#else
    GST_WARNING ("Screen capture is not supported by iOS");
#endif
    return result;
  }

  @try {
    result = gst_caps_merge (result, [self getDeviceCaps]);
  } @catch (NSException *exception) {
    if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
      GST_WARNING ("An unexpected error occurred: %s", [exception.reason UTF8String]);
      return result;
    }

    /* Fallback on session presets API for iOS < 7.0 */
    [self getSessionPresetCaps:result];
  }

  return result;
}

- (BOOL)setCaps:(GstCaps *)new_caps
{
  GstVideoInfo info;
  BOOL success = YES, *successPtr = &success;

  gst_video_info_init (&info);
  gst_video_info_from_caps (&info, new_caps);

  width = info.width;
  height = info.height;
  format = info.finfo->format;
  latency = gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);

  dispatch_sync (mainQueue, ^{
    int newformat;

    if (captureScreen) {
#if !HAVE_IOS
      AVCaptureScreenInput *screenInput = (AVCaptureScreenInput *)input;
      screenInput.minFrameDuration = CMTimeMake(info.fps_d, info.fps_n);
#else
      GST_WARNING ("Screen capture is not supported by iOS");
      *successPtr = NO;
      return;
#endif
    } else {
      @try {
        /* formats and activeFormat keys are only available on OSX >= 10.7 and iOS >= 7.0 */
        *successPtr = [self setDeviceCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;
      } @catch (NSException *exception) {
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
          GST_WARNING ("An unexpected error occurred: %s", [exception.reason UTF8String]);
          *successPtr = NO;
          return;
        }

        /* Fallback on session presets API for iOS < 7.0 */
        *successPtr = [self setSessionPresetCaps:(GstVideoInfo *)&info];
        if (*successPtr != YES)
          return;
      }
    }

    switch (format) {
      case GST_VIDEO_FORMAT_NV12:
        newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
        break;
      case GST_VIDEO_FORMAT_UYVY:
        newformat = kCVPixelFormatType_422YpCbCr8;
        break;
      case GST_VIDEO_FORMAT_YUY2:
        newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
        break;
      case GST_VIDEO_FORMAT_BGRA:
        newformat = kCVPixelFormatType_32BGRA;
        break;
      default:
        *successPtr = NO;
        GST_WARNING ("Unsupported output format %s",
            gst_video_format_to_string (format));
        return;
    }

    GST_INFO_OBJECT (element,
        "width: %d height: %d format: %s", width, height,
        gst_video_format_to_string (format));

    output.videoSettings = [NSDictionary
        dictionaryWithObject:[NSNumber numberWithInt:newformat]
        forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];

    gst_caps_replace (&caps, new_caps);
    GST_INFO_OBJECT (element, "configured caps %" GST_PTR_FORMAT, caps);

    if (![session isRunning])
      [session startRunning];

    /* Unlock device configuration only after the session is started so the
     * session won't reset the capture formats */
    [device unlockForConfiguration];
  });

  return success;
}

- (BOOL)start
{
  bufQueueLock = [[NSConditionLock alloc] initWithCondition:NO_BUFFERS];
  bufQueue = [[NSMutableArray alloc] initWithCapacity:BUFFER_QUEUE_SIZE];
  stopRequest = NO;

  offset = 0;
  latency = GST_CLOCK_TIME_NONE;

  lastSampling = GST_CLOCK_TIME_NONE;
  count = 0;
  fps = -1;

  return YES;
}

- (BOOL)stop
{
  dispatch_sync (mainQueue, ^{ [session stopRunning]; });
  dispatch_sync (workerQueue, ^{});

  bufQueueLock = nil;
  bufQueue = nil;

  if (textureCache)
    g_object_unref (textureCache);
  textureCache = NULL;

  if (ctxh)
    gst_gl_context_helper_free (ctxh);
  ctxh = NULL;

  return YES;
}

- (BOOL)query:(GstQuery *)query
{
  BOOL result = NO;

  if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
    if (device != nil && caps != NULL) {
      GstClockTime min_latency, max_latency;

      min_latency = max_latency = latency;
      result = YES;

      GST_DEBUG_OBJECT (element, "reporting latency of min %" GST_TIME_FORMAT
          " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
      gst_query_set_latency (query, TRUE, min_latency, max_latency);
    }
  } else {
    result = GST_BASE_SRC_CLASS (parent_class)->query (baseSrc, query);
  }

  return result;
}

- (BOOL)unlock
{
  [bufQueueLock lock];
  stopRequest = YES;
  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}

- (BOOL)unlockStop
{
  [bufQueueLock lock];
  stopRequest = NO;
  [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  return YES;
}

- (GstStateChangeReturn)changeState:(GstStateChange)transition
{
  GstStateChangeReturn ret;

  if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
    if (![self openDevice])
      return GST_STATE_CHANGE_FAILURE;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  if (transition == GST_STATE_CHANGE_READY_TO_NULL)
    [self closeDevice];

  return ret;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)aConnection
{
  GstClockTime timestamp, duration;

  [bufQueueLock lock];

  if (stopRequest) {
    [bufQueueLock unlock];
    return;
  }

  [self getSampleBuffer:sampleBuffer timestamp:&timestamp duration:&duration];

  if (timestamp == GST_CLOCK_TIME_NONE) {
    [bufQueueLock unlockWithCondition:([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];
    return;
  }

  if ([bufQueue count] == BUFFER_QUEUE_SIZE)
    [bufQueue removeLastObject];

  [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer,
                           @"timestamp": @(timestamp),
                           @"duration": @(duration)}
                 atIndex:0];

  [bufQueueLock unlockWithCondition:HAS_BUFFER_OR_STOP_REQUEST];
}

- (GstFlowReturn)create:(GstBuffer **)buf
{
  CMSampleBufferRef sbuf;
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  size_t cur_width, cur_height;
  GstClockTime timestamp, duration;

  [bufQueueLock lockWhenCondition:HAS_BUFFER_OR_STOP_REQUEST];
  if (stopRequest) {
    [bufQueueLock unlock];
    return GST_FLOW_FLUSHING;
  }

  NSDictionary *dic = (NSDictionary *) [bufQueue lastObject];
  sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"];
  timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue];
  duration = (GstClockTime) [dic[@"duration"] longLongValue];
  CFRetain (sbuf);
  [bufQueue removeLastObject];
  [bufQueueLock unlockWithCondition:
      ([bufQueue count] == 0) ? NO_BUFFERS : HAS_BUFFER_OR_STOP_REQUEST];

  /* Check output frame size dimensions */
  image_buf = CMSampleBufferGetImageBuffer (sbuf);
  if (image_buf) {
    pixel_buf = (CVPixelBufferRef) image_buf;
    cur_width = CVPixelBufferGetWidth (pixel_buf);
    cur_height = CVPixelBufferGetHeight (pixel_buf);

    if (width != cur_width || height != cur_height) {
      /* Set new caps according to current frame dimensions */
      GST_WARNING ("Output frame size has changed %dx%d -> %dx%d, updating caps",
          width, height, (int)cur_width, (int)cur_height);
      width = cur_width;
      height = cur_height;
      gst_caps_set_simple (caps,
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        NULL);
      gst_pad_push_event (GST_BASE_SRC_PAD (baseSrc), gst_event_new_caps (caps));
    }
  }

  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache);
  if (*buf == NULL) {
    CFRelease (sbuf);
    return GST_FLOW_ERROR;
  }
  CFRelease (sbuf);

  GST_BUFFER_OFFSET (*buf) = offset++;
  GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;
  GST_BUFFER_TIMESTAMP (*buf) = timestamp;
  GST_BUFFER_DURATION (*buf) = duration;

  if (doStats)
    [self updateStatistics];

  return GST_FLOW_OK;
}

- (GstCaps *)fixate:(GstCaps *)new_caps
{
  GstStructure *structure;

  new_caps = gst_caps_make_writable (new_caps);
  new_caps = gst_caps_truncate (new_caps);
  structure = gst_caps_get_structure (new_caps, 0);
  /* crank up to 11. This is what the presets do, but we don't use the presets
   * in ios >= 7.0 */
  gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", G_MAXINT, 1);

  return gst_caps_fixate (new_caps);
}

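/* If downstream selected GLMemory caps, (re)create the texture cache that
 * create: uses to upload CoreMedia buffers as GL textures. The cache is tied
 * to a GL context, so a context change invalidates the old cache. */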
- (BOOL)decideAllocation:(GstQuery *)query
{
  GstCaps *alloc_caps;
  GstCapsFeatures *features;
  gboolean ret;

  ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (baseSrc, query);
  if (!ret)
    return ret;

  gst_query_parse_allocation (query, &alloc_caps, NULL);
  features = gst_caps_get_features (alloc_caps, 0);
  if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    GstVideoTextureCacheGL *cache_gl;

    cache_gl = textureCache ? GST_VIDEO_TEXTURE_CACHE_GL (textureCache) : NULL;

    gst_gl_context_helper_ensure_context (ctxh);
    GST_INFO_OBJECT (element, "pushing textures, context %p old context %p",
        ctxh->context, cache_gl ? cache_gl->ctx : NULL);
    if (cache_gl && cache_gl->ctx != ctxh->context) {
      g_object_unref (textureCache);
      textureCache = NULL;
    }
    if (!textureCache)
      textureCache = gst_video_texture_cache_gl_new (ctxh->context);
    gst_video_texture_cache_set_format (textureCache, format, alloc_caps);
  }

  return TRUE;
}

- (void)setContext:(GstContext *)context
{
  GST_INFO_OBJECT (element, "setting context %s",
      gst_context_get_context_type (context));
  gst_gl_handle_set_context (element, context,
      &ctxh->display, &ctxh->other_context);
  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}

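/* Derive the GStreamer PTS for a captured sample: measure how long the frame
 * spent inside AVFoundation (input clock now minus the sample's presentation
 * timestamp) and subtract that delay from the element's running time. */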
- (void)getSampleBuffer:(CMSampleBufferRef)sbuf
              timestamp:(GstClockTime *)outTimestamp
               duration:(GstClockTime *)outDuration
{
  CMSampleTimingInfo time_info;
  GstClockTime timestamp, avf_timestamp, duration, input_clock_now, input_clock_diff, running_time;
  CMItemCount num_timings;
  GstClock *clock;
  CMTime now;

  timestamp = GST_CLOCK_TIME_NONE;
  duration = GST_CLOCK_TIME_NONE;
  if (CMSampleBufferGetOutputSampleTimingInfoArray(sbuf, 1, &time_info, &num_timings) == noErr) {
    avf_timestamp = gst_util_uint64_scale (GST_SECOND,
            time_info.presentationTimeStamp.value, time_info.presentationTimeStamp.timescale);

    if (CMTIME_IS_VALID (time_info.duration) && time_info.duration.timescale != 0)
      duration = gst_util_uint64_scale (GST_SECOND,
          time_info.duration.value, time_info.duration.timescale);

    now = CMClockGetTime(inputClock);
    input_clock_now = gst_util_uint64_scale (GST_SECOND,
        now.value, now.timescale);
    input_clock_diff = input_clock_now - avf_timestamp;

    GST_OBJECT_LOCK (element);
    clock = GST_ELEMENT_CLOCK (element);
    if (clock) {
      running_time = gst_clock_get_time (clock) - element->base_time;
      /* We use presentationTimeStamp to determine how much time it took
       * between capturing and receiving the frame in our delegate
       * (e.g. how long it spent in AVF queues), then we subtract that time
       * from our running time to get the actual timestamp.
       */
      if (running_time >= input_clock_diff)
        timestamp = running_time - input_clock_diff;
      else
        timestamp = running_time;

      GST_DEBUG_OBJECT (element, "AVF clock: %" GST_TIME_FORMAT ", AVF PTS: %" GST_TIME_FORMAT
          ", AVF clock diff: %" GST_TIME_FORMAT
          ", running time: %" GST_TIME_FORMAT ", out PTS: %" GST_TIME_FORMAT,
          GST_TIME_ARGS (input_clock_now), GST_TIME_ARGS (avf_timestamp),
          GST_TIME_ARGS (input_clock_diff),
          GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp));
    } else {
      /* no clock, can't set timestamps */
      timestamp = GST_CLOCK_TIME_NONE;
    }
    GST_OBJECT_UNLOCK (element);
  }

  *outTimestamp = timestamp;
  *outDuration = duration;
}

- (void)updateStatistics
{
  GstClock *clock;

  GST_OBJECT_LOCK (element);
  clock = GST_ELEMENT_CLOCK (element);
  if (clock != NULL)
    gst_object_ref (clock);
  GST_OBJECT_UNLOCK (element);

  if (clock != NULL) {
    GstClockTime now = gst_clock_get_time (clock);
    gst_object_unref (clock);

    count++;

    if (GST_CLOCK_TIME_IS_VALID (lastSampling)) {
      if (now - lastSampling >= GST_SECOND) {
        GST_OBJECT_LOCK (element);
        fps = count;
        GST_OBJECT_UNLOCK (element);

        g_object_notify (G_OBJECT (element), "fps");

        lastSampling = now;
        count = 0;
      }
    } else {
      lastSampling = now;
    }
  }
}

@end

/*
 * Glue code
 */

enum
{
  PROP_0,
  PROP_DEVICE_INDEX,
  PROP_DEVICE_NAME,
  PROP_POSITION,
  PROP_ORIENTATION,
  PROP_DEVICE_TYPE,
  PROP_DO_STATS,
  PROP_FPS,
#if !HAVE_IOS
  PROP_CAPTURE_SCREEN,
  PROP_CAPTURE_SCREEN_CURSOR,
  PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
#endif
};

static void gst_avf_video_src_finalize (GObject * obj);
static void gst_avf_video_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_avf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_avf_video_src_change_state (
    GstElement * element, GstStateChange transition);
static GstCaps * gst_avf_video_src_get_caps (GstBaseSrc * basesrc,
    GstCaps * filter);
static gboolean gst_avf_video_src_set_caps (GstBaseSrc * basesrc,
    GstCaps * caps);
static gboolean gst_avf_video_src_start (GstBaseSrc * basesrc);
static gboolean gst_avf_video_src_stop (GstBaseSrc * basesrc);
static gboolean gst_avf_video_src_query (GstBaseSrc * basesrc,
    GstQuery * query);
static gboolean gst_avf_video_src_unlock (GstBaseSrc * basesrc);
static gboolean gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc);
static GstFlowReturn gst_avf_video_src_create (GstPushSrc * pushsrc,
    GstBuffer ** buf);
static GstCaps * gst_avf_video_src_fixate (GstBaseSrc * bsrc,
    GstCaps * caps);
static gboolean gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query);
static void gst_avf_video_src_set_context (GstElement * element,
    GstContext * context);

static void
gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
  GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);

  gobject_class->finalize = gst_avf_video_src_finalize;
  gobject_class->get_property = gst_avf_video_src_get_property;
  gobject_class->set_property = gst_avf_video_src_set_property;

  gstelement_class->change_state = gst_avf_video_src_change_state;
  gstelement_class->set_context = gst_avf_video_src_set_context;

  gstbasesrc_class->get_caps = gst_avf_video_src_get_caps;
  gstbasesrc_class->set_caps = gst_avf_video_src_set_caps;
  gstbasesrc_class->start = gst_avf_video_src_start;
  gstbasesrc_class->stop = gst_avf_video_src_stop;
  gstbasesrc_class->query = gst_avf_video_src_query;
  gstbasesrc_class->unlock = gst_avf_video_src_unlock;
  gstbasesrc_class->unlock_stop = gst_avf_video_src_unlock_stop;
  gstbasesrc_class->fixate = gst_avf_video_src_fixate;
  gstbasesrc_class->decide_allocation = gst_avf_video_src_decide_allocation;

  gstpushsrc_class->create = gst_avf_video_src_create;

  gst_element_class_set_metadata (gstelement_class,
      "Video Source (AVFoundation)", "Source/Video/Hardware",
      "Reads frames from an AVFoundation device",
      "Ole André Vadla Ravnås <oleavr@soundrop.com>");

  gst_element_class_add_static_pad_template (gstelement_class, &src_template);

  g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
      g_param_spec_int ("device-index", "Device Index",
          "The zero-based device index",
          -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
      g_param_spec_string ("device-name", "Device Name",
          "The name of the currently opened capture device",
          NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_POSITION,
      g_param_spec_enum ("position", "Position",
          "The position of the capture device (front- or back-facing)",
          GST_TYPE_AVF_VIDEO_SOURCE_POSITION, DEFAULT_POSITION,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_ORIENTATION,
      g_param_spec_enum ("orientation", "Orientation",
          "The orientation of the video",
          GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, DEFAULT_ORIENTATION,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_TYPE,
      g_param_spec_enum ("device-type", "Device Type",
          "The general type of a video capture device",
          GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, DEFAULT_DEVICE_TYPE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DO_STATS,
      g_param_spec_boolean ("do-stats", "Enable statistics",
          "Enable logging of statistics", DEFAULT_DO_STATS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_FPS,
      g_param_spec_int ("fps", "Frames per second",
          "Last measured framerate, if statistics are enabled",
          -1, G_MAXINT, -1, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
#if !HAVE_IOS
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN,
      g_param_spec_boolean ("capture-screen", "Enable screen capture",
          "Enable screen capture functionality", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_CURSOR,
      g_param_spec_boolean ("capture-screen-cursor", "Capture screen cursor",
          "Enable cursor capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_CAPTURE_SCREEN_MOUSE_CLICKS,
      g_param_spec_boolean ("capture-screen-mouse-clicks", "Enable mouse clicks capture",
          "Enable mouse clicks capture while capturing screen", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
#endif

  GST_DEBUG_CATEGORY_INIT (gst_avf_video_src_debug, "avfvideosrc",
      0, "AVFoundation video source");

  gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_POSITION, 0);
  gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_ORIENTATION, 0);
  gst_type_mark_as_plugin_api (GST_TYPE_AVF_VIDEO_SOURCE_DEVICE_TYPE, 0);
}

static void
gst_avf_video_src_init (GstAVFVideoSrc * src)
{
  src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)];
}

static void
gst_avf_video_src_finalize (GObject * obj)
{
  CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl);

  G_OBJECT_CLASS (parent_class)->finalize (obj);
}

static void
gst_avf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      g_value_set_boolean (value, impl.captureScreen);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      g_value_set_boolean (value, impl.captureScreenCursor);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      g_value_set_boolean (value, impl.captureScreenMouseClicks);
      break;
#endif
    case PROP_DEVICE_INDEX:
      g_value_set_int (value, impl.deviceIndex);
      break;
    case PROP_DEVICE_NAME:
      g_value_set_string (value, impl.deviceName);
      break;
    case PROP_POSITION:
      g_value_set_enum (value, impl.position);
      break;
    case PROP_ORIENTATION:
      g_value_set_enum (value, impl.orientation);
      break;
    case PROP_DEVICE_TYPE:
      g_value_set_enum (value, impl.deviceType);
      break;
    case PROP_DO_STATS:
      g_value_set_boolean (value, impl.doStats);
      break;
    case PROP_FPS:
      GST_OBJECT_LOCK (object);
      g_value_set_int (value, impl.fps);
      GST_OBJECT_UNLOCK (object);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_avf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAVFVideoSrcImpl *impl = GST_AVF_VIDEO_SRC_IMPL (object);

  switch (prop_id) {
#if !HAVE_IOS
    case PROP_CAPTURE_SCREEN:
      impl.captureScreen = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_CURSOR:
      impl.captureScreenCursor = g_value_get_boolean (value);
      break;
    case PROP_CAPTURE_SCREEN_MOUSE_CLICKS:
      impl.captureScreenMouseClicks = g_value_get_boolean (value);
      break;
#endif
    case PROP_DEVICE_INDEX:
      impl.deviceIndex = g_value_get_int (value);
      break;
    case PROP_POSITION:
      impl.position = g_value_get_enum (value);
      break;
    case PROP_ORIENTATION:
      impl.orientation = g_value_get_enum (value);
      break;
    case PROP_DEVICE_TYPE:
      impl.deviceType = g_value_get_enum (value);
      break;
    case PROP_DO_STATS:
      impl.doStats = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static GstStateChangeReturn
gst_avf_video_src_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState:transition];

  return ret;
}

static GstCaps *
gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
  GstCaps *ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps];

  return ret;
}

static gboolean
gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps)
{
  gboolean ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps];

  return ret;
}

static gboolean
gst_avf_video_src_start (GstBaseSrc * basesrc)
{
  gboolean ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start];

  return ret;
}

static gboolean
gst_avf_video_src_stop (GstBaseSrc * basesrc)
{
  gboolean ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop];

  return ret;
}

static gboolean
gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
  gboolean ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query];

  return ret;
}

static gboolean
gst_avf_video_src_unlock (GstBaseSrc * basesrc)
{
  gboolean ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock];

  return ret;
}

static gboolean
gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc)
{
  gboolean ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop];

  return ret;
}

static GstFlowReturn
gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf)
{
  GstFlowReturn ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create:buf];

  return ret;
}

static GstCaps *
gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstCaps *ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps];

  return ret;
}

static gboolean
gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query)
{
  gboolean ret;

  ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query];

  return ret;
}

static void
gst_avf_video_src_set_context (GstElement * element, GstContext * context)
{
  [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context];
}

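/* Enumerate the device's formats and frame-rate ranges and build caps from
 * them, both as plain video/x-raw and, for the platform's preferred GL
 * format, as GLMemory caps (merged first so they are preferred). */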
GstCaps *
gst_av_capture_device_get_caps (AVCaptureDevice *device, AVCaptureVideoDataOutput *output, GstAVFVideoSourceOrientation orientation)
{
  NSArray *formats = [device valueForKey:@"formats"];
  NSArray *pixel_formats = output.availableVideoCVPixelFormatTypes;
  GstCaps *result_caps, *result_gl_caps;
#if !HAVE_IOS
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_UYVY;
#else
  GstVideoFormat gl_format = GST_VIDEO_FORMAT_NV12;
#endif

  result_caps = gst_caps_new_empty ();
  result_gl_caps = gst_caps_new_empty ();

  /* Do not use AVCaptureDeviceFormat or AVFrameRateRange, which are only
   * available in iOS >= 7.0; instead take a dynamic approach with key-value
   * coding or performSelector */
  for (NSObject *f in [formats reverseObjectEnumerator]) {
    /* formatDescription can't be retrieved with valueForKey so use a selector here */
    CMFormatDescriptionRef formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)];
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions (formatDescription);
    dimensions = get_oriented_dimensions (orientation, dimensions);

    for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) {
      int min_fps_n, min_fps_d, max_fps_n, max_fps_d;
      gdouble min_fps, max_fps;

      [[rate valueForKey:@"minFrameRate"] getValue:&min_fps];
      gst_util_double_to_fraction (min_fps, &min_fps_n, &min_fps_d);

      [[rate valueForKey:@"maxFrameRate"] getValue:&max_fps];
      gst_util_double_to_fraction (max_fps, &max_fps_n, &max_fps_d);

      for (NSNumber *pixel_format in pixel_formats) {
        GstVideoFormat gst_format = get_gst_video_format (pixel_format);

        if (gst_format != GST_VIDEO_FORMAT_UNKNOWN) {
          if (min_fps != max_fps)
            gst_caps_append (result_caps, GST_AVF_FPS_RANGE_CAPS_NEW (gst_format, dimensions.width, dimensions.height, min_fps_n, min_fps_d, max_fps_n, max_fps_d));
          else
            gst_caps_append (result_caps, GST_AVF_CAPS_NEW (gst_format, dimensions.width, dimensions.height, max_fps_n, max_fps_d));
        }

        if (gst_format == gl_format) {
          GstCaps *gl_caps;
          if (min_fps != max_fps) {
            gl_caps = GST_AVF_FPS_RANGE_CAPS_NEW (gl_format,
                                                  dimensions.width, dimensions.height,
                                                  min_fps_n, min_fps_d,
                                                  max_fps_n, max_fps_d);
          } else {
            gl_caps = GST_AVF_CAPS_NEW (gl_format,
                                        dimensions.width, dimensions.height,
                                        max_fps_n, max_fps_d);
          }
          gst_caps_set_features (gl_caps, 0,
                                 gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
                                                        NULL));
          gst_caps_set_simple (gl_caps,
                               "texture-target", G_TYPE_STRING,
#if !HAVE_IOS
                               GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
                               GST_GL_TEXTURE_TARGET_2D_STR,
#endif
                               NULL);
          gst_caps_append (result_gl_caps, gl_caps);
        }
      }
    }
  }

  result_gl_caps = gst_caps_simplify (gst_caps_merge (result_gl_caps, result_caps));

  return result_gl_caps;
}

static GstVideoFormat
get_gst_video_format (NSNumber *pixel_format)
{
  GstVideoFormat gst_format = GST_VIDEO_FORMAT_UNKNOWN;

  switch ([pixel_format integerValue]) {
    case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
      gst_format = GST_VIDEO_FORMAT_NV12;
      break;
    case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
      gst_format = GST_VIDEO_FORMAT_UYVY;
      break;
    case kCVPixelFormatType_32BGRA: /* BGRA */
      gst_format = GST_VIDEO_FORMAT_BGRA;
      break;
    case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
      gst_format = GST_VIDEO_FORMAT_YUY2;
      break;
    default:
      break;
  }

  return gst_format;
}

static CMVideoDimensions
get_oriented_dimensions (GstAVFVideoSourceOrientation orientation, CMVideoDimensions dimensions)
{
  CMVideoDimensions orientedDimensions;
  if (orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT_UPSIDE_DOWN ||
      orientation == GST_AVF_VIDEO_SOURCE_ORIENTATION_PORTRAIT) {
    orientedDimensions.width = dimensions.height;
    orientedDimensions.height = dimensions.width;
  } else {
    orientedDimensions = dimensions;
  }
  return orientedDimensions;
}