• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1// Copyright 2013 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#import "media/video/capture/mac/video_capture_device_avfoundation_mac.h"
6
7#import <CoreVideo/CoreVideo.h>
8
9#include "base/logging.h"
10#include "base/mac/foundation_util.h"
11#include "media/video/capture/mac/video_capture_device_mac.h"
12#include "ui/gfx/size.h"
13
14@implementation VideoCaptureDeviceAVFoundation
15
16#pragma mark Class methods
17
+ (void)getDeviceNames:(NSMutableDictionary*)deviceNames {
  // By the time this runs, device monitoring has already loaded and
  // initialised the whole AVFoundation library, so the glue classes can be
  // used directly.
  for (CrAVCaptureDevice* device in [AVCaptureDeviceGlue devices]) {
    const BOOL hasVideoOrMuxedMedia =
        [device hasMediaType:AVFoundationGlue::AVMediaTypeVideo()] ||
        [device hasMediaType:AVFoundationGlue::AVMediaTypeMuxed()];
    if (!hasVideoOrMuxedMedia || [device isSuspended])
      continue;
    DeviceNameAndTransportType* entry = [[[DeviceNameAndTransportType alloc]
           initWithName:[device localizedName]
          transportType:[device transportType]] autorelease];
    [deviceNames setObject:entry forKey:[device uniqueID]];
  }
}
35
+ (NSDictionary*)deviceNames {
  // Device name retrieval does not happen on the main thread; that caused
  // instabilities in QTKit, so keep an eye here.
  NSMutableDictionary* names = [[[NSMutableDictionary alloc] init] autorelease];
  [self getDeviceNames:names];
  return names;
}
44
// Fills |formats| with every capture format advertised by the device whose
// unique ID matches |name|. Emits one entry per (pixel format, dimensions,
// frame-rate range) combination, using the range's maximum rate. Does nothing
// if the device can no longer be found.
+ (void)getDevice:(const media::VideoCaptureDevice::Name&)name
 supportedFormats:(media::VideoCaptureFormats*)formats {
  NSArray* devices = [AVCaptureDeviceGlue devices];
  CrAVCaptureDevice* device = nil;
  for (device in devices) {
    // std::string operator== performs a content comparison against the
    // C string returned by UTF8String.
    if ([[device uniqueID] UTF8String] == name.id())
      break;
  }
  if (device == nil)
    return;
  for (CrAVCaptureDeviceFormat* format in device.formats) {
    // MediaSubType is a CMPixelFormatType but can be used as CVPixelFormatType
    // as well according to CMFormatDescription.h
    media::VideoPixelFormat pixelFormat = media::PIXEL_FORMAT_UNKNOWN;
    switch (CoreMediaGlue::CMFormatDescriptionGetMediaSubType(
                [format formatDescription])) {
      case kCVPixelFormatType_422YpCbCr8:  // Typical.
        pixelFormat = media::PIXEL_FORMAT_UYVY;
        break;
      case CoreMediaGlue::kCMPixelFormat_422YpCbCr8_yuvs:
        pixelFormat = media::PIXEL_FORMAT_YUY2;
        break;
      case CoreMediaGlue::kCMVideoCodecType_JPEG_OpenDML:
        pixelFormat = media::PIXEL_FORMAT_MJPEG;
        // Explicit break: previously this fell through into |default|, which
        // happened to be harmless but read as an accidental fallthrough.
        break;
      default:
        break;
    }

    CoreMediaGlue::CMVideoDimensions dimensions =
        CoreMediaGlue::CMVideoFormatDescriptionGetDimensions(
            [format formatDescription]);

    // |captureFormat| renamed so it no longer shadows the enclosing |format|
    // loop variable.
    for (CrAVFrameRateRange* frameRate in
             [format videoSupportedFrameRateRanges]) {
      media::VideoCaptureFormat captureFormat(
          gfx::Size(dimensions.width, dimensions.height),
          frameRate.maxFrameRate,
          pixelFormat);
      formats->push_back(captureFormat);
      DVLOG(2) << name.name() << " " << captureFormat.ToString();
    }
  }
}
89
90#pragma mark Public methods
91
// Designated initializer. |frameReceiver| must be non-NULL and outlive this
// object (or be cleared via -setFrameReceiver: before it is destroyed).
- (id)initWithFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  if ((self = [super init])) {
    // Must be constructed on the main thread; frame callbacks later arrive on
    // AVFoundation-owned threads (see -captureOutput:...).
    DCHECK(main_thread_checker_.CalledOnValidThread());
    DCHECK(frameReceiver);
    // Goes through the setter so |frameReceiver_| is written under |lock_|.
    [self setFrameReceiver:frameReceiver];
    // reset() takes ownership of the +1 reference from alloc/init (this file
    // uses manual retain/release).
    captureSession_.reset(
        [[AVFoundationGlue::AVCaptureSessionClass() alloc] init]);
  }
  return self;
}
102
- (void)dealloc {
  // Stops the session (synchronously) and removes the notification observer
  // before the object goes away.
  [self stopCapture];
  [super dealloc];  // Pre-ARC file: [super dealloc] is required.
}
107
// Sets the destination for decoded frames and errors. May be called with NULL
// to detach; |lock_| guards against the capture callback thread reading
// |frameReceiver_| concurrently.
- (void)setFrameReceiver:(media::VideoCaptureDeviceMac*)frameReceiver {
  base::AutoLock lock(lock_);
  frameReceiver_ = frameReceiver;
}
112
// Selects the capture device with unique ID |deviceId|, wiring its input and
// a video data output into |captureSession_|. Passing nil tears down the
// current input/output. Returns YES on success; on failure an error string is
// forwarded to the frame receiver and NO is returned.
- (BOOL)setCaptureDevice:(NSString*)deviceId {
  DCHECK(captureSession_);
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (!deviceId) {
    // First stop the capture session, if it's running.
    [self stopCapture];
    // Now remove the input and output from the capture session.
    [captureSession_ removeOutput:captureVideoDataOutput_];
    if (captureDeviceInput_) {
      [captureSession_ removeInput:captureDeviceInput_];
      // No need to release |captureDeviceInput_|, is owned by the session.
      captureDeviceInput_ = nil;
      captureDevice_ = nil;
    }
    return YES;
  }

  // Look for input device with requested name.
  captureDevice_ = [AVCaptureDeviceGlue deviceWithUniqueID:deviceId];
  if (!captureDevice_) {
    [self sendErrorString:[NSString
        stringWithUTF8String:"Could not open video capture device."]];
    return NO;
  }

  // Create the capture input associated with the device. Easy peasy.
  NSError* error = nil;
  captureDeviceInput_ = [AVCaptureDeviceInputGlue
      deviceInputWithDevice:captureDevice_
                      error:&error];
  if (!captureDeviceInput_) {
    captureDevice_ = nil;
    [self sendErrorString:[NSString
        stringWithFormat:@"Could not create video capture input (%@): %@",
                         [error localizedDescription],
                         [error localizedFailureReason]]];
    return NO;
  }
  [captureSession_ addInput:captureDeviceInput_];

  // Create a new data output for video. The data output is configured to
  // discard late frames by default.
  captureVideoDataOutput_.reset(
      [[AVFoundationGlue::AVCaptureVideoDataOutputClass() alloc] init]);
  if (!captureVideoDataOutput_) {
    [captureSession_ removeInput:captureDeviceInput_];
    // Fix: clear the now-stale references so a later teardown does not touch
    // an input that has already been removed from the session. These were
    // previously left dangling on this error path, inconsistently with the
    // device-input failure path above.
    captureDeviceInput_ = nil;
    captureDevice_ = nil;
    [self sendErrorString:[NSString
        stringWithUTF8String:"Could not create video data output."]];
    return NO;
  }
  [captureVideoDataOutput_
      setSampleBufferDelegate:self
                        queue:dispatch_get_global_queue(
                            DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
  [captureSession_ addOutput:captureVideoDataOutput_];
  return YES;
}
170
// Configures the output's dimensions, pixel format and frame rate. Always
// returns YES.
- (BOOL)setCaptureHeight:(int)height
                   width:(int)width
               frameRate:(float)frameRate {
  // Either VideoCaptureDeviceMac::AllocateAndStart() or
  // VideoCaptureDeviceMac::ReceiveFrame() can call here, depending on the
  // running state; VCDM::ReceiveFrame() calls here to change aspect ratio.
  DCHECK((![captureSession_ isRunning] &&
      main_thread_checker_.CalledOnValidThread()) ||
      callback_thread_checker_.CalledOnValidThread());

  frameWidth_ = width;
  frameHeight_ = height;
  frameRate_ = frameRate;

  // The capture output must be configured explicitly: despite what the Mac
  // documentation details, setting the sessionPreset alone is not enough
  // (most AVFoundation docs are written for iOS rather than Mac OS X).
  // AVVideoScalingModeKey() refers to letterboxing yes/no and aspect-ratio
  // preservation yes/no when scaling; currently we crop and preserve.
  NSDictionary* videoSettings = @{
    (id)kCVPixelBufferWidthKey : @(width),
    (id)kCVPixelBufferHeightKey : @(height),
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_422YpCbCr8),
    AVFoundationGlue::AVVideoScalingModeKey() :
        AVFoundationGlue::AVVideoScalingModeResizeAspectFill()
  };
  [captureVideoDataOutput_ setVideoSettings:videoSettings];

  CrAVCaptureConnection* connection = [captureVideoDataOutput_
      connectionWithMediaType:AVFoundationGlue::AVMediaTypeVideo()];
  // CMTimeMake accepts integer arguments but |frameRate| is float: scale by
  // kFrameRatePrecision and truncate. Computed once, used for both min and
  // max durations below.
  const CoreMediaGlue::CMTime frameDuration = CoreMediaGlue::CMTimeMake(
      media::kFrameRatePrecision,
      (int)(frameRate * media::kFrameRatePrecision));
  // Check selector existence before use, related to bugs
  // http://crbug.com/327532 and http://crbug.com/328096.
  if ([connection
          respondsToSelector:@selector(isVideoMinFrameDurationSupported)] &&
      [connection isVideoMinFrameDurationSupported]) {
    [connection setVideoMinFrameDuration:frameDuration];
  }
  if ([connection
          respondsToSelector:@selector(isVideoMaxFrameDurationSupported)] &&
      [connection isVideoMaxFrameDurationSupported]) {
    [connection setVideoMaxFrameDuration:frameDuration];
  }
  return YES;
}
221
// Starts the capture session. Returns NO if the session was never created.
- (BOOL)startCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if (!captureSession_) {
    DLOG(ERROR) << "Video capture session not initialized.";
    return NO;
  }
  // Subscribe to runtime-error notifications before starting the session so
  // none can be missed.
  [[NSNotificationCenter defaultCenter]
      addObserver:self
         selector:@selector(onVideoError:)
             name:AVFoundationGlue::AVCaptureSessionRuntimeErrorNotification()
           object:captureSession_];
  [captureSession_ startRunning];
  return YES;
}
237
// Stops the session (if running) and unsubscribes from all notifications.
// Safe to call repeatedly; also invoked from -dealloc.
- (void)stopCapture {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  if ([captureSession_ isRunning])
    [captureSession_ stopRunning];  // Synchronous.
  [[NSNotificationCenter defaultCenter] removeObserver:self];
}
244
245#pragma mark Private methods
246
// |captureOutput| is called by the capture device to deliver a new frame.
- (void)captureOutput:(CrAVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CoreMediaGlue::CMSampleBufferRef)sampleBuffer
           fromConnection:(CrAVCaptureConnection*)connection {
  // AVFoundation calls from a number of threads, depending on, at least, if
  // Chrome is on foreground or background. Sample the actual thread here.
  callback_thread_checker_.DetachFromThread();
  callback_thread_checker_.CalledOnValidThread();
  CVImageBufferRef videoFrame =
      CoreMediaGlue::CMSampleBufferGetImageBuffer(sampleBuffer);
  // Lock the frame and calculate frame size.
  const int kLockFlags = 0;
  if (CVPixelBufferLockBaseAddress(videoFrame, kLockFlags) ==
          kCVReturnSuccess) {
    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    size_t frameWidth = CVPixelBufferGetWidth(videoFrame);
    size_t frameHeight = CVPixelBufferGetHeight(videoFrame);
    // Includes any per-row padding; the receiver is handed bytesPerRow-sized
    // rows.
    size_t frameSize = bytesPerRow * frameHeight;
    UInt8* addressToPass = reinterpret_cast<UInt8*>(baseAddress);

    media::VideoCaptureFormat captureFormat(
        gfx::Size(frameWidth, frameHeight),
        frameRate_,
        media::PIXEL_FORMAT_UYVY);
    {
      base::AutoLock lock(lock_);
      if (frameReceiver_) {
        frameReceiver_->ReceiveFrame(addressToPass, frameSize, captureFormat,
                                     0, 0);
      }
    }
    // Fix: always balance the lock above. Previously a null |frameReceiver_|
    // caused an early return that skipped this call, leaving the pixel
    // buffer's base address locked.
    CVPixelBufferUnlockBaseAddress(videoFrame, kLockFlags);
  }
}
279
// Handler for AVCaptureSessionRuntimeErrorNotification; forwards a readable
// description of the embedded NSError to the frame receiver.
- (void)onVideoError:(NSNotification*)errorNotification {
  NSDictionary* userInfo = [errorNotification userInfo];
  NSError* error = base::mac::ObjCCast<NSError>(
      [userInfo objectForKey:AVFoundationGlue::AVCaptureSessionErrorKey()]);
  NSString* description =
      [NSString stringWithFormat:@"%@: %@",
                                 [error localizedDescription],
                                 [error localizedFailureReason]];
  [self sendErrorString:description];
}
288
// Logs |error| and, under |lock_|, forwards it to |frameReceiver_| (if set).
- (void)sendErrorString:(NSString*)error {
  const char* errorCString = [error UTF8String];
  DLOG(ERROR) << errorCString;
  base::AutoLock lock(lock_);
  if (frameReceiver_)
    frameReceiver_->ReceiveError(errorCString);
}
295
296@end
297