/*
 * libjingle
 * Copyright 2015 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/app/webrtc/objc/avfoundationvideocapturer.h"

#include "webrtc/base/bind.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "webrtc/base/objc/RTCDispatcher.h"

// TODO(tkchin): support other formats.
static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
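// Note: FpsToInterval converts a frame rate into the per-frame interval (in
// nanoseconds) that cricket::VideoFormat stores, so kDefaultFormat describes
// 640x480 NV12 at 30 fps, matching kDefaultPreset above.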

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession* captureSession;
@property(nonatomic, readonly) BOOL isRunning;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.

// We keep a pointer back to the owning AVFoundationVideoCapturer so we can
// make callbacks on it when we receive frames. This is safe because this
// object is owned by the capturer.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer;
- (void)startCaptureAsync;
- (void)stopCaptureAsync;

@end

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput* _frontDeviceInput;
  AVCaptureDeviceInput* _backDeviceInput;
  AVCaptureVideoDataOutput* _videoOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer* _capturer;
  // Token returned by the block-based notification registration; needed to
  // unregister that observer in dealloc.
  id _runtimeErrorObserver;
  BOOL _orientationHasChanged;
}

@synthesize captureSession = _captureSession;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;

- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
  NSParameterAssert(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    // -addObserverForName:object:queue:usingBlock: returns an opaque observer
    // object; -removeObserver:self does not unregister it, so keep the token
    // and remove it explicitly in dealloc.
    _runtimeErrorObserver =
        [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                            object:nil
                             queue:nil
                        usingBlock:^(NSNotification* notification) {
          NSLog(@"Capture session error: %@", notification.userInfo);
        }];
  }
  return self;
}

- (void)dealloc {
  [self stopCaptureAsync];
  NSNotificationCenter* center = [NSNotificationCenter defaultCenter];
  if (_runtimeErrorObserver) {
    [center removeObserver:_runtimeErrorObserver];
  }
  [center removeObserver:self];
  _capturer = nullptr;
}

- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (_useBackCamera == useBackCamera) {
    return;
  }
  _useBackCamera = useBackCamera;
  [self updateSessionInput];
}

- (void)startCaptureAsync {
  if (_isRunning) {
    return;
  }
  _orientationHasChanged = NO;
  [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
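  // -[AVCaptureSession startRunning] (and -stopRunning below) are blocking
  // calls that can take significant time, so they are dispatched via
  // RTCDispatcher rather than executed on the caller's thread.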
  AVCaptureSession* session = _captureSession;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [session startRunning];
  }];
  _isRunning = YES;
}

- (void)stopCaptureAsync {
  if (!_isRunning) {
    return;
  }
  [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
  AVCaptureSession* session = _captureSession;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [session stopRunning];
  }];
  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  _isRunning = NO;
}

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

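// Called on the queue passed to -setSampleBufferDelegate:queue: (the main
// queue in this configuration) once per captured frame.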
- (void)captureOutput:(AVCaptureOutput*)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection*)connection {
  NSParameterAssert(captureOutput == _videoOutput);
  if (!_isRunning) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput*)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection*)connection {
  NSLog(@"Dropped sample buffer.");
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  _captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString* version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    _captureSession.usesApplicationAudioSession = NO;
  }
#endif
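  // usesApplicationAudioSession exists only on iOS 7+ (hence the guards
  // above). Setting it to NO gives the capture session a private audio
  // session instead of sharing the app's AVAudioSession.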
  if (![_captureSession canSetSessionPreset:kDefaultPreset]) {
    NSLog(@"Default video capture preset unsupported.");
    return NO;
  }
  _captureSession.sessionPreset = kDefaultPreset;

  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
  _videoOutput.videoSettings = @{
    (NSString*)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
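  // NV12 (biplanar 4:2:0) stores a full-resolution Y plane followed by a
  // half-height plane of interleaved Cb/Cr samples, whereas I420 keeps Y, U
  // and V in three separate planes. CaptureSampleBuffer below relies on this
  // two-plane layout.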
  _videoOutput.alwaysDiscardsLateVideoFrames = NO;
  [_videoOutput setSampleBufferDelegate:self
                                  queue:dispatch_get_main_queue()];
  if (![_captureSession canAddOutput:_videoOutput]) {
    NSLog(@"Default video capture output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoOutput];

  // Find the capture devices.
  AVCaptureDevice* frontCaptureDevice = nil;
  AVCaptureDevice* backCaptureDevice = nil;
  for (AVCaptureDevice* captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == AVCaptureDevicePositionBack) {
      backCaptureDevice = captureDevice;
    }
    if (captureDevice.position == AVCaptureDevicePositionFront) {
      frontCaptureDevice = captureDevice;
    }
  }
  if (!frontCaptureDevice || !backCaptureDevice) {
    NSLog(@"Failed to get capture devices.");
    return NO;
  }

  // Set up the session inputs.
  NSError* error = nil;
  _frontDeviceInput =
      [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice
                                            error:&error];
  if (!_frontDeviceInput) {
    NSLog(@"Failed to get capture device input: %@",
          error.localizedDescription);
    return NO;
  }
  _backDeviceInput =
      [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice
                                            error:&error];
  if (!_backDeviceInput) {
    NSLog(@"Failed to get capture device input: %@",
          error.localizedDescription);
    return NO;
  }

  // Add the inputs.
  if (![_captureSession canAddInput:_frontDeviceInput] ||
      ![_captureSession canAddInput:_backDeviceInput]) {
    NSLog(@"Session does not support capture inputs.");
    return NO;
  }
  [self updateSessionInput];

  return YES;
}

- (void)deviceOrientationDidChange:(NSNotification*)notification {
  _orientationHasChanged = YES;
  [self updateOrientation];
}

- (void)updateOrientation {
  AVCaptureConnection* connection =
      [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
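    // The landscape cases are deliberately crossed: UIDeviceOrientation
    // describes how the device is held (LandscapeLeft = home button on the
    // right), while AVCaptureVideoOrientation describes the resulting video,
    // so each maps to the opposite landscape value.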
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
}

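// Swaps the active camera input to match _useBackCamera. Wrapping the swap in
// beginConfiguration / commitConfiguration applies the removeInput: and
// addInput: calls as one atomic change, so the session is not torn down and
// restarted in between.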
- (void)updateSessionInput {
  // Update the current session input to match what's stored in _useBackCamera.
  [_captureSession beginConfiguration];
  AVCaptureDeviceInput* oldInput = _backDeviceInput;
  AVCaptureDeviceInput* newInput = _frontDeviceInput;
  if (_useBackCamera) {
    oldInput = _frontDeviceInput;
    newInput = _backDeviceInput;
  }
  // It's OK to remove an input that isn't attached; the call is a no-op.
  [_captureSession removeInput:oldInput];
  [_captureSession addInput:newInput];
  [self updateOrientation];
  [_captureSession commitConfiguration];
}

@end

namespace webrtc {

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't entirely accurate because it takes a while for the
  // AVCaptureSession to spin up, and startCaptureAsync returns
  // asynchronously.
  // TODO(tkchin): make this better.
  [_capturer startCaptureAsync];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stopCaptureAsync];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (imageBuffer == NULL) {
    return;
  }

  // The base address must be locked to access frame data.
  CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly;
  CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags);
  if (ret != kCVReturnSuccess) {
    return;
  }

  static size_t const kYPlaneIndex = 0;
  static size_t const kUVPlaneIndex = 1;
  uint8_t* yPlaneAddress =
      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
  size_t yPlaneHeight =
      CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex);
  size_t yPlaneWidth =
      CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex);
  size_t yPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex);
  size_t uvPlaneHeight =
      CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex);
  size_t uvPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex);
  size_t frameSize =
      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
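  // Illustrative sizes, assuming the default 640x480 format and no row
  // padding (bytesPerRow == width): Y plane = 640 * 480 = 307200 bytes, UV
  // plane = 640 * 240 = 153600 bytes (half height, Cb and Cr interleaved),
  // so frameSize = 460800 bytes.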

  // Sanity check assumption that planar bytes are contiguous.
  uint8_t* uvPlaneAddress =
      (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
  RTC_DCHECK(
      uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);

  // Stuff data into a cricket::CapturedFrame.
  int64_t currentTime = rtc::TimeNanos();
  cricket::CapturedFrame frame;
  frame.width = yPlaneWidth;
  frame.height = yPlaneHeight;
  frame.pixel_width = 1;
  frame.pixel_height = 1;
  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
  frame.time_stamp = currentTime;
  frame.data = yPlaneAddress;
  frame.data_size = frameSize;

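  // frame.data points directly into the locked pixel buffer, so the frame
  // must be fully consumed before the unlock below. SignalFrameCaptured fires
  // synchronously, and rtc::Thread::Invoke blocks until the call finishes on
  // the start thread, so both paths satisfy that requirement.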
  if (_startThread->IsCurrent()) {
    SignalFrameCaptured(this, &frame);
  } else {
    _startThread->Invoke<void>(
        rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
                  this, &frame));
  }
  CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags);
}

void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread(
    const cricket::CapturedFrame* frame) {
  RTC_DCHECK(_startThread->IsCurrent());
  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, frame);
}

}  // namespace webrtc
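
// Illustrative usage sketch (an assumption, not part of this API's
// documentation): a client owning the capturer might drive it roughly like
// this, using the only format this file currently supports:
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   cricket::VideoFormat format(640, 480,
//                               cricket::VideoFormat::FpsToInterval(30),
//                               cricket::FOURCC_NV12);
//   if (capturer.Start(format) == cricket::CaptureState::CS_STARTING) {
//     // Frames arrive via SignalFrameCaptured on the thread that called
//     // Start().
//   }
//   capturer.Stop();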