/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Contains code that is used to capture video frames from a camera device
 * on Mac. This code uses the QTKit API to work with camera devices, and
 * requires Mac OS X 10.5 or later.
 */

#import <Cocoa/Cocoa.h>
#if 0
#import <QTKit/QTKit.h>
#else
// QTMovieModernizer.h does not compile with the current toolchain.
// TODO: revert this when the toolchain is updated.
#import <QTKit/QTCaptureConnection.h>
#import <QTKit/QTCaptureDevice.h>
#import <QTKit/QTCaptureDeviceInput.h>
#import <QTKit/QTCaptureSession.h>
#import <QTKit/QTCaptureVideoPreviewOutput.h>
#import <QTKit/QTMedia.h>
#import <QTKit/QTSampleBuffer.h>
#endif
#import <CoreAudio/CoreAudio.h>
#include "android/camera/camera-capture.h"
#include "android/camera/camera-format-converters.h"

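/* Logging macros: E() reports errors, W() reports warnings, and D() prints
 * debug messages when verbose 'camera' output is enabled. */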
#define  E(...)    derror(__VA_ARGS__)
#define  W(...)    dwarning(__VA_ARGS__)
#define  D(...)    VERBOSE_PRINT(camera,__VA_ARGS__)

/*******************************************************************************
 *                     Helper routines
 ******************************************************************************/

/* Converts internal QT pixel format to a FOURCC value. */
static uint32_t
_QTtoFOURCC(uint32_t qt_pix_format)
{
    switch (qt_pix_format) {
        case kCVPixelFormatType_24RGB:
            return V4L2_PIX_FMT_RGB24;

        case kCVPixelFormatType_24BGR:
            return V4L2_PIX_FMT_BGR32;

        case kCVPixelFormatType_32ARGB:
        case kCVPixelFormatType_32RGBA:
            return V4L2_PIX_FMT_RGB32;

        case kCVPixelFormatType_32BGRA:
        case kCVPixelFormatType_32ABGR:
            return V4L2_PIX_FMT_BGR32;

        case kCVPixelFormatType_422YpCbCr8:
            return V4L2_PIX_FMT_UYVY;

        case kCVPixelFormatType_420YpCbCr8Planar:
            return V4L2_PIX_FMT_YVU420;

        case 'yuvs':  // kCVPixelFormatType_422YpCbCr8_yuvs - undeclared?
            return V4L2_PIX_FMT_YUYV;

        default:
            E("Unrecognized pixel format '%.4s'", (const char*)&qt_pix_format);
            return 0;
    }
}

/*******************************************************************************
 *                     MacCamera implementation
 ******************************************************************************/

/* Encapsulates a camera device on MacOS */
@interface MacCamera : NSObject {
    /* Capture session. */
    QTCaptureSession*             capture_session;
    /* Camera capture device. */
    QTCaptureDevice*              capture_device;
    /* Input device registered with the capture session. */
    QTCaptureDeviceInput*         input_device;
    /* Output device registered with the capture session. */
    QTCaptureVideoPreviewOutput*  output_device;
    /* Current framebuffer. */
    CVImageBufferRef              current_frame;
    /* Desired frame width */
    int                           desired_width;
    /* Desired frame height */
    int                           desired_height;
}

/* Initializes MacCamera instance.
 * Return:
 *  Pointer to initialized instance on success, or nil on failure.
 */
- (MacCamera*)init;

/* Undoes 'init' */
- (void)free;

/* Starts capturing video frames.
 * Param:
 *  width, height - Requested dimensions for the captured video frames.
 * Return:
 *  0 on success, or !=0 on failure.
 */
- (int)start_capturing:(int)width:(int)height;

/* Captures a frame from the camera device.
 * Param:
 *  framebuffers - Array of framebuffers to read the frame into. The size of
 *      this array is defined by the 'fbs_num' parameter. Note that the caller
 *      must make sure that the buffers are large enough to contain the entire
 *      frame captured from the device.
 *  fbs_num - Number of entries in the 'framebuffers' array.
 * Return:
 *  0 on success, or a non-zero value on failure. The special value 1 indicates
 *  that frames are not yet available in the device; the client should respond
 *  to it by repeating the read rather than reporting an error.
 */
- (int)read_frame:(ClientFrameBuffer*)framebuffers:(int)fbs_num:(float)r_scale:(float)g_scale:(float)b_scale:(float)exp_comp;

@end

@implementation MacCamera

- (MacCamera*)init
{
    NSError *error;
    BOOL success;

    /* Obtain the capture device, make sure it's not used by another
     * application, and open it. */
    capture_device =
        [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
    if (capture_device == nil) {
        E("There are no available video devices found.");
        [self release];
        return nil;
    }
    if ([capture_device isInUseByAnotherApplication]) {
        E("Default camera device is in use by another application.");
        [capture_device release];
        capture_device = nil;
        [self release];
        return nil;
    }
    success = [capture_device open:&error];
    if (!success) {
        E("Unable to open camera device: '%s'",
          [[error localizedDescription] UTF8String]);
        [self free];
        [self release];
        return nil;
    }

    /* Create capture session. */
    capture_session = [[QTCaptureSession alloc] init];
    if (capture_session == nil) {
        E("Unable to create capture session.");
        [self free];
        [self release];
        return nil;
    }

    /* Create an input device and register it with the capture session. */
    input_device = [[QTCaptureDeviceInput alloc] initWithDevice:capture_device];
    success = [capture_session addInput:input_device error:&error];
    if (!success) {
        E("Unable to initialize input device: '%s'",
          [[error localizedDescription] UTF8String]);
        [input_device release];
        input_device = nil;
        [self free];
        [self release];
        return nil;
    }

    /* Create an output device and register it with the capture session. */
    output_device = [[QTCaptureVideoPreviewOutput alloc] init];
    success = [capture_session addOutput:output_device error:&error];
    if (!success) {
        E("Unable to initialize output device: '%s'",
          [[error localizedDescription] UTF8String]);
        [output_device release];
        output_device = nil;
        [self free];
        [self release];
        return nil;
    }
    [output_device setDelegate:self];

    return self;
}

- (void)free
{
    /* Uninitialize capture session. */
    if (capture_session != nil) {
        /* Make sure that capturing is stopped. */
        if ([capture_session isRunning]) {
            [capture_session stopRunning];
        }
        /* Detach input and output devices from the session. */
        if (input_device != nil) {
            [capture_session removeInput:input_device];
            [input_device release];
            input_device = nil;
        }
        if (output_device != nil) {
            [capture_session removeOutput:output_device];
            [output_device release];
            output_device = nil;
        }
        /* Destroy capture session. */
        [capture_session release];
        capture_session = nil;
    }

    /* Uninitialize capture device. */
    if (capture_device != nil) {
        /* Make sure device is not opened. */
        if ([capture_device isOpen]) {
            [capture_device close];
        }
        [capture_device release];
        capture_device = nil;
    }

    /* Release current framebuffer. */
    if (current_frame != nil) {
        CVBufferRelease(current_frame);
        current_frame = nil;
    }
}

- (int)start_capturing:(int)width:(int)height
{
    if (![capture_session isRunning]) {
        /* Set desired frame dimensions. */
        desired_width = width;
        desired_height = height;
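        /* Constrain the preview output's pixel buffer dimensions so that QTKit
         * delivers frames already scaled to the requested size. */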
        [output_device setPixelBufferAttributes:
            [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithInt: width], kCVPixelBufferWidthKey,
                [NSNumber numberWithInt: height], kCVPixelBufferHeightKey,
                nil]];
        [capture_session startRunning];
        return 0;
    } else if (width == desired_width && height == desired_height) {
        W("%s: Already capturing %dx%d frames",
          __FUNCTION__, desired_width, desired_height);
        return -1;
    } else {
        E("%s: Already capturing %dx%d frames. Requested frame dimensions are %dx%d",
          __FUNCTION__, desired_width, desired_height, width, height);
        return -1;
    }
}

- (int)read_frame:(ClientFrameBuffer*)framebuffers:(int)fbs_num:(float)r_scale:(float)g_scale:(float)b_scale:(float)exp_comp
{
    int res = -1;

    /* Frames are pushed by QT in another thread.
     * So we need a protection here. */
    @synchronized (self)
    {
        if (current_frame != nil) {
            /* Collect frame info. */
            const uint32_t pixel_format =
                _QTtoFOURCC(CVPixelBufferGetPixelFormatType(current_frame));
            const int frame_width = CVPixelBufferGetWidth(current_frame);
            const int frame_height = CVPixelBufferGetHeight(current_frame);
            const size_t frame_size =
                CVPixelBufferGetBytesPerRow(current_frame) * frame_height;

            /* Get framebuffer pointer. */
            CVPixelBufferLockBaseAddress(current_frame, 0);
            const void* pixels = CVPixelBufferGetBaseAddress(current_frame);
            if (pixels != nil) {
                /* Convert framebuffer. */
                res = convert_frame(pixels, pixel_format, frame_size,
                                    frame_width, frame_height,
                                    framebuffers, fbs_num,
                                    r_scale, g_scale, b_scale, exp_comp);
            } else {
                E("%s: Unable to obtain framebuffer", __FUNCTION__);
                res = -1;
            }
            CVPixelBufferUnlockBaseAddress(current_frame, 0);
        } else {
            /* First frame didn't come in just yet. Let the caller repeat. */
            res = 1;
        }
    }

    return res;
}

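/* QTCaptureVideoPreviewOutput delegate method, invoked by QTKit on its capture
 * thread for every new video frame. The frame is retained and cached in
 * 'current_frame' for read_frame to consume; the previously cached frame, if
 * any, is released. */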
- (void)captureOutput:(QTCaptureOutput*)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer*)sampleBuffer
       fromConnection:(QTCaptureConnection*)connection
{
    CVImageBufferRef to_release;
    CVBufferRetain(videoFrame);

    /* Frames are pulled by the client in another thread.
     * So we need a protection here. */
    @synchronized (self)
    {
        to_release = current_frame;
        current_frame = videoFrame;
    }
    CVBufferRelease(to_release);
}

@end

/*******************************************************************************
 *                     CameraDevice routines
 ******************************************************************************/

typedef struct MacCameraDevice MacCameraDevice;
/* MacOS-specific camera device descriptor. */
struct MacCameraDevice {
    /* Common camera device descriptor. */
    CameraDevice  header;
    /* Actual camera device object. */
    MacCamera*    device;
};

/* Allocates an instance of MacCameraDevice structure.
 * Return:
 *  Allocated instance of MacCameraDevice structure. Note that this routine
 *  also sets 'opaque' field in the 'header' structure to point back to the
 *  containing MacCameraDevice instance.
 */
static MacCameraDevice*
_camera_device_alloc(void)
{
    MacCameraDevice* cd = (MacCameraDevice*)malloc(sizeof(MacCameraDevice));
    if (cd != NULL) {
        memset(cd, 0, sizeof(MacCameraDevice));
        cd->header.opaque = cd;
    } else {
        E("%s: Unable to allocate MacCameraDevice instance", __FUNCTION__);
    }
    return cd;
}

/* Uninitializes and frees MacCameraDevice descriptor.
 * Note that upon return from this routine memory allocated for the descriptor
 * will be freed.
 */
static void
_camera_device_free(MacCameraDevice* cd)
{
    if (cd != NULL) {
        if (cd->device != NULL) {
            [cd->device free];
            [cd->device release];
            cd->device = nil;
        }
        AFREE(cd);
    } else {
        W("%s: No descriptor", __FUNCTION__);
    }
}

/* Resets camera device after capturing.
 * Since a new capture request may require different frame dimensions, we must
 * reset the frame info cached in the capture device. The only way to do that
 * is to close it and then reopen it. */
static void
_camera_device_reset(MacCameraDevice* cd)
{
    if (cd != NULL && cd->device) {
        [cd->device free];
        cd->device = [cd->device init];
    }
}

/*******************************************************************************
 *                     CameraDevice API
 ******************************************************************************/
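/* Illustrative call sequence for this API (a sketch only, not emulator code;
 * the emulator core drives these entry points, the ClientFrameBuffer setup is
 * elided here, and neutral color/exposure scale values are assumed):
 *
 *   CameraDevice* cd = camera_device_open("webcam0", 0);
 *   if (cd != NULL &&
 *       camera_device_start_capturing(cd, V4L2_PIX_FMT_RGB24, 640, 480) == 0) {
 *       ClientFrameBuffer fb;   // filled in by the caller
 *       int res;
 *       do {
 *           // A return value of 1 means "no frame yet": retry instead of failing.
 *           res = camera_device_read_frame(cd, &fb, 1,
 *                                          1.0f, 1.0f, 1.0f, 1.0f);
 *       } while (res == 1);
 *       camera_device_stop_capturing(cd);
 *   }
 *   if (cd != NULL) {
 *       camera_device_close(cd);
 *   }
 */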

CameraDevice*
camera_device_open(const char* name, int inp_channel)
{
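    /* Note: 'name' and 'inp_channel' are not used by this implementation; it
     * always opens the default video capture device. */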
    MacCameraDevice* mcd;

    mcd = _camera_device_alloc();
    if (mcd == NULL) {
        E("%s: Unable to allocate MacCameraDevice instance", __FUNCTION__);
        return NULL;
    }
    mcd->device = [[MacCamera alloc] init];
    if (mcd->device == nil) {
        E("%s: Unable to initialize camera device.", __FUNCTION__);
        /* Don't leak the descriptor allocated above. */
        _camera_device_free(mcd);
        return NULL;
    }
    return &mcd->header;
}

int
camera_device_start_capturing(CameraDevice* cd,
                              uint32_t pixel_format,
                              int frame_width,
                              int frame_height)
{
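    /* Note: 'pixel_format' is not used by this implementation; frames are
     * captured in whatever format the device reports, and conversion happens
     * later in camera_device_read_frame. */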
    MacCameraDevice* mcd;

    /* Sanity checks. */
    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
        return -1;
    }
    mcd = (MacCameraDevice*)cd->opaque;
    if (mcd->device == nil) {
        E("%s: Camera device is not opened", __FUNCTION__);
        return -1;
    }

    return [mcd->device start_capturing:frame_width:frame_height];
}

int
camera_device_stop_capturing(CameraDevice* cd)
{
    MacCameraDevice* mcd;

    /* Sanity checks. */
    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
        return -1;
    }
    mcd = (MacCameraDevice*)cd->opaque;
    if (mcd->device == nil) {
        E("%s: Camera device is not opened", __FUNCTION__);
        return -1;
    }

    /* Reset capture settings, so next call to capture can set its own. */
    _camera_device_reset(mcd);

    return 0;
}

int
camera_device_read_frame(CameraDevice* cd,
                         ClientFrameBuffer* framebuffers,
                         int fbs_num,
                         float r_scale,
                         float g_scale,
                         float b_scale,
                         float exp_comp)
{
    MacCameraDevice* mcd;

    /* Sanity checks. */
    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
        return -1;
    }
    mcd = (MacCameraDevice*)cd->opaque;
    if (mcd->device == nil) {
        E("%s: Camera device is not opened", __FUNCTION__);
        return -1;
    }

    return [mcd->device read_frame:framebuffers:fbs_num:r_scale:g_scale:b_scale:exp_comp];
}

void
camera_device_close(CameraDevice* cd)
{
    /* Sanity checks. */
    if (cd == NULL || cd->opaque == NULL) {
        E("%s: Invalid camera device descriptor", __FUNCTION__);
    } else {
        _camera_device_free((MacCameraDevice*)cd->opaque);
    }
}

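/* Enumerates camera devices connected to the host.
 * Only the default video capture device is reported (see the note on device
 * identification below), so at most one entry is filled in and the caller is
 * expected to pass 'max' >= 1.
 * Return:
 *  Number of entries filled in the 'cis' array: 1 on success, or 0 if no
 *  camera is available or its pixel format is unsupported.
 */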
int
enumerate_camera_devices(CameraInfo* cis, int max)
{
    /* Array containing emulated webcam frame dimensions.
     * The QT API provides device-independent frame dimensions by scaling
     * frames received from the device to whatever dimensions were requested
     * for the output device. So, we can just use a small set of frame
     * dimensions to emulate.
     */
    static const CameraFrameDim _emulate_dims[] =
    {
        /* Emulates 640x480 frame. */
        {640, 480},
        /* Emulates 352x288 frame (required by camera framework). */
        {352, 288},
        /* Emulates 320x240 frame (required by camera framework). */
        {320, 240},
        /* Emulates 176x144 frame (required by camera framework). */
        {176, 144}
    };

    /* Obtain default video device. QT API doesn't really provide a reliable
     * way to identify camera devices. There is a QTCaptureDevice::uniqueId
     * method that supposedly does that, but in some cases it just doesn't
     * work. Until we figure out a reliable device identification, we will
     * stick to using only one (default) camera for emulation. */
    QTCaptureDevice* video_dev =
        [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
    if (video_dev == nil) {
        D("No web cameras are connected to the host.");
        return 0;
    }

    /* Obtain pixel format for the device. */
    NSArray* pix_formats = [video_dev formatDescriptions];
    if (pix_formats == nil || [pix_formats count] == 0) {
        E("Unable to obtain pixel format for the default camera device.");
        [video_dev release];
        return 0;
    }
    const uint32_t qt_pix_format = [[pix_formats objectAtIndex:0] formatType];
    [pix_formats release];

    /* Obtain FOURCC pixel format for the device. */
    cis[0].pixel_format = _QTtoFOURCC(qt_pix_format);
    if (cis[0].pixel_format == 0) {
        /* Unsupported pixel format. */
        E("Pixel format '%.4s' reported by the camera device is unsupported",
          (const char*)&qt_pix_format);
        [video_dev release];
        return 0;
    }

    /* Initialize camera info structure. */
    cis[0].frame_sizes = (CameraFrameDim*)malloc(sizeof(_emulate_dims));
    if (cis[0].frame_sizes != NULL) {
        cis[0].frame_sizes_num = sizeof(_emulate_dims) / sizeof(*_emulate_dims);
        memcpy(cis[0].frame_sizes, _emulate_dims, sizeof(_emulate_dims));
        cis[0].device_name = ASTRDUP("webcam0");
        cis[0].inp_channel = 0;
        cis[0].display_name = ASTRDUP("webcam0");
        cis[0].in_use = 0;
        [video_dev release];
        return 1;
    } else {
        E("Unable to allocate memory for camera information.");
        [video_dev release];
        return 0;
    }
}
571