• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1/*
2 *  cap_avfoundation.mm
3 *  For iOS video I/O
4 *  by Xiaochao Yang on 06/15/11 modified from
5 *  cap_qtkit.mm for Nicholas Butko for Mac OS version.
6 *  Copyright 2011. All rights reserved.
7 *
8 * Redistribution and use in source and binary forms, with or without
9 * modification, are permitted provided that the following conditions are met:
10 *
11 * 1. Redistributions of source code must retain the above copyright notice,
12 *    this list of conditions and the following disclaimer.
13 * 2. Redistributions in binary form must reproduce the above copyright notice,
14 *    this list of conditions and the following disclaimer in the documentation
15 *    and/or other materials provided with the distribution.
16 * 3. The name of the author may not be used to endorse or promote products
17 *    derived from this software without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
20 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
21 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
22 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
24 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
25 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
26 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
27 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
28 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 *
30 */
31
32#include "precomp.hpp"
33#include "opencv2/imgproc.hpp"
34#include <iostream>
35#import <AVFoundation/AVFoundation.h>
36#import <Foundation/NSException.h>
37
38
39/********************** Declaration of class headers ************************/
40
41/*****************************************************************************
42 *
43 * CaptureDelegate Declaration.
44 *
45 * CaptureDelegate is notified on a separate thread by the OS whenever there
46 *   is a new frame. When "updateImage" is called from the main thread, it
47 *   copies this new frame into an IplImage, but only if this frame has not
48 *   been copied before. When "getOutput" is called from the main thread,
49 *   it gives the last copied IplImage.
50 *
51 *****************************************************************************/
52
53#define DISABLE_AUTO_RESTART 999
54
@interface CaptureDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    int newFrame;                          // set to 1 by the capture callback; cleared by updateImage
    CVImageBufferRef  mCurrentImageBuffer; // most recent frame delivered by AVFoundation (retained)
    char* imagedata;                       // raw BGRA pixel copy of the current frame
    IplImage* image;                       // BGRA header wrapping imagedata
    char* bgr_imagedata;                   // BGR pixels converted from imagedata
    IplImage* bgr_image;                   // BGR header wrapping bgr_imagedata
    IplImage* bgr_image_r90;               // bgr_image rotated 90 degrees (transpose + flip)
    size_t currSize;                       // byte size of the allocated pixel buffers
}

// AVFoundation sample-buffer callback; invoked on the capture dispatch queue,
// not on the thread that calls updateImage.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection;


// Copies the most recently captured buffer into the IplImages.
// Returns 1 if a new frame was copied, 0 if no new frame has arrived.
- (int)updateImage;
// Returns the last image produced by updateImage (rotated BGR); owned by the delegate.
- (IplImage*)getOutput;

@end
76
77/*****************************************************************************
78 *
79 * CvCaptureCAM Declaration.
80 *
81 * CvCaptureCAM is the instantiation of a capture source for cameras.
82 *
83 *****************************************************************************/
84
// Camera capture source backed by an AVCaptureSession.
class CvCaptureCAM : public CvCapture {
    public:
        CvCaptureCAM(int cameraNum = -1) ;
        ~CvCaptureCAM();
        virtual bool grabFrame();                       // waits up to 5 s for a frame
        virtual IplImage* retrieveFrame(int);           // last frame copied by the delegate
        virtual IplImage* queryFrame();                 // grab + retrieve
        virtual double getProperty(int property_id) const;
        virtual bool setProperty(int property_id, double value);
        virtual int didStart();                         // 1 if the device initialized

    private:
        AVCaptureSession            *mCaptureSession;
        AVCaptureDeviceInput        *mCaptureDeviceInput;
        AVCaptureVideoDataOutput    *mCaptureDecompressedVideoOutput;
        AVCaptureDevice             *mCaptureDevice;
        CaptureDelegate             *capture;           // receives frames on the capture queue

        int startCaptureDevice(int cameraNum);          // returns 1 on success, 0 on failure
        void stopCaptureDevice();

        void setWidthHeight();                          // re-applies width/height to the output
        bool grabFrame(double timeOut);                 // polls until a frame arrives or timeOut (s)

        int camNum;             // device index actually used
        int width;              // requested capture width (0 = default)
        int height;             // requested capture height (0 = default)
        int settingWidth;       // set while CV_CAP_PROP_FRAME_WIDTH is pending
        int settingHeight;      // set while CV_CAP_PROP_FRAME_HEIGHT is pending
        int started;
        int disableAutoRestart; // toggled via the DISABLE_AUTO_RESTART property
};
117
118
119/*****************************************************************************
120 *
121 * CvCaptureFile Declaration.
122 *
123 * CvCaptureFile is the instantiation of a capture source for video files.
124 *
125 *****************************************************************************/
126
// Video-file capture source backed by an AVAssetReader.
class CvCaptureFile : public CvCapture {
    public:

        CvCaptureFile(const char* filename) ;
        ~CvCaptureFile();
        virtual bool grabFrame();                       // no-op for files; decoding is in retrieveFrame
        virtual IplImage* retrieveFrame(int);
        virtual IplImage* queryFrame();
        virtual double getProperty(int property_id) const;
        virtual bool setProperty(int property_id, double value);
        virtual int didStart();                         // 1 if the movie opened successfully

    private:

        AVAssetReader *mMovieReader;
        char* imagedata;        // raw BGRA pixel copy of the decoded frame
        IplImage* image;        // BGRA header wrapping imagedata
        char* bgr_imagedata;    // BGR pixels converted from imagedata
        IplImage* bgr_image;    // BGR header wrapping bgr_imagedata
        size_t currSize;        // byte size of the allocated pixel buffers

        IplImage* retrieveFramePixelBuffer();   // decodes the next sample buffer into bgr_image
        double getFPS();

        int movieWidth;
        int movieHeight;
        double movieFPS;        // nominal frame rate reported by the video track
        double currentFPS;
        double movieDuration;   // in ms
        int changedPos;

        int started;
};
160
161
162/*****************************************************************************
163 *
164 * CvCaptureFile Declaration.
165 *
166 * CvCaptureFile is the instantiation of a capture source for video files.
167 *
168 *****************************************************************************/
169
// Video writer backed by an AVAssetWriter with a pixel-buffer adaptor.
class CvVideoWriter_AVFoundation : public CvVideoWriter{
    public:
        CvVideoWriter_AVFoundation(const char* filename, int fourcc,
                double fps, CvSize frame_size,
                int is_color=1);
        ~CvVideoWriter_AVFoundation();
        bool writeFrame(const IplImage* image);
    private:
        IplImage* argbimage;    // scratch ARGB frame handed to the adaptor

        AVAssetWriter *mMovieWriter;
        AVAssetWriterInput* mMovieWriterInput;
        AVAssetWriterInputPixelBufferAdaptor* mMovieWriterAdaptor;

        NSString* path;         // destination file path
        NSString* codec;        // AVFoundation codec identifier derived from fourcc
        NSString* fileType;     // container type derived from the file extension
        double movieFPS;
        CvSize movieSize;
        int movieColor;         // nonzero for color input frames
        unsigned long frameCount;
};
192
193
194/****************** Implementation of interface functions ********************/
195
196
// Create a file-based capture; hand it to the caller only if the movie opened.
CvCapture* cvCreateFileCapture_AVFoundation(const char* filename) {
    CvCaptureFile* capture = new CvCaptureFile(filename);
    if (!capture->didStart()) {
        delete capture;
        return NULL;
    }
    return capture;
}
205
// Create a camera capture for the given device index.
// On failure, cvReleaseCapture destroys the object and nulls the pointer,
// so NULL is returned to the caller.
CvCapture* cvCreateCameraCapture_AVFoundation(int index ) {
    CvCapture* capture = new CvCaptureCAM(index);
    if (!((CvCaptureCAM*)capture)->didStart())
        cvReleaseCapture(&capture);
    return capture;
}
214
// Create an AVFoundation-backed video writer.  Construction never fails here;
// problems (bad codec, unwritable path) surface later when frames are written.
CvVideoWriter* cvCreateVideoWriter_AVFoundation(const char* filename, int fourcc,
        double fps, CvSize frame_size,
        int is_color) {
    return new CvVideoWriter_AVFoundation(filename, fourcc, fps,
                                          frame_size, is_color);
}
220
221/********************** Implementation of Classes ****************************/
222/*****************************************************************************
223 *
224 * CvCaptureCAM Implementation.
225 *
226 * CvCaptureCAM is the instantiation of a capture source for cameras.
227 *
228 *****************************************************************************/
229
/* Initialize all members, then try to open camera cameraNum.
 * "started" records whether the device came up; didStart() reports it. */
CvCaptureCAM::CvCaptureCAM(int cameraNum) {
    mCaptureSession = nil;
    mCaptureDeviceInput = nil;
    mCaptureDecompressedVideoOutput = nil;
    // Bug fix: mCaptureDevice was never initialized. If startCaptureDevice
    // fails before assigning it (e.g. no video devices attached),
    // getProperty/setProperty would read an indeterminate pointer.
    mCaptureDevice = nil;
    capture = nil;

    width = 0;
    height = 0;
    settingWidth = 0;
    settingHeight = 0;
    disableAutoRestart = 0;

    camNum = cameraNum;

    if (!startCaptureDevice(camNum)) {
        std::cout << "Warning, camera failed to properly initialize!" << std::endl;
        started = 0;
    } else {
        started = 1;
    }
}
252
// Tear down the capture session and release all AVFoundation objects.
CvCaptureCAM::~CvCaptureCAM() {
    stopCaptureDevice();
    //cout << "Cleaned up camera." << endl;
}
257
// Returns 1 if the camera initialized successfully in the constructor.
int CvCaptureCAM::didStart() {
    return started;
}
261
262
// Grab with the default 5-second timeout.
bool CvCaptureCAM::grabFrame() {
    return grabFrame(5);
}
266
/* Pump the current run loop until the delegate reports a fresh frame or
 * timeOut seconds have elapsed.  The AVFoundation callback fires on its own
 * dispatch queue, so this thread only polls [capture updateImage]. */
bool CvCaptureCAM::grabFrame(double timeOut) {

    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
    double sleepTime = 0.005;   // polling slice, seconds
    double total = 0;           // accumulated wait time, seconds

    // Run the loop in short slices so updateImage is re-checked frequently.
    NSDate *loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];
    while (![capture updateImage] && (total += sleepTime)<=timeOut &&
            [[NSRunLoop currentRunLoop] runMode: NSDefaultRunLoopMode
            beforeDate:loopUntil])
        loopUntil = [NSDate dateWithTimeIntervalSinceNow:sleepTime];

    [localpool drain];

    // True when the loop exited because a frame arrived, not because time ran out.
    return total <= timeOut;
}
283
// Returns the delegate's last converted frame (rotated BGR); owned by the delegate.
IplImage* CvCaptureCAM::retrieveFrame(int) {
    return [capture getOutput];
}
287
// Blocks until a frame is grabbed, then returns it.
// NOTE(review): loops forever if the camera never delivers a frame; the
// auto-restart recovery below is intentionally disabled.
IplImage* CvCaptureCAM::queryFrame() {
    while (!grabFrame()) {
        std::cout << "WARNING: Couldn't grab new frame from camera!!!" << std::endl;
        /*
             cout << "Attempting to restart camera; set capture property DISABLE_AUTO_RESTART to disable." << endl;
             stopCaptureDevice();
             startCaptureDevice(camNum);
         */
    }
    return retrieveFrame(0);
}
299
/* Stop the session and release every object created by startCaptureDevice.
 * NOTE(review): the delegate is released without first clearing the output's
 * sampleBufferDelegate — assumed safe because stopRunning halts callbacks;
 * verify for teardown-heavy usage. */
void CvCaptureCAM::stopCaptureDevice() {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    [mCaptureSession stopRunning];

    [mCaptureSession release];
    [mCaptureDeviceInput release];

    [mCaptureDecompressedVideoOutput release];
    [capture release];
    [localpool drain];

}
313
/* Open camera cameraNum, build the AVCaptureSession pipeline
 * (device -> input -> BGRA video-data output -> CaptureDelegate),
 * start it running, and wait for the first frame.
 * Returns 1 on success, 0 on failure. */
int CvCaptureCAM::startCaptureDevice(int cameraNum) {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    capture = [[CaptureDelegate alloc] init];

    AVCaptureDevice *device;
    NSArray* devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    if ([devices count] == 0) {
        std::cout << "AV Foundation didn't find any attached Video Input Devices!" << std::endl;
        [localpool drain];
        return 0;
    }

    // Wrap an out-of-range index around; negative index means system default.
    if (cameraNum >= 0) {
        camNum = cameraNum % [devices count];
        if (camNum != cameraNum) {
            std::cout << "Warning: Max Camera Num is " << [devices count]-1 << "; Using camera " << camNum << std::endl;
        }
        device = [devices objectAtIndex:camNum];
    } else {
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]  ;
    }
    mCaptureDevice = device;
    //int success;
    NSError* error;

    if (device) {

        mCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error] ;
        mCaptureSession = [[AVCaptureSession alloc] init] ;

        /*
             success = [mCaptureSession addInput:mCaptureDeviceInput];

             if (!success) {
             cout << "AV Foundation failed to start capture session with opened Capture Device" << endl;
             [localpool drain];
             return 0;
             }
         */

        mCaptureDecompressedVideoOutput = [[AVCaptureVideoDataOutput alloc] init];

        // Frames are delivered to the delegate on this serial dispatch queue.
        dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
        [mCaptureDecompressedVideoOutput setSampleBufferDelegate:capture queue:queue];
        dispatch_release(queue);


        // Request BGRA output; add explicit dimensions only when the caller
        // set a size via CV_CAP_PROP_FRAME_WIDTH/HEIGHT.
        NSDictionary *pixelBufferOptions ;
        if (width > 0 && height > 0) {
            pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
                [NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
                [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
                (id)kCVPixelBufferPixelFormatTypeKey,
                nil];
        } else {
            pixelBufferOptions = [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
                (id)kCVPixelBufferPixelFormatTypeKey,
                nil];
        }

        //TODO: add new interface for setting fps and capturing resolution.
        [mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
        mCaptureDecompressedVideoOutput.alwaysDiscardsLateVideoFrames = YES;

#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
        mCaptureDecompressedVideoOutput.minFrameDuration = CMTimeMake(1, 30);
#endif

        //Slow. 1280*720 for iPhone4, iPod back camera. 640*480 for front camera
        //mCaptureSession.sessionPreset = AVCaptureSessionPresetHigh; // fps ~= 5 slow for OpenCV

        mCaptureSession.sessionPreset = AVCaptureSessionPresetMedium; //480*360
        if (width == 0 ) width = 480;
        if (height == 0 ) height = 360;

        [mCaptureSession addInput:mCaptureDeviceInput];
        [mCaptureSession addOutput:mCaptureDecompressedVideoOutput];

        /*
        // Does not work! This is the preferred way (hardware acceleration) to change pixel buffer orientation.
        // I'm now using cvtranspose and cvflip instead, which takes cpu cycles.
        AVCaptureConnection *connection = [[mCaptureDecompressedVideoOutput connections] objectAtIndex:0];
        if([connection isVideoOrientationSupported]) {
            //NSLog(@"Setting pixel buffer orientation");
            connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        }
        */

        [mCaptureSession startRunning];

        // Wait (up to 60 s) for the first frame so callers see valid data immediately.
        grabFrame(60);
        [localpool drain];
        return 1;
    }

    [localpool drain];
    return 0;
}
415
// Re-apply the output's video settings with the currently requested frame
// size, then grab a frame so the new resolution takes effect before returning.
void CvCaptureCAM::setWidthHeight() {
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

    NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithDouble:1.0*width], (id)kCVPixelBufferWidthKey,
        [NSNumber numberWithDouble:1.0*height], (id)kCVPixelBufferHeightKey,
        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA],
        (id)kCVPixelBufferPixelFormatTypeKey,
        nil];
    [mCaptureDecompressedVideoOutput setVideoSettings:settings];

    grabFrame(60);
    [pool drain];
}
429
430//added macros into headers in videoio_c.h
431/*
432#define CV_CAP_PROP_IOS_DEVICE_FOCUS 9001
433#define CV_CAP_PROP_IOS_DEVICE_EXPOSURE 9002
434#define CV_CAP_PROP_IOS_DEVICE_FLASH 9003
435#define CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE 9004
436#define CV_CAP_PROP_IOS_DEVICE_TORCH 9005
437*/
438
439
440/*
441// All available settings are taken from iOS API
442
443enum {
444   AVCaptureFlashModeOff    = 0,
445   AVCaptureFlashModeOn     = 1,
446   AVCaptureFlashModeAuto   = 2
447};
448typedef NSInteger AVCaptureFlashMode;
449
450enum {
451   AVCaptureTorchModeOff    = 0,
452   AVCaptureTorchModeOn     = 1,
453   AVCaptureTorchModeAuto   = 2
454};
455typedef NSInteger AVCaptureTorchMode;
456
457enum {
458   AVCaptureFocusModeLocked                = 0,
459   AVCaptureFocusModeAutoFocus             = 1,
460   AVCaptureFocusModeContinuousAutoFocus   = 2,
461};
462typedef NSInteger AVCaptureFocusMode;
463
464enum {
465   AVCaptureExposureModeLocked                    = 0,
466   AVCaptureExposureModeAutoExpose                = 1,
467   AVCaptureExposureModeContinuousAutoExposure    = 2,
468};
469typedef NSInteger AVCaptureExposureMode;
470
471enum {
472   AVCaptureWhiteBalanceModeLocked             = 0,
473   AVCaptureWhiteBalanceModeAutoWhiteBalance   = 1,
474   AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
475};
476typedef NSInteger AVCaptureWhiteBalanceMode;
477*/
478
/* Query a capture property.  Frame dimensions come from the input port's
 * format description; device-mode properties read straight off the device.
 * NOTE(review): the dimensions are fetched even for device-mode queries, and
 * [ports objectAtIndex:0] assumes at least one port — holds once
 * startCaptureDevice succeeded. */
double CvCaptureCAM::getProperty(int property_id) const{
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    /*
         NSArray* connections = [mCaptureDeviceInput	connections];
         QTFormatDescription* format = [[connections objectAtIndex:0] formatDescription];
         NSSize s1 = [[format attributeForKey:QTFormatDescriptionVideoCleanApertureDisplaySizeAttribute] sizeValue];
     */

    NSArray* ports = mCaptureDeviceInput.ports;
    CMFormatDescriptionRef format = [[ports objectAtIndex:0] formatDescription];
    CGSize s1 = CMVideoFormatDescriptionGetPresentationDimensions(format, YES, YES);

    int w=(int)s1.width, h=(int)s1.height;

    [localpool drain];

    switch (property_id) {
        case CV_CAP_PROP_FRAME_WIDTH:
            return w;
        case CV_CAP_PROP_FRAME_HEIGHT:
            return h;

        // iOS-specific device modes (focus/exposure/flash/white-balance/torch)
        // are returned as their raw AVFoundation enum values.
        case CV_CAP_PROP_IOS_DEVICE_FOCUS:
            return mCaptureDevice.focusMode;
        case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
            return mCaptureDevice.exposureMode;
        case CV_CAP_PROP_IOS_DEVICE_FLASH:
            return mCaptureDevice.flashMode;
        case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
            return mCaptureDevice.whiteBalanceMode;
        case CV_CAP_PROP_IOS_DEVICE_TORCH:
            return mCaptureDevice.torchMode;

        default:
            return 0;
    }


}
519
/* Set a capture property.  Width/height are applied together only once both
 * have been supplied.  Device modes (focus/exposure/flash/white-balance/torch)
 * follow the AVFoundation pattern: check support, lock the device for
 * configuration, apply, unlock.  Returns true on success. */
bool CvCaptureCAM::setProperty(int property_id, double value) {
    switch (property_id) {
        case CV_CAP_PROP_FRAME_WIDTH:
            width = value;
            settingWidth = 1;
            // Apply only when both dimensions have been set.
            if (settingWidth && settingHeight) {
                setWidthHeight();
                settingWidth =0;
                settingHeight = 0;
            }
            return true;

        case CV_CAP_PROP_FRAME_HEIGHT:
            height = value;
            settingHeight = 1;
            if (settingWidth && settingHeight) {
                setWidthHeight();
                settingWidth =0;
                settingHeight = 0;
            }
            return true;

        case CV_CAP_PROP_IOS_DEVICE_FOCUS:
            if ([mCaptureDevice isFocusModeSupported:(AVCaptureFocusMode)value]){
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setFocusMode:(AVCaptureFocusMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Focus set");
                return true;
            }else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_EXPOSURE:
            if ([mCaptureDevice isExposureModeSupported:(AVCaptureExposureMode)value]){
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setExposureMode:(AVCaptureExposureMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Exposure set");
                return true;
            }else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_FLASH:
            if ( [mCaptureDevice hasFlash] && [mCaptureDevice isFlashModeSupported:(AVCaptureFlashMode)value]){
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setFlashMode:(AVCaptureFlashMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Flash mode set");
                return true;
            }else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_WHITEBALANCE:
            if ([mCaptureDevice isWhiteBalanceModeSupported:(AVCaptureWhiteBalanceMode)value]){
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setWhiteBalanceMode:(AVCaptureWhiteBalanceMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"White balance set");
                return true;
            }else {
                return false;
            }

        case CV_CAP_PROP_IOS_DEVICE_TORCH:
            // NOTE(review): torch support is gated on hasFlash (as in the
            // original); hasTorch would be the more direct check — verify.
            if ([mCaptureDevice hasFlash] && [mCaptureDevice isTorchModeSupported:(AVCaptureTorchMode)value]){
                NSError* error = nil;
                [mCaptureDevice lockForConfiguration:&error];
                if (error) return false;
                [mCaptureDevice setTorchMode:(AVCaptureTorchMode)value];
                [mCaptureDevice unlockForConfiguration];
                //NSLog(@"Torch mode set");
                return true;
            }else {
                return false;
            }

        case DISABLE_AUTO_RESTART:
            disableAutoRestart = value;
            return 1;
        default:
            return false;
    }
}
614
615
616/*****************************************************************************
617 *
618 * CaptureDelegate Implementation.
619 *
620 * CaptureDelegate is notified on a separate thread by the OS whenever there
621 *   is a new frame. When "updateImage" is called from the main thread, it
622 *   copies this new frame into an IplImage, but only if this frame has not
623 *   been copied before. When "getOutput" is called from the main thread,
624 *   it gives the last copied IplImage.
625 *
626 *****************************************************************************/
627
628
@implementation CaptureDelegate

/* Designated initializer: start with no frame and no allocated buffers. */
- (id)init {
    // Bug fix: the result of [super init] was ignored; it must be assigned
    // to self (and checked) per the Cocoa initializer contract.
    self = [super init];
    if (self) {
        newFrame = 0;
        imagedata = NULL;
        bgr_imagedata = NULL;
        currSize = 0;
        image = NULL;
        bgr_image = NULL;
        bgr_image_r90 = NULL;
        mCurrentImageBuffer = NULL;
    }
    return self;
}


/* Free pixel buffers and IplImage headers, and drop the retained frame. */
-(void)dealloc {
    if (imagedata != NULL) free(imagedata);
    if (bgr_imagedata != NULL) free(bgr_imagedata);
    cvReleaseImage(&image);
    cvReleaseImage(&bgr_image);
    cvReleaseImage(&bgr_image_r90);
    // Bug fix: the last frame retained in captureOutput was never released,
    // leaking one CVImageBuffer per delegate. CVBufferRelease(NULL) is a no-op.
    CVBufferRelease(mCurrentImageBuffer);
    [super dealloc];
}



/* Called by AVFoundation on the capture queue for every delivered frame.
 * Retains the new buffer, swaps it in under @synchronized, and releases the
 * previous one outside the critical section. */
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection{

    // Failed
    // connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    (void)captureOutput;
    (void)connection;

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    CVBufferRetain(imageBuffer);
    CVImageBufferRef imageBufferToRelease  = mCurrentImageBuffer;

    @synchronized (self) {

        mCurrentImageBuffer = imageBuffer;
        newFrame = 1;       // signal updateImage that fresh data is available
    }

    CVBufferRelease(imageBufferToRelease);

}


/* Returns the last frame converted by updateImage (rotated BGR). */
-(IplImage*) getOutput {
    //return bgr_image;
    return bgr_image_r90;
}

/* Copy the most recent captured buffer into the IplImages.
 * Returns 1 when a new frame was converted, 0 when nothing new arrived. */
-(int) updateImage {
    if (newFrame==0) return 0;
    CVPixelBufferRef pixels;

    // Take our own reference so the callback can keep swapping buffers.
    @synchronized (self){
        pixels = CVBufferRetain(mCurrentImageBuffer);
        newFrame = 0;
    }

    CVPixelBufferLockBaseAddress(pixels, 0);
    uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);

    size_t width = CVPixelBufferGetWidth(pixels);
    size_t height = CVPixelBufferGetHeight(pixels);
    size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);

    if (rowBytes != 0) {

        // (Re)allocate the scratch buffers when the frame geometry changes.
        if (currSize != rowBytes*height*sizeof(char)) {
            currSize = rowBytes*height*sizeof(char);
            if (imagedata != NULL) free(imagedata);
            if (bgr_imagedata != NULL) free(bgr_imagedata);
            imagedata = (char*)malloc(currSize);
            bgr_imagedata = (char*)malloc(currSize);
        }

        memcpy(imagedata, baseaddress, currSize);

        // BGRA header over the raw copy.
        if (image == NULL) {
            image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
        }
        image->width = (int)width;
        image->height = (int)height;
        image->nChannels = 4;
        image->depth = IPL_DEPTH_8U;
        image->widthStep = (int)rowBytes;
        image->imageData = imagedata;
        image->imageSize = (int)currSize;

        // BGR header over the converted copy.
        if (bgr_image == NULL) {
            bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
        }
        bgr_image->width = (int)width;
        bgr_image->height = (int)height;
        bgr_image->nChannels = 3;
        bgr_image->depth = IPL_DEPTH_8U;
        bgr_image->widthStep = (int)rowBytes;
        bgr_image->imageData = bgr_imagedata;
        bgr_image->imageSize = (int)currSize;

        cvCvtColor(image, bgr_image, CV_BGRA2BGR);

        // image taken from the buffer is incorrected rotated. I'm using cvTranspose + cvFlip.
        // There should be an option in iOS API to rotate the buffer output orientation.
        // iOS provides hardware accelerated rotation through AVCaptureConnection class
        // I can't get it work.
        if (bgr_image_r90 == NULL){
            bgr_image_r90 = cvCreateImage(cvSize((int)height, (int)width), IPL_DEPTH_8U, 3);
        }
        cvTranspose(bgr_image, bgr_image_r90);
        cvFlip(bgr_image_r90, NULL, 1);

    }

    CVPixelBufferUnlockBaseAddress(pixels, 0);
    CVBufferRelease(pixels);

    return 1;
}

@end
756
757
758/*****************************************************************************
759 *
760 * CvCaptureFile Implementation.
761 *
762 * CvCaptureFile is the instantiation of a capture source for video files.
763 *
764 *****************************************************************************/
765
/* Open the movie at `filename` with AVURLAsset, read the video track's
 * geometry/FPS/duration, and start an AVAssetReader producing BGRA frames.
 * Sets started=1 only when exactly one video track is found. */
CvCaptureFile::CvCaptureFile(const char* filename) {

    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    mMovieReader = nil;
    image = NULL;
    bgr_image = NULL;
    imagedata = NULL;
    bgr_imagedata = NULL;
    currSize = 0;

    movieWidth = 0;
    movieHeight = 0;
    movieFPS = 0;
    currentFPS = 0;
    movieDuration = 0;
    changedPos = 0;

    started = 0;

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:
        [NSURL fileURLWithPath: [NSString stringWithUTF8String:filename]]
        options:nil];

    // Only proceed when there is exactly one video track.
    AVAssetTrack* videoTrack = nil;
    NSArray* tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] == 1)
    {
        videoTrack = [tracks objectAtIndex:0];

        movieWidth = videoTrack.naturalSize.width;
        movieHeight = videoTrack.naturalSize.height;
        movieFPS = videoTrack.nominalFrameRate;

        currentFPS = movieFPS; //Debugging !! should be getFPS();
        //Debugging. need to be checked

        // In ms
        movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;

        started = 1;
        NSError* error = nil;
        mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
        if (error)
            NSLog(@"%@", [error localizedDescription]);

        // Decode straight to 32-bit BGRA so conversion to BGR is cheap.
        NSDictionary* videoSettings =
            [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
            forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];

        [mMovieReader addOutput:[AVAssetReaderTrackOutput
            assetReaderTrackOutputWithTrack:videoTrack
            outputSettings:videoSettings]];
        [mMovieReader startReading];
    }

    /*
    // Asynchronously open the video in another thread. Always fail.
    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:
    ^{
    // The completion block goes here.
    dispatch_async(dispatch_get_main_queue(),
    ^{
    AVAssetTrack* ::videoTrack = nil;
    NSArray* ::tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] == 1)
    {
    videoTrack = [tracks objectAtIndex:0];

    movieWidth = videoTrack.naturalSize.width;
    movieHeight = videoTrack.naturalSize.height;
    movieFPS = videoTrack.nominalFrameRate;
    currentFPS = movieFPS; //Debugging !! should be getFPS();
    //Debugging. need to be checked
    movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;
    started = 1;

    NSError* ::error = nil;
    // mMovieReader is a member variable
    mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    if (error)
    NSLog(@"%@", [error localizedDescription]);

    NSDictionary* ::videoSettings =
    [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
    forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];

    [mMovieReader addOutput:[AVAssetReaderTrackOutput
    assetReaderTrackOutputWithTrack:videoTrack
    outputSettings:videoSettings]];
    [mMovieReader startReading];
    }
    });

    }];
     */

    [localpool drain];
}
865
// Free pixel buffers, image headers, and the asset reader.
CvCaptureFile::~CvCaptureFile() {

    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
    if (imagedata != NULL) free(imagedata);
    if (bgr_imagedata != NULL) free(bgr_imagedata);
    cvReleaseImage(&image);
    cvReleaseImage(&bgr_image);
    [mMovieReader release];
    [localpool drain];
}
876
// Returns 1 if the movie opened with a usable video track.
int CvCaptureFile::didStart() {
    return started;
}
880
/* Grab is a no-op for file capture: all decoding happens in
 * retrieveFrame/retrieveFramePixelBuffer.  Reports the movie's nominal
 * frame rate as the current rate and always succeeds.
 * (A QTKit-era variant that measured FPS by stepping the session was
 * removed as dead code.) */
bool CvCaptureFile::grabFrame() {
    currentFPS = movieFPS;
    return true;   // was "return 1" — this is a bool function
}
902
903
/* Decode the next sample buffer from the asset reader, copy its BGRA pixels,
 * and convert them into bgr_image.  Returns bgr_image (owned by this object)
 * or NULL when the reader is exhausted/failed. */
IplImage* CvCaptureFile::retrieveFramePixelBuffer() {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    if (mMovieReader.status != AVAssetReaderStatusReading){
        // Bug fix: this early return leaked the autorelease pool.
        [localpool drain];
        return NULL;
    }


    AVAssetReaderTrackOutput * output = [mMovieReader.outputs objectAtIndex:0];
    CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
    if (!sampleBuffer) {
        [localpool drain];
        return NULL;
    }
    CVPixelBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferRef pixels = CVBufferRetain(frame);

    CVPixelBufferLockBaseAddress(pixels, 0);

    uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);
    size_t width = CVPixelBufferGetWidth(pixels);
    size_t height = CVPixelBufferGetHeight(pixels);
    size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);

    if (rowBytes != 0) {

        // (Re)allocate scratch buffers when the frame geometry changes.
        if (currSize != rowBytes*height*sizeof(char)) {
            currSize = rowBytes*height*sizeof(char);
            if (imagedata != NULL) free(imagedata);
            if (bgr_imagedata != NULL) free(bgr_imagedata);
            imagedata = (char*)malloc(currSize);
            bgr_imagedata = (char*)malloc(currSize);
        }

        memcpy(imagedata, baseaddress, currSize);

        // BGRA header over the raw copy.
        if (image == NULL) {
            image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
        }

        image->width = (int)width;
        image->height = (int)height;
        image->nChannels = 4;
        image->depth = IPL_DEPTH_8U;
        image->widthStep = (int)rowBytes;
        image->imageData = imagedata;
        image->imageSize = (int)currSize;

        // BGR header over the converted copy.
        if (bgr_image == NULL) {
            bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
        }

        bgr_image->width = (int)width;
        bgr_image->height = (int)height;
        bgr_image->nChannels = 3;
        bgr_image->depth = IPL_DEPTH_8U;
        bgr_image->widthStep = (int)rowBytes;
        bgr_image->imageData = bgr_imagedata;
        bgr_image->imageSize = (int)currSize;

        cvCvtColor(image, bgr_image,CV_BGRA2BGR);

    }

    CVPixelBufferUnlockBaseAddress(pixels, 0);
    CVBufferRelease(pixels);
    CMSampleBufferInvalidate(sampleBuffer);
    CFRelease(sampleBuffer);

    [localpool drain];
    return bgr_image;
}
978
979
// Returns the most recently decoded frame. The channel index is ignored —
// movie files expose exactly one video stream here.
IplImage* CvCaptureFile::retrieveFrame(int) {
    return retrieveFramePixelBuffer();
}
983
// Convenience wrapper combining grab (bookkeeping only in this backend)
// with decode; mirrors the classic cvQueryFrame() contract.
IplImage* CvCaptureFile::queryFrame() {
    grabFrame();
    return retrieveFrame(0);
}
988
// NOTE(review): the QTKit-era probe that estimated FPS by stepping the movie
// forward/backward and measuring the timestamp delta was dropped in the
// AVFoundation port; a fixed nominal rate is reported instead.
// TODO: derive the real rate from the video track's nominalFrameRate.
double CvCaptureFile::getFPS() {
    return 30.0; //TODO: Debugging
}
1012
// NOTE(review): property queries (CV_CAP_PROP_POS_MSEC/_POS_FRAMES/
// _FRAME_WIDTH/_FRAME_HEIGHT/_FPS, ...) were implemented against the old
// QTMovie API and have not been ported to AVFoundation; every property
// currently reads as a placeholder value.
double CvCaptureFile::getProperty(int /*property_id*/) const{
    return 1.0; //Debugging
}
1053
// NOTE(review): seeking and property assignment (CV_CAP_PROP_POS_MSEC et al.)
// were implemented against the old QTMovie API and have not been ported to
// AVFoundation; the call is currently a no-op that claims success so that
// callers relying on the historical return value keep working.
bool CvVideoWriter_AVFoundation_setProperty_placeholder_docs(); // (see note)
bool CvCaptureFile::setProperty(int /*property_id*/, double /*value*/) {
    return true;
}
1102
1103
1104/*****************************************************************************
1105 *
1106 * CvVideoWriter Implementation.
1107 *
1108 * CvVideoWriter is the instantiation of a video output class
1109 *
1110 *****************************************************************************/
1111
1112
// Constructor: wires an AVAssetWriter pipeline for encoding frames to disk.
// filename   — output path ('~' is expanded); extension selects container type
// fourcc     — requested codec FourCC (JPEG/MJPG -> AVVideoCodecJPEG,
//              H264/avc1 or anything else -> AVVideoCodecH264)
// fps        — playback rate used for presentation timestamps
// frame_size — fixed frame dimensions; writeFrame() rejects mismatches
// is_color   — non-zero for BGR input, zero for grayscale
CvVideoWriter_AVFoundation::CvVideoWriter_AVFoundation(const char* filename, int fourcc,
        double fps, CvSize frame_size,
        int is_color) {

    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    // Cache the writer parameters; argbimage is the per-frame BGRA staging
    // image reused by writeFrame().
    frameCount = 0;
    movieFPS = fps;
    movieSize = frame_size;
    movieColor = is_color;
    argbimage = cvCreateImage(movieSize, IPL_DEPTH_8U, 4);
    path = [[[NSString stringWithCString:filename encoding:NSASCIIStringEncoding] stringByExpandingTildeInPath] retain];


    /*
         AVFileTypeQuickTimeMovie
         UTI for the QuickTime movie file format.
         The value of this UTI is com.apple.quicktime-movie. Files are identified with the .mov and .qt extensions.

         AVFileTypeMPEG4
         UTI for the MPEG-4 file format.
         The value of this UTI is public.mpeg-4. Files are identified with the .mp4 extension.

         AVFileTypeAppleM4V
         UTI for the iTunes video file format.
         The value of this UTI is com.apple.mpeg-4-video. Files are identified with the .m4v extension.

         AVFileType3GPP
         UTI for the 3GPP file format.
         The value of this UTI is public.3gpp. Files are identified with the .3gp, .3gpp, and .sdv extensions.
     */

    // Map the file extension onto an AVFoundation container UTI (default mp4).
    NSString *fileExt =[[[path pathExtension] lowercaseString] copy];
    if ([fileExt isEqualToString:@"mov"] || [fileExt isEqualToString:@"qt"]){
        fileType = [AVFileTypeQuickTimeMovie copy];
    }else if ([fileExt isEqualToString:@"mp4"]){
        fileType = [AVFileTypeMPEG4 copy];
    }else if ([fileExt isEqualToString:@"m4v"]){
        fileType = [AVFileTypeAppleM4V copy];
#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
    }else if ([fileExt isEqualToString:@"3gp"] || [fileExt isEqualToString:@"3gpp"] || [fileExt isEqualToString:@"sdv"]  ){
        fileType = [AVFileType3GPP copy];
#endif
    } else{
        fileType = [AVFileTypeMPEG4 copy];  //default mp4
    }
    [fileExt release];

    // Round-trip the FourCC through CV_FOURCC purely as a sanity check on the
    // caller-supplied code; a mismatch is only warned about, not fatal.
    char cc[5];
    cc[0] = fourcc & 255;
    cc[1] = (fourcc >> 8) & 255;
    cc[2] = (fourcc >> 16) & 255;
    cc[3] = (fourcc >> 24) & 255;
    cc[4] = 0;
    int cc2 = CV_FOURCC(cc[0], cc[1], cc[2], cc[3]);
    if (cc2!=fourcc) {
        std::cout << "WARNING: Didn't properly encode FourCC. Expected " << fourcc
            << " but got " << cc2 << "." << std::endl;
        //exception;
    }

    // Two codec supported AVVideoCodecH264 AVVideoCodecJPEG
    // On iPhone 3G H264 is not supported.
    if (fourcc == CV_FOURCC('J','P','E','G') || fourcc == CV_FOURCC('j','p','e','g') ||
            fourcc == CV_FOURCC('M','J','P','G') || fourcc == CV_FOURCC('m','j','p','g') ){
        codec = [AVVideoCodecJPEG copy]; // Use JPEG codec if specified, otherwise H264
    }else if(fourcc == CV_FOURCC('H','2','6','4') || fourcc == CV_FOURCC('a','v','c','1')){
            codec = [AVVideoCodecH264 copy];
    }else{
        codec = [AVVideoCodecH264 copy]; // default canonical H264.

    }

    //NSLog(@"Path: %@", path);

    NSError *error = nil;


    // Make sure the file does not already exist. Necessary to overwirte??
    /*
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:path]){
        [fileManager removeItemAtPath:path error:&error];
    }
    */

    // Wire the writer:
    // Supported file types:
    //      AVFileTypeQuickTimeMovie AVFileTypeMPEG4 AVFileTypeAppleM4V AVFileType3GPP

    mMovieWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
        fileType:fileType
        error:&error];
    //NSParameterAssert(mMovieWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
        codec, AVVideoCodecKey,
        [NSNumber numberWithInt:movieSize.width], AVVideoWidthKey,
        [NSNumber numberWithInt:movieSize.height], AVVideoHeightKey,
        nil];

    mMovieWriterInput = [[AVAssetWriterInput
        assetWriterInputWithMediaType:AVMediaTypeVideo
        outputSettings:videoSettings] retain];

    //NSParameterAssert(mMovieWriterInput);
    //NSParameterAssert([mMovieWriter canAddInput:mMovieWriterInput]);

    [mMovieWriter addInput:mMovieWriterInput];

    // Adaptor lets writeFrame() feed CVPixelBuffers with explicit timestamps.
    mMovieWriterAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:mMovieWriterInput sourcePixelBufferAttributes:nil];


    //Start a session:
    [mMovieWriter startWriting];
    [mMovieWriter startSessionAtSourceTime:kCMTimeZero];

    // NOTE(review): failure here leaves a half-initialized writer; callers
    // currently have no way to detect it besides writeFrame() failing.
    if(mMovieWriter.status == AVAssetWriterStatusFailed){
        NSLog(@"%@", [mMovieWriter.error localizedDescription]);
        // TODO: error handling, cleanup. Throw execption?
        // return;
    }

    [localpool drain];
}
1240
1241
// Destructor: finalizes the movie file and releases every owned object.
// Order matters: the input must be marked finished before finishWriting,
// and all AVFoundation objects are released before the pool drains.
CvVideoWriter_AVFoundation::~CvVideoWriter_AVFoundation() {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    [mMovieWriterInput markAsFinished];
    // Synchronous finish; blocks until the container is fully written.
    [mMovieWriter finishWriting];
    [mMovieWriter release];
    [mMovieWriterInput release];
    [mMovieWriterAdaptor release];
    [path release];
    [codec release];
    [fileType release];
    cvReleaseImage(&argbimage);

    [localpool drain];

}
1258
// Encodes one frame. The IplImage (BGR when movieColor, grayscale otherwise)
// is converted to BGRA, wrapped in a CVPixelBuffer, and appended with a
// presentation timestamp of frameCount/movieFPS seconds.
// Returns true on success, false if the writer is not ready, the frame size
// mismatches, or the append fails.
bool CvVideoWriter_AVFoundation::writeFrame(const IplImage* iplimage) {
    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];

    // writer status check
    if (![mMovieWriterInput isReadyForMoreMediaData] || mMovieWriter.status !=  AVAssetWriterStatusWriting ) {
        NSLog(@"[mMovieWriterInput isReadyForMoreMediaData] Not ready for media data or ...");
        NSLog(@"mMovieWriter.status: %d. Error: %@", (int)mMovieWriter.status, [mMovieWriter.error localizedDescription]);
        [localpool drain];
        return false;
    }

    BOOL success = FALSE;

    if (iplimage->height!=movieSize.height || iplimage->width!=movieSize.width){
        std::cout<<"Frame size does not match video size."<<std::endl;
        [localpool drain];
        return false;
    }

    // Convert into the reusable BGRA staging image allocated by the ctor.
    if (movieColor) {
        //assert(iplimage->nChannels == 3);
        cvCvtColor(iplimage, argbimage, CV_BGR2BGRA);
    }else{
        //assert(iplimage->nChannels == 1);
        cvCvtColor(iplimage, argbimage, CV_GRAY2BGRA);
    }
    //IplImage -> CGImage conversion
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSData *nsData = [NSData dataWithBytes:argbimage->imageData length:argbimage->imageSize];
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)nsData);
    CGImageRef cgImage = CGImageCreate(argbimage->width, argbimage->height,
            argbimage->depth, argbimage->depth * argbimage->nChannels, argbimage->widthStep,
            colorSpace, kCGImageAlphaLast|kCGBitmapByteOrderDefault,
            provider, NULL, false, kCGRenderingIntentDefault);

    //CGImage -> CVPixelBufferRef coversion
    CVPixelBufferRef pixelBuffer = NULL;
    CFDataRef cfData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
    int status = CVPixelBufferCreateWithBytes(NULL,
            movieSize.width,
            movieSize.height,
            kCVPixelFormatType_32BGRA,
            (void*)CFDataGetBytePtr(cfData),
            CGImageGetBytesPerRow(cgImage),
            NULL,
            0,
            NULL,
            &pixelBuffer);
    if(status == kCVReturnSuccess){
        // BUGFIX: CMTimeMake takes an int32_t timescale, so passing movieFPS
        // (a double) directly truncated fractional rates (e.g. 29.97 -> 29)
        // and skewed every presentation timestamp. Scale both operands by
        // 1000 to keep millisecond precision: t = frameCount / movieFPS.
        int32_t timeScale = (int32_t)(movieFPS * 1000 + 0.5);
        success = [mMovieWriterAdaptor appendPixelBuffer:pixelBuffer
            withPresentationTime:CMTimeMake((int64_t)frameCount * 1000, timeScale)];
    }

    //cleanup
    CFRelease(cfData);
    CVPixelBufferRelease(pixelBuffer);
    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    [localpool drain];

    if (success) {
        frameCount ++;
        //NSLog(@"Frame #%d", frameCount);
        return true;
    }else{
        NSLog(@"Frame appendPixelBuffer failed.");
        return false;
    }

}
1331