/*
 *  cap_ios_video_camera.mm
 *  For iOS video I/O
 *  by Eduard Feicho on 29/07/12
 *  by Alexander Shishkov on 17/07/13
 *  Copyright 2012. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#import "opencv2/videoio/cap_ios.h"
#include "precomp.hpp"
#import <AssetsLibrary/AssetsLibrary.h>


static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;}

#pragma mark - Private Interface


@interface CvVideoCamera ()

- (void)createVideoDataOutput;
- (void)createVideoFileOutput;

@property (nonatomic, retain) CALayer *customPreviewLayer;
@property (nonatomic, retain) AVCaptureVideoDataOutput *videoDataOutput;

@end
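
/*
 * Typical usage (a minimal sketch, not part of this file; it assumes a view
 * controller that adopts the CvVideoCameraDelegate protocol from cap_ios.h):
 *
 *   CvVideoCamera* videoCamera = [[CvVideoCamera alloc] initWithParentView:self.view];
 *   videoCamera.delegate = self;      // receives processImage: for every frame
 *   videoCamera.defaultFPS = 30;
 *   videoCamera.grayscaleMode = NO;
 *   videoCamera.recordVideo = NO;
 *   [videoCamera start];
 *
 *   // CvVideoCameraDelegate callback; the cv::Mat may be modified in place,
 *   // and the modified frame is rendered to the preview layer:
 *   - (void)processImage:(cv::Mat&)image;
 */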


#pragma mark - Implementation


@implementation CvVideoCamera


@synthesize delegate;
@synthesize grayscaleMode;

@synthesize customPreviewLayer;
@synthesize videoDataOutput;

@synthesize recordVideo;
@synthesize rotateVideo;
//@synthesize videoFileOutput;
@synthesize recordAssetWriterInput;
@synthesize recordPixelBufferAdaptor;
@synthesize recordAssetWriter;


#pragma mark - Constructors

- (id)initWithParentView:(UIView*)parent;
{
    self = [super initWithParentView:parent];
    if (self) {
        self.useAVCaptureVideoPreviewLayer = NO;
        self.recordVideo = NO;
        self.rotateVideo = NO;
    }
    return self;
}


#pragma mark - Public interface


- (void)start;
{
    [super start];

    if (self.recordVideo == YES) {
        NSError* error = nil;
        if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
            [[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
            if (error == nil) {
                NSLog(@"[Camera] Deleted stale video file %@", [self videoFileString]);
            } else {
                NSLog(@"[Camera] Failed to delete video file %@: %@", [self videoFileString], error);
            }
        }
    }
}


- (void)stop;
{
    [super stop];

    self.videoDataOutput = nil;
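    // dispatch_release assumes manual reference counting for dispatch objects;
    // under ARC with OS_OBJECT_USE_OBJC the queue is managed automatically and
    // this call would not compile.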
    if (videoDataOutputQueue) {
        dispatch_release(videoDataOutputQueue);
    }

    if (self.recordVideo == YES) {

        if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
            [self.recordAssetWriter finishWriting];
            NSLog(@"[Camera] recording stopped");
        } else {
            NSLog(@"[Camera] Recording Error: asset writer status is not writing");
        }

        self.recordAssetWriter = nil;
        self.recordAssetWriterInput = nil;
        self.recordPixelBufferAdaptor = nil;
    }

    [self.customPreviewLayer removeFromSuperlayer];
    self.customPreviewLayer = nil;
}

// TODO fix
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
{

    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        switch (interfaceOrientation) {
            case UIInterfaceOrientationPortrait:
                NSLog(@"to Portrait");
                rotation_angle = 270;
                break;
            case UIInterfaceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                NSLog(@"to UpsideDown");
                break;
            case UIInterfaceOrientationLandscapeLeft:
                rotation_angle = 0;
                NSLog(@"to LandscapeLeft");
                break;
            case UIInterfaceOrientationLandscapeRight:
                rotation_angle = 180;
                NSLog(@"to LandscapeRight");
                break;
            default:
                break; // leave the layer in its last known orientation
        }

        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                break;
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;
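        // Worked example: device held in Portrait (270) while the capture
        // connection is configured for Portrait (+90) gives 360 % 360 = 0,
        // i.e. no extra rotation of the preview layer is needed.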

        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);

        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}

// TODO fix
- (void)layoutPreviewLayer;
{
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {

        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;

        switch (currentDeviceOrientation) {
            case UIDeviceOrientationPortrait:
                rotation_angle = 270;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                break;
            case UIDeviceOrientationLandscapeLeft:
                NSLog(@"left");
                rotation_angle = 180;
                break;
            case UIDeviceOrientationLandscapeRight:
                NSLog(@"right");
                rotation_angle = 0;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            default:
                break; // leave the layer in its last known orientation
        }

        switch (defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                break;
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;

        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }

        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }

        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }

}

#pragma mark - Private Interface

- (void)createVideoDataOutput;
{
    // Make a video data output
    self.videoDataOutput = [AVCaptureVideoDataOutput new];

    // In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the graylevel intensity values (Y component);
    // in color mode the BGRA format is used
    OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
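
    // With the bi-planar YUV format, plane 0 of the pixel buffer is the
    // full-resolution Y (luma) plane, which the sample buffer delegate below
    // wraps directly as a CV_8UC1 cv::Mat without any conversion.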

    self.videoDataOutput.videoSettings  = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
                                                                      forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // discard if the data output queue is blocked (as we process the still image)
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
        [self.captureSession addOutput:self.videoDataOutput];
    }
    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];


    // set default FPS
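    // CMTimeMake(1, fps) is a frame duration of 1/fps seconds; pinning both the
    // minimum and maximum duration to the same value locks the connection to
    // defaultFPS frames per second.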
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMinFrameDuration) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMinFrameDuration = CMTimeMake(1, self.defaultFPS);
    }
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMaxFrameDuration) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMaxFrameDuration = CMTimeMake(1, self.defaultFPS);
    }

    // set video mirroring for front camera (more intuitive)
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
        if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
        } else {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
        }
    }

    // set default video orientation
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
    }


    // create a custom preview layer
    self.customPreviewLayer = [CALayer layer];
    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    [self layoutPreviewLayer];

    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];


    NSLog(@"[Camera] created AVCaptureVideoDataOutput at %d FPS", self.defaultFPS);
}



- (void)createVideoFileOutput;
{
    /* Video File Output in H.264, via AVAssetWriter */
    NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);

    NSDictionary *outputSettings
     = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
                                                  [NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
                                                  AVVideoCodecH264, AVVideoCodecKey,
                                                  nil
     ];


    self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];


    int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;

    self.recordPixelBufferAdaptor =
               [[AVAssetWriterInputPixelBufferAdaptor alloc]
                    initWithAssetWriterInput:self.recordAssetWriterInput
                    sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey, nil]];

    NSError* error = nil;
    NSLog(@"Create AVAssetWriter with url: %@", [self videoFileURL]);
    self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self videoFileURL]
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    if (error != nil) {
        NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
    }

    [self.recordAssetWriter addInput:self.recordAssetWriterInput];
    self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;

    NSLog(@"[Camera] created AVAssetWriter");
}
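
/*
 * Writer lifecycle (as implemented in this file): createVideoFileOutput only
 * configures the AVAssetWriter; writing starts lazily on the first captured
 * frame (see captureOutput:didOutputSampleBuffer:fromConnection:), and stop
 * calls finishWriting. A sketch of the expected state transitions:
 *
 *   AVAssetWriterStatusUnknown  --startWriting-->   AVAssetWriterStatusWriting
 *   AVAssetWriterStatusWriting  --finishWriting-->  AVAssetWriterStatusCompleted
 */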

- (void)createCaptureOutput;
{
    [self createVideoDataOutput];
    if (self.recordVideo == YES) {
        [self createVideoFileOutput];
    }
}

- (void)createCustomVideoPreview;
{
    [self.parentView.layer addSublayer:self.customPreviewLayer];
}

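// Converts a CGImage into a new CVPixelBuffer. The returned buffer is created
// with a +1 retain count; the caller is responsible for releasing it with
// CVPixelBufferRelease (see the recording path in the sample buffer delegate).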
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{

    CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);


    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate


- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    (void)captureOutput;
    (void)connection;
    if (self.delegate) {

        // convert from Core Media to Core Video
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        void* bufferAddress;
        size_t width;
        size_t height;
        size_t bytesPerRow;

        CGColorSpaceRef colorSpace;
        CGContextRef context;

        int format_opencv;

        OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {

            format_opencv = CV_8UC1;

            bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
            height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);

        } else { // expect kCVPixelFormatType_32BGRA

            format_opencv = CV_8UC4;

            bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
            width = CVPixelBufferGetWidth(imageBuffer);
            height = CVPixelBufferGetHeight(imageBuffer);
            bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

        }

        // wrap the pixel buffer in a cv::Mat (no copy) and hand it to the delegate
        cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);

        CGImageRef dstImage;

        if ([self.delegate respondsToSelector:@selector(processImage:)]) {
            [self.delegate processImage:image];
        }

        // check if matrix data pointer or dimensions were changed by the delegate
        bool iOSimage = false;
        if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
            iOSimage = true;
        }


        // (create color space, create graphics context, render buffer)
        CGBitmapInfo bitmapInfo;

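        // Byte-order choice: an untouched capture buffer is BGRA in memory, so
        // it is rendered as little-endian 32-bit pixels with alpha first; a
        // cv::Mat freshly allocated by the delegate is assumed to hold its
        // RGB/RGBA bytes in memory order, hence big-endian.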
        // decide whether the delegate left us a grayscale, RGB, or RGBA image
        if (image.channels() == 1) {
            colorSpace = CGColorSpaceCreateDeviceGray();
            bitmapInfo = kCGImageAlphaNone;
        } else if (image.channels() == 3) {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaNone;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaPremultipliedFirst;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        }

        if (iOSimage) {
            context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
            dstImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
        } else {

            NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
            CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

            // Creating CGImage from cv::Mat
            dstImage = CGImageCreate(image.cols,                                 // width
                                     image.rows,                                 // height
                                     8,                                          // bits per component
                                     8 * image.elemSize(),                       // bits per pixel
                                     image.step,                                 // bytesPerRow
                                     colorSpace,                                 // colorspace
                                     bitmapInfo,                                 // bitmap info
                                     provider,                                   // CGDataProviderRef
                                     NULL,                                       // decode
                                     false,                                      // should interpolate
                                     kCGRenderingIntentDefault                   // intent
                                     );

            CGDataProviderRelease(provider);
        }


        // render buffer
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customPreviewLayer.contents = (__bridge id)dstImage;
        });

        if (self.recordVideo == YES) {
            lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
//			CMTimeShow(lastSampleTime);
            if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                [self.recordAssetWriter startWriting];
                [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
                if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                    NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
                    // release everything acquired above before bailing out
                    CGImageRelease(dstImage);
                    CGColorSpaceRelease(colorSpace);
                    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
                    return;
                } else {
                    NSLog(@"[Camera] Video recording started");
                }
            }

            if (self.recordAssetWriterInput.readyForMoreMediaData) {
                CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:dstImage];
                if (! [self.recordPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                  withPresentationTime:lastSampleTime] ) {
                    NSLog(@"Video Writing Error");
                }
                // the adaptor retains the buffer while it needs it; balance the
                // +1 reference returned by pixelBufferFromCGImage:
                CVPixelBufferRelease(pixelBuffer);
            }

        }


        // cleanup
        CGImageRelease(dstImage);

        CGColorSpaceRelease(colorSpace);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}


- (void)updateOrientation;
{
    if (self.rotateVideo == YES)
    {
        NSLog(@"rotate..");
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
        [self layoutPreviewLayer];
    }
}


- (void)saveVideo;
{
    if (self.recordVideo == NO) {
        return;
    }

    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:[self videoFileURL]]) {
        [library writeVideoAtPathToSavedPhotosAlbum:[self videoFileURL]
                                    completionBlock:^(NSURL *assetURL, NSError *error){ (void)assetURL; (void)error; }];
    }
}


- (NSURL *)videoFileURL;
{
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSLog(@"[Camera] Video file already exists at %@", outputPath);
    }
    return outputURL;
}



- (NSString *)videoFileString;
{
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    return outputPath;
}

@end