/**
 * Copyright 2017 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCFileVideoCapturer.h"

#import "base/RTCLogging.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#include "rtc_base/system/gcd_helpers.h"

NSString *const kRTCFileVideoCapturerErrorDomain =
    @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)";

typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
  RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
  RTCFileVideoCapturerErrorCode_FileNotFound
};

typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
  RTCFileVideoCapturerStatusNotInitialized,
  RTCFileVideoCapturerStatusStarted,
  RTCFileVideoCapturerStatusStopped
};

@interface RTC_OBJC_TYPE (RTCFileVideoCapturer) ()

// Presentation timestamp of the most recently published frame; the delta to
// the next frame's timestamp drives the pacing timer.
@property(nonatomic, assign) CMTime lastPresentationTime;

// Resolved bundle URL of the file being captured from.
@property(nonatomic, strong) NSURL *fileURL;

@end

@implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
  AVAssetReader *_reader;
  AVAssetReaderTrackOutput *_outTrack;
  RTCFileVideoCapturerStatus _status;
  dispatch_queue_t _frameQueue;
}

@synthesize lastPresentationTime = _lastPresentationTime;
@synthesize fileURL = _fileURL;

- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
                            onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
  if (_status == RTCFileVideoCapturerStatusStarted) {
    // NSLocalizedDescriptionKey carries the human-readable message;
    // NSUnderlyingErrorKey (used previously) requires an NSError value.
    NSError *error =
        [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
                            code:RTCFileVideoCapturerErrorCode_CapturerRunning
                        userInfo:@{NSLocalizedDescriptionKey : @"Capturer has been started."}];
    if (errorBlock) {
      errorBlock(error);
    }
    return;
  }
  _status = RTCFileVideoCapturerStatusStarted;

  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    NSString *pathForFile = [self pathForFileName:nameOfFile];
    if (!pathForFile) {
      NSString *errorString =
          [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
      NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
                                           code:RTCFileVideoCapturerErrorCode_FileNotFound
                                       userInfo:@{NSLocalizedDescriptionKey : errorString}];
      if (errorBlock) {
        errorBlock(error);
      }
      return;
    }

    // kCMTimeZero is a valid zero time. CMTimeMake(0, 0) (used previously) has
    // timescale 0 and is an invalid CMTime, which made the first frame's
    // pacing delta evaluate to NaN.
    self.lastPresentationTime = kCMTimeZero;

    self.fileURL = [NSURL fileURLWithPath:pathForFile];
    [self setupReaderOnError:errorBlock];
  });
}

// Creates an AVAssetReader for `fileURL`, starts it reading, and kicks off the
// read loop. `errorBlock` may be nil — readNextBuffer passes nil when the
// reader is re-created to loop the file — so every invocation is nil-checked.
- (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock {
  AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_fileURL options:nil];

  // Guard against audio-only or unreadable files; initWithTrack:nil would
  // otherwise be passed a nil track.
  AVAssetTrack *videoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
  if (!videoTrack) {
    NSString *errorString =
        [NSString stringWithFormat:@"File %@ does not contain a video track", _fileURL];
    NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
                                         code:RTCFileVideoCapturerErrorCode_FileNotFound
                                     userInfo:@{NSLocalizedDescriptionKey : errorString}];
    if (errorBlock) {
      errorBlock(error);
    }
    return;
  }

  NSError *error = nil;
  _reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
  // Per Cocoa convention, failure is signaled by the nil return value, not by
  // the error out-parameter.
  if (!_reader) {
    if (errorBlock && error) {
      errorBlock(error);
    }
    return;
  }

  // NV12 full-range output, the pixel format RTCCVPixelBuffer wraps directly.
  NSDictionary *options = @{
    (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  _outTrack = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:options];
  [_reader addOutput:_outTrack];

  [_reader startReading];
  RTCLog(@"File capturer started reading");
  [self readNextBuffer];
}

- (void)stopCapture {
  _status = RTCFileVideoCapturerStatusStopped;
  RTCLog(@"File capturer stopped.");
}

#pragma mark - Private

// Resolves `fileName` (e.g. @"foo.mp4") to its full path in the main bundle.
// Returns nil when the name has no extension or the resource is missing.
// Uses NSPathUtilities so names containing extra dots (e.g. @"a.b.mp4") work;
// the previous componentsSeparatedByString:@"." approach rejected them.
- (nullable NSString *)pathForFileName:(NSString *)fileName {
  NSString *name = [fileName stringByDeletingPathExtension];
  NSString *extension = [fileName pathExtension];
  if (name.length == 0 || extension.length == 0) {
    return nil;
  }
  return [[NSBundle mainBundle] pathForResource:name ofType:extension];
}

// Lazily-created serial background queue on which the pacing timers fire and
// frames are delivered to the delegate.
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue = RTCDispatchQueueCreateWithTarget(
        "org.webrtc.filecapturer.video",
        DISPATCH_QUEUE_SERIAL,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
  }
  return _frameQueue;
}

// Pulls the next sample buffer from the reader. Restarts the reader when the
// file is exhausted (so playback loops) and stops cleanly on reader failure.
- (void)readNextBuffer {
  if (_status == RTCFileVideoCapturerStatusStopped) {
    [_reader cancelReading];
    _reader = nil;
    return;
  }

  if (_reader.status == AVAssetReaderStatusCompleted) {
    // End of file: tear down and re-create the reader so the capture loops.
    // errorBlock is intentionally nil here — see setupReaderOnError:.
    [_reader cancelReading];
    _reader = nil;
    [self setupReaderOnError:nil];
    return;
  }

  CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
  if (!sampleBuffer) {
    // A failed reader returns NULL forever; recursing here would never
    // terminate (the previous code only handled the Completed status).
    if (_reader.status == AVAssetReaderStatusFailed) {
      RTCLog(@"File capturer stopped: asset reader failed: %@", _reader.error);
      return;
    }
    [self readNextBuffer];
    return;
  }
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    CFRelease(sampleBuffer);
    [self readNextBuffer];
    return;
  }

  [self publishSampleBuffer:sampleBuffer];
}

// Schedules `sampleBuffer` (ownership is transferred to this method; it is
// CFReleased on every path inside the timer handler) for delegate delivery
// after the inter-frame interval derived from its presentation timestamp,
// then kicks off the next read.
// NOTE(review): after a loop restart, lastPresentationTime still holds the
// final timestamp of the previous pass, so the first looped frame is
// published immediately — presumably acceptable; confirm if pacing across
// the loop boundary matters.
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
  CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  Float64 presentationDifference =
      CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
  _lastPresentationTime = presentationTime;
  // llround, not lroundf: a float intermediate cannot represent
  // nanosecond-scale values (> 2^24) exactly.
  int64_t presentationDifferenceRound = llround(presentationDifference * NSEC_PER_SEC);

  __block dispatch_source_t timer = [self createStrictTimer];
  // Strict timer that will fire |presentationDifferenceRound| ns from now and never again.
  dispatch_source_set_timer(timer,
                            dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
                            DISPATCH_TIME_FOREVER,
                            0);
  dispatch_source_set_event_handler(timer, ^{
    // One-shot: cancel and drop the source so it is not retained past firing.
    dispatch_source_cancel(timer);
    timer = nil;

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) {
      CFRelease(sampleBuffer);
      dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self readNextBuffer];
      });
      return;
    }

    RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
        [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
    NSTimeInterval timeStampSeconds = CACurrentMediaTime();
    // llround for the same precision reason as above (~1e12 ns magnitudes).
    int64_t timeStampNs = llround(timeStampSeconds * NSEC_PER_SEC);
    RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
        [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
                                                    rotation:0
                                                 timeStampNs:timeStampNs];
    CFRelease(sampleBuffer);

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
      [self readNextBuffer];
    });

    [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
  });
  dispatch_activate(timer);
}

// Returns an inactive strict (non-coalescing) timer targeting the frame
// queue; the caller configures the deadline and activates it.
- (dispatch_source_t)createStrictTimer {
  dispatch_source_t timer = dispatch_source_create(
      DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
  return timer;
}

- (void)dealloc {
  [self stopCapture];
}

@end