/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#import <OCMock/OCMock.h>

#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#include "rtc_base/gunit.h"

#import "base/RTCVideoFrame.h"
#import "components/capturer/RTCCameraVideoCapturer.h"
#import "helpers/AVCaptureSession+DevicePosition.h"
#import "helpers/RTCDispatcher.h"

#if TARGET_OS_IPHONE
// Helper that wraps the bundled Default.png test image in a CMSampleBuffer.
CMSampleBufferRef createTestSampleBufferRef() {
  // This image is already in the testing bundle.
  UIImage *image = [UIImage imageNamed:@"Default.png"];
  CGSize size = image.size;
  CGImageRef imageRef = [image CGImage];

  CVPixelBufferRef pixelBuffer = nullptr;
  CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, nil,
                      &pixelBuffer);

  CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
  // bitsPerComponent and bytesPerRow don't matter here, so use arbitrary values.
  CGContextRef context = CGBitmapContextCreate(nil, size.width, size.height, 8, 8 * size.width,
                                               rgbColorSpace, kCGImageAlphaPremultipliedFirst);

  CGContextDrawImage(
      context, CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)), imageRef);

  CGColorSpaceRelease(rgbColorSpace);
  CGContextRelease(context);

  // We don't really care about the timing.
  CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
  CMVideoFormatDescriptionRef description = nullptr;
  CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &description);

  CMSampleBufferRef sampleBuffer = nullptr;
  CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, YES, NULL, NULL, description,
                                     &timing, &sampleBuffer);
  CFRelease(pixelBuffer);

  return sampleBuffer;
}
#endif
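
// Expose the capturer's internal initializer (which accepts an injected AVCaptureSession) and its
// AVCaptureVideoDataOutputSampleBufferDelegate conformance so the tests can feed sample buffers
// to the capturer directly.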
@interface RTC_OBJC_TYPE (RTCCameraVideoCapturer) (Tests) <AVCaptureVideoDataOutputSampleBufferDelegate>
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate
                  captureSession:(AVCaptureSession *)captureSession;
@end

@interface RTCCameraVideoCapturerTests : NSObject
@property(nonatomic, strong) id delegateMock;
@property(nonatomic, strong) id deviceMock;
@property(nonatomic, strong) id captureConnectionMock;
@property(nonatomic, strong) id captureSessionMock;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCCameraVideoCapturer) * capturer;
@end

@implementation RTCCameraVideoCapturerTests
@synthesize delegateMock = _delegateMock;
@synthesize deviceMock = _deviceMock;
@synthesize captureConnectionMock = _captureConnectionMock;
@synthesize captureSessionMock = _captureSessionMock;
@synthesize capturer = _capturer;

- (void)setup {
  self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
  self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
  self.capturer =
      [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock];
  self.deviceMock = [self createDeviceMock];
}

- (void)setupWithMockedCaptureSession {
  self.captureSessionMock = OCMStrictClassMock([AVCaptureSession class]);
  OCMStub([self.captureSessionMock setSessionPreset:[OCMArg any]]);
  OCMStub([self.captureSessionMock setUsesApplicationAudioSession:NO]);
  OCMStub([self.captureSessionMock canAddOutput:[OCMArg any]]).andReturn(YES);
  OCMStub([self.captureSessionMock addOutput:[OCMArg any]]);
  OCMStub([self.captureSessionMock beginConfiguration]);
  OCMStub([self.captureSessionMock commitConfiguration]);
  self.delegateMock = OCMProtocolMock(@protocol(RTC_OBJC_TYPE(RTCVideoCapturerDelegate)));
  self.captureConnectionMock = OCMClassMock([AVCaptureConnection class]);
  self.capturer =
      [[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:self.delegateMock
                                                       captureSession:self.captureSessionMock];
  self.deviceMock = [self createDeviceMock];
}

- (void)tearDown {
  [self.delegateMock stopMocking];
  [self.deviceMock stopMocking];
  self.delegateMock = nil;
  self.deviceMock = nil;
  self.capturer = nil;
}

#pragma mark - utils

- (id)createDeviceMock {
  return OCMClassMock([AVCaptureDevice class]);
}

#pragma mark - test cases

- (void)testSetupSession {
  AVCaptureSession *session = self.capturer.captureSession;
  EXPECT_TRUE(session != nil);

#if TARGET_OS_IPHONE
  EXPECT_EQ(session.sessionPreset, AVCaptureSessionPresetInputPriority);
  EXPECT_EQ(session.usesApplicationAudioSession, NO);
#endif
  EXPECT_EQ(session.outputs.count, 1u);
}

- (void)testSetupSessionOutput {
  AVCaptureVideoDataOutput *videoOutput = self.capturer.captureSession.outputs[0];
  EXPECT_EQ(videoOutput.alwaysDiscardsLateVideoFrames, NO);
  EXPECT_EQ(videoOutput.sampleBufferDelegate, self.capturer);
}

- (void)testSupportedFormatsForDevice {
  // given
  id validFormat1 = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionRef format;

  // Width and height don't matter here, so use arbitrary values of 123 and 456.
  int width = 123;
  int height = 456;
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8PlanarFullRange, width, height,
                                 nil, &format);
  OCMStub([validFormat1 formatDescription]).andReturn(format);

  id validFormat2 = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, width,
                                 height, nil, &format);
  OCMStub([validFormat2 formatDescription]).andReturn(format);

  id invalidFormat = OCMClassMock([AVCaptureDeviceFormat class]);
  CMVideoFormatDescriptionCreate(nil, kCVPixelFormatType_422YpCbCr8_yuvs, width, height, nil,
                                 &format);
  OCMStub([invalidFormat formatDescription]).andReturn(format);

  NSArray *formats = @[ validFormat1, validFormat2, invalidFormat ];
  OCMStub([self.deviceMock formats]).andReturn(formats);

  // when
  NSArray *supportedFormats =
      [RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:self.deviceMock];

  // then
  EXPECT_EQ(supportedFormats.count, 3u);
  EXPECT_TRUE([supportedFormats containsObject:validFormat1]);
  EXPECT_TRUE([supportedFormats containsObject:validFormat2]);
  EXPECT_TRUE([supportedFormats containsObject:invalidFormat]);

  // cleanup
  [validFormat1 stopMocking];
  [validFormat2 stopMocking];
  [invalidFormat stopMocking];
  validFormat1 = nil;
  validFormat2 = nil;
  invalidFormat = nil;
}

- (void)testDelegateCallbackNotCalledWhenInvalidBuffer {
  // given
  CMSampleBufferRef sampleBuffer = nullptr;
  [[self.delegateMock reject] capturer:[OCMArg any] didCaptureVideoFrame:[OCMArg any]];

  // when
  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  // then
  [self.delegateMock verify];
}

- (void)testDelegateCallbackWithValidBufferAndOrientationUpdate {
#if TARGET_OS_IPHONE
  [UIDevice.currentDevice setValue:@(UIDeviceOrientationPortraitUpsideDown) forKey:@"orientation"];
  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();

  // then
  [[self.delegateMock expect] capturer:self.capturer
                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
                                                                    expectedFrame) {
                    EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_270);
                    return YES;
                  }]];

  // when
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

  // We need to wait for the dispatch to finish.
  WAIT(0, 1000);

  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  [self.delegateMock verify];
  CFRelease(sampleBuffer);
#endif
}

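// Checks that the rotation reported on the delivered frame matches the combination of the
// mocked camera position and the simulated device orientation.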
- (void)testRotationCamera:(AVCaptureDevicePosition)camera
           withOrientation:(UIDeviceOrientation)deviceOrientation {
#if TARGET_OS_IPHONE
  // Mock the AVCaptureConnection as we will get the camera position from the connection's
  // input ports.
  AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
  AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
  NSArray *inputPortsArrayMock = @[captureInputPort];
  AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
  OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
      andReturn(inputPortsArrayMock);
  OCMStub(captureInputPort.input).andReturn(inputPortMock);
  OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
  OCMStub(captureDeviceMock.position).andReturn(camera);

  [UIDevice.currentDevice setValue:@(deviceOrientation) forKey:@"orientation"];

  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();

  [[self.delegateMock expect] capturer:self.capturer
                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
                                                                    expectedFrame) {
                    if (camera == AVCaptureDevicePositionFront) {
                      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
                      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
                      }
                    } else if (camera == AVCaptureDevicePositionBack) {
                      if (deviceOrientation == UIDeviceOrientationLandscapeLeft) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
                      } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) {
                        EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_180);
                      }
                    }
                    return YES;
                  }]];

  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

  // We need to wait for the dispatch to finish.
  WAIT(0, 1000);

  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  [self.delegateMock verify];

  CFRelease(sampleBuffer);
#endif
}

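// Attaches a minimal {Exif} dictionary whose LensModel names the back camera; the
// AVCaptureSession+DevicePosition helper reads this attachment to infer the device position.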
- (void)setExif:(CMSampleBufferRef)sampleBuffer {
  CFMutableDictionaryRef exif = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, NULL, NULL);
  CFDictionarySetValue(exif, CFSTR("LensModel"), CFSTR("iPhone SE back camera 4.15mm f/2.2"));
  CMSetAttachment(sampleBuffer, CFSTR("{Exif}"), exif, kCMAttachmentMode_ShouldPropagate);
}

- (void)testRotationFrame {
#if TARGET_OS_IPHONE
  // Mock the AVCaptureConnection as we will get the camera position from the connection's
  // input ports.
  AVCaptureDeviceInput *inputPortMock = OCMClassMock([AVCaptureDeviceInput class]);
  AVCaptureInputPort *captureInputPort = OCMClassMock([AVCaptureInputPort class]);
  NSArray *inputPortsArrayMock = @[captureInputPort];
  AVCaptureDevice *captureDeviceMock = OCMClassMock([AVCaptureDevice class]);
  OCMStub(((AVCaptureConnection *)self.captureConnectionMock).inputPorts).
      andReturn(inputPortsArrayMock);
  OCMStub(captureInputPort.input).andReturn(inputPortMock);
  OCMStub(inputPortMock.device).andReturn(captureDeviceMock);
  OCMStub(captureDeviceMock.position).andReturn(AVCaptureDevicePositionFront);

  [UIDevice.currentDevice setValue:@(UIDeviceOrientationLandscapeLeft) forKey:@"orientation"];

  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();

  [[self.delegateMock expect] capturer:self.capturer
                  didCaptureVideoFrame:[OCMArg checkWithBlock:^BOOL(RTC_OBJC_TYPE(RTCVideoFrame) *
                                                                    expectedFrame) {
                    // Front camera and landscape left would normally give 180, but the EXIF
                    // attachment says the frame is from the back camera, so the rotation
                    // should be 0.
                    EXPECT_EQ(expectedFrame.rotation, RTCVideoRotation_0);
                    return YES;
                  }]];

  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center postNotificationName:UIDeviceOrientationDidChangeNotification object:nil];

  // We need to wait for the dispatch to finish.
  WAIT(0, 1000);

  [self setExif:sampleBuffer];

  [self.capturer captureOutput:self.capturer.captureSession.outputs[0]
         didOutputSampleBuffer:sampleBuffer
                fromConnection:self.captureConnectionMock];

  [self.delegateMock verify];
  CFRelease(sampleBuffer);
#endif
}

- (void)testImageExif {
#if TARGET_OS_IPHONE
  CMSampleBufferRef sampleBuffer = createTestSampleBufferRef();
  [self setExif:sampleBuffer];

  AVCaptureDevicePosition cameraPosition = [AVCaptureSession
                                            devicePositionForSampleBuffer:sampleBuffer];
  EXPECT_EQ(cameraPosition, AVCaptureDevicePositionBack);
#endif
}

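// The tests below run against the strict AVCaptureSession mock installed by
// -setupWithMockedCaptureSession, so any unstubbed call on the session raises.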
- (void)testStartingAndStoppingCapture {
  id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
      .andReturn(expectedDeviceInputMock);

  OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
  OCMStub([self.deviceMock unlockForConfiguration]);
  OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
  OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
  OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);

  // Set the expectation that the capture session is started with the correct device.
  OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
  OCMExpect([_captureSessionMock startRunning]);
  OCMExpect([_captureSessionMock stopRunning]);

  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];
  [self.capturer stopCapture];

  // The start capture code is dispatched asynchronously.
  OCMVerifyAllWithDelay(_captureSessionMock, 15);
}

- (void)testStartCaptureFailingToLockForConfiguration {
  // The captureSessionMock is a strict mock, so this test will crash if the startCapture
  // method does not return when it fails to lock for configuration.
  OCMExpect([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(NO);

  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock format:format fps:30];

  // The start capture code is dispatched asynchronously.
  OCMVerifyAllWithDelay(self.deviceMock, 15);
}

- (void)testStartingAndStoppingCaptureWithCallbacks {
  id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
      .andReturn(expectedDeviceInputMock);

  OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES);
  OCMStub([self.deviceMock unlockForConfiguration]);
  OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
  OCMStub([_captureSessionMock inputs]).andReturn(@[ expectedDeviceInputMock ]);
  OCMStub([_captureSessionMock removeInput:expectedDeviceInputMock]);

  // Set the expectation that the capture session is started with the correct device.
  OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);
  OCMExpect([_captureSessionMock startRunning]);
  OCMExpect([_captureSessionMock stopRunning]);

  dispatch_semaphore_t completedStopSemaphore = dispatch_semaphore_create(0);

  __block BOOL completedStart = NO;
  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock
                                 format:format
                                    fps:30
                      completionHandler:^(NSError *error) {
                        EXPECT_EQ(error, nil);
                        completedStart = YES;
                      }];

  __block BOOL completedStop = NO;
  [self.capturer stopCaptureWithCompletionHandler:^{
    completedStop = YES;
    dispatch_semaphore_signal(completedStopSemaphore);
  }];

  dispatch_semaphore_wait(completedStopSemaphore,
                          dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
  OCMVerifyAllWithDelay(_captureSessionMock, 15);
  EXPECT_TRUE(completedStart);
  EXPECT_TRUE(completedStop);
}

- (void)testStartCaptureFailingToLockForConfigurationWithCallback {
  id expectedDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  id captureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]);
  OCMStub([captureDeviceInputMock deviceInputWithDevice:self.deviceMock error:[OCMArg setTo:nil]])
      .andReturn(expectedDeviceInputMock);

  id errorMock = OCMClassMock([NSError class]);

  OCMStub([self.deviceMock lockForConfiguration:[OCMArg setTo:errorMock]]).andReturn(NO);
  OCMStub([_captureSessionMock canAddInput:expectedDeviceInputMock]).andReturn(YES);
  OCMStub([self.deviceMock unlockForConfiguration]);

  OCMExpect([_captureSessionMock addInput:expectedDeviceInputMock]);

  dispatch_semaphore_t completedStartSemaphore = dispatch_semaphore_create(0);
  __block NSError *callbackError = nil;

  id format = OCMClassMock([AVCaptureDeviceFormat class]);
  [self.capturer startCaptureWithDevice:self.deviceMock
                                 format:format
                                    fps:30
                      completionHandler:^(NSError *error) {
                        callbackError = error;
                        dispatch_semaphore_signal(completedStartSemaphore);
                      }];

  long ret = dispatch_semaphore_wait(completedStartSemaphore,
                                     dispatch_time(DISPATCH_TIME_NOW, 15.0 * NSEC_PER_SEC));
  EXPECT_EQ(ret, 0);
  EXPECT_EQ(callbackError, errorMock);
}

@end

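// Each gtest TEST() below instantiates the Objective-C helper above and drives one of its test
// methods, since this suite runs under rtc_base/gunit rather than XCTest.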
TEST(RTCCameraVideoCapturerTests, SetupSession) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testSetupSession];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, SetupSessionOutput) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testSetupSessionOutput];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, SupportedFormatsForDevice) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testSupportedFormatsForDevice];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, DelegateCallbackNotCalledWhenInvalidBuffer) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testDelegateCallbackNotCalledWhenInvalidBuffer];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, DelegateCallbackWithValidBufferAndOrientationUpdate) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testDelegateCallbackWithValidBufferAndOrientationUpdate];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, RotationCameraBackLandscapeLeft) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testRotationCamera:AVCaptureDevicePositionBack
           withOrientation:UIDeviceOrientationLandscapeLeft];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, RotationCameraFrontLandscapeLeft) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testRotationCamera:AVCaptureDevicePositionFront
           withOrientation:UIDeviceOrientationLandscapeLeft];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, RotationCameraBackLandscapeRight) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testRotationCamera:AVCaptureDevicePositionBack
           withOrientation:UIDeviceOrientationLandscapeRight];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, RotationCameraFrontLandscapeRight) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testRotationCamera:AVCaptureDevicePositionFront
           withOrientation:UIDeviceOrientationLandscapeRight];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, RotationCameraFrame) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testRotationFrame];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, ImageExif) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setup];
  [test testImageExif];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, StartAndStopCapture) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setupWithMockedCaptureSession];
  [test testStartingAndStoppingCapture];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, StartCaptureFailingToLockForConfiguration) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setupWithMockedCaptureSession];
  [test testStartCaptureFailingToLockForConfiguration];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, StartAndStopCaptureWithCallbacks) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setupWithMockedCaptureSession];
  [test testStartingAndStoppingCaptureWithCallbacks];
  [test tearDown];
}

TEST(RTCCameraVideoCapturerTests, StartCaptureFailingToLockForConfigurationWithCallback) {
  RTCCameraVideoCapturerTests *test = [[RTCCameraVideoCapturerTests alloc] init];
  [test setupWithMockedCaptureSession];
  [test testStartCaptureFailingToLockForConfigurationWithCallback];
  [test tearDown];
}