/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#import "RTCMTLRenderer+Private.h"

#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>

#import "base/RTCLogging.h"
#import "base/RTCVideoFrame.h"
#import "base/RTCVideoFrameBuffer.h"

#include "api/video/video_rotation.h"
#include "rtc_base/checks.h"

// Shader entry-point names; these must match the function names defined in
// the subclass-provided shaderSource.
static NSString *const vertexFunctionName = @"vertexPassthrough";
static NSString *const fragmentFunctionName = @"fragmentColorConversion";

// Debug labels attached to Metal objects; they are visible in Xcode GPU
// frame captures and have no effect on rendering.
static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
static NSString *const commandBufferLabel = @"RTCCommandBuffer";
static NSString *const renderEncoderLabel = @"RTCEncoder";
static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
31
// Computes the interleaved [x, y, u, v] vertex data for the full-screen quad,
// given rotation and cropping.
static inline void getCubeVertexData(int cropX,
                                     int cropY,
                                     int cropWidth,
                                     int cropHeight,
                                     size_t frameWidth,
                                     size_t frameHeight,
                                     RTCVideoRotation rotation,
                                     float *buffer) {
  // Normalized crop edges in [0..1] texture space.
  // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're
  // skipping 20% of the left/top edge.
  // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're
  // skipping 20% of the right/bottom edge (i.e. render up to 80% of the
  // width/height).
  float cropLeft = cropX / (float)frameWidth;
  float cropRight = (cropX + cropWidth) / (float)frameWidth;
  float cropTop = cropY / (float)frameHeight;
  float cropBottom = (cropY + cropHeight) / (float)frameHeight;

  // Texture coordinates (u, v) for the four triangle-strip vertices, in the
  // order (-1,-1), (1,-1), (-1,1), (1,1). Rotation is applied by permuting
  // which crop corner each vertex samples from.
  float u[4];
  float v[4];
  switch (rotation) {
    case RTCVideoRotation_0:
      u[0] = cropLeft;  v[0] = cropBottom;
      u[1] = cropRight; v[1] = cropBottom;
      u[2] = cropLeft;  v[2] = cropTop;
      u[3] = cropRight; v[3] = cropTop;
      break;
    case RTCVideoRotation_90:
      u[0] = cropRight; v[0] = cropBottom;
      u[1] = cropRight; v[1] = cropTop;
      u[2] = cropLeft;  v[2] = cropBottom;
      u[3] = cropLeft;  v[3] = cropTop;
      break;
    case RTCVideoRotation_180:
      u[0] = cropRight; v[0] = cropTop;
      u[1] = cropLeft;  v[1] = cropTop;
      u[2] = cropRight; v[2] = cropBottom;
      u[3] = cropLeft;  v[3] = cropBottom;
      break;
    case RTCVideoRotation_270:
      u[0] = cropLeft;  v[0] = cropTop;
      u[1] = cropLeft;  v[1] = cropBottom;
      u[2] = cropRight; v[2] = cropTop;
      u[3] = cropRight; v[3] = cropBottom;
      break;
  }

  // Interleave as [x, y, u, v] per vertex; the view-space positions are the
  // fixed corners of a full-screen quad.
  const float xy[4][2] = {{-1.0, -1.0}, {1.0, -1.0}, {-1.0, 1.0}, {1.0, 1.0}};
  for (int i = 0; i < 4; i++) {
    buffer[4 * i + 0] = xy[i][0];
    buffer[4 * i + 1] = xy[i][1];
    buffer[4 * i + 2] = u[i];
    buffer[4 * i + 3] = v[i];
  }
}
84
// The max number of command buffers in flight (submitted to GPU).
// For now setting it up to 1.
// In future we might use triple buffering method if it improves performance.
// This value seeds _inflight_semaphore; drawFrame: blocks until the previous
// command buffer's completion handler signals it.
static const NSInteger kMaxInflightBuffers = 1;
89
@implementation RTCMTLRenderer {
  __kindof MTKView *_view;

  // Controller.
  // Throttles CPU frame submission to at most kMaxInflightBuffers command
  // buffers outstanding on the GPU.
  dispatch_semaphore_t _inflight_semaphore;

  // Renderer.
  id<MTLDevice> _device;
  id<MTLCommandQueue> _commandQueue;
  id<MTLLibrary> _defaultLibrary;
  id<MTLRenderPipelineState> _pipelineState;

  // Buffers.
  // Holds 4 vertices x [x, y, u, v]; refilled by setupTexturesForFrame: when
  // crop/rotation/size change.
  id<MTLBuffer> _vertexBuffer;

  // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
  int _oldFrameWidth;
  int _oldFrameHeight;
  int _oldCropWidth;
  int _oldCropHeight;
  int _oldCropX;
  int _oldCropY;
  RTCVideoRotation _oldRotation;
}

@synthesize rotationOverride = _rotationOverride;

- (instancetype)init {
  if (self = [super init]) {
    _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
  }

  return self;
}

- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
  return [self setupWithView:view];
}

#pragma mark - Private

// One-time setup of the Metal device, pipeline and vertex buffer for `view`.
// Returns NO if Metal is unavailable or the shader library fails to build.
- (BOOL)setupWithView:(__kindof MTKView *)view {
  BOOL success = NO;
  if ([self setupMetal]) {
    // _view must be assigned before loadAssets: the pipeline descriptor
    // reads the view's colorPixelFormat.
    _view = view;
    view.device = _device;
    view.preferredFramesPerSecond = 30;
    view.autoResizeDrawable = NO;

    [self loadAssets];

    // Zero-initialized placeholder; real coordinates are written on the
    // first setupTexturesForFrame: call. Write-combined since the CPU only
    // ever writes this buffer.
    float vertexBufferArray[16] = {0};
    _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
                                         length:sizeof(vertexBufferArray)
                                        options:MTLResourceCPUCacheModeWriteCombined];
    success = YES;
  }
  return success;
}
#pragma mark - Inheritance

- (id<MTLDevice>)currentMetalDevice {
  return _device;
}

// Subclasses must return Metal shader source defining the entry points named
// by vertexFunctionName and fragmentFunctionName.
- (NSString *)shaderSource {
  RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
  return nil;
}

// Subclasses must bind their frame textures on the encoder before drawing.
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
  RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
}

// Subclasses must report the frame's full size and crop rectangle through the
// out-parameters.
- (void)getWidth:(int *)width
          height:(int *)height
       cropWidth:(int *)cropWidth
      cropHeight:(int *)cropHeight
           cropX:(int *)cropX
           cropY:(int *)cropY
         ofFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
  RTC_DCHECK_NOTREACHED() << "Virtual method not implemented in subclass.";
}

// Refreshes the vertex buffer's texture coordinates if the frame geometry or
// rotation changed since the last frame. Returns YES when rendering can
// proceed.
- (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
  // Apply rotation override if set.
  RTCVideoRotation rotation;
  NSValue *rotationOverride = self.rotationOverride;
  if (rotationOverride) {
    // getValue:size: is the non-deprecated variant, available from iOS 11.
#if defined(__IPHONE_11_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
    if (@available(iOS 11, *)) {
      [rotationOverride getValue:&rotation size:sizeof(rotation)];
    } else
#endif
    {
      [rotationOverride getValue:&rotation];
    }
  } else {
    rotation = frame.rotation;
  }

  int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY;
  [self getWidth:&frameWidth
          height:&frameHeight
       cropWidth:&cropWidth
      cropHeight:&cropHeight
           cropX:&cropX
           cropY:&cropY
         ofFrame:frame];

  // Recompute the texture cropping and recreate vertexBuffer if necessary.
  if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
      cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
      frameHeight != _oldFrameHeight) {
    getCubeVertexData(cropX,
                      cropY,
                      cropWidth,
                      cropHeight,
                      frameWidth,
                      frameHeight,
                      rotation,
                      (float *)_vertexBuffer.contents);
    _oldCropX = cropX;
    _oldCropY = cropY;
    _oldCropWidth = cropWidth;
    _oldCropHeight = cropHeight;
    _oldRotation = rotation;
    _oldFrameWidth = frameWidth;
    _oldFrameHeight = frameHeight;
  }

  return YES;
}

#pragma mark - GPU methods

// Creates the Metal device, command queue and shader library. Returns NO on
// any failure; safe to call on devices without Metal support.
- (BOOL)setupMetal {
  // Set the view to use the default device.
  _device = MTLCreateSystemDefaultDevice();
  if (!_device) {
    return NO;
  }

  // Create a new command queue.
  _commandQueue = [_device newCommandQueue];

  // Load metal library from source.
  NSError *libraryError = nil;
  NSString *shaderSource = [self shaderSource];

  id<MTLLibrary> sourceLibrary =
      [_device newLibraryWithSource:shaderSource options:nil error:&libraryError];

  // Check the returned library, not the error out-parameter:
  // newLibraryWithSource:options:error: can return a valid library while
  // still populating the error with compile warnings, and treating that as
  // failure would reject a perfectly usable shader.
  if (!sourceLibrary) {
    RTCLogError(@"Metal: Failed to load library. %@", libraryError);
    return NO;
  }
  _defaultLibrary = sourceLibrary;

  return YES;
}

// Builds the render pipeline state from the compiled shader library. Must run
// after _view is set (uses its colorPixelFormat).
- (void)loadAssets {
  id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
  id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];

  MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
  pipelineDescriptor.label = pipelineDescriptorLabel;
  pipelineDescriptor.vertexFunction = vertexFunction;
  pipelineDescriptor.fragmentFunction = fragmentFunction;
  pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
  pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
  NSError *error = nil;
  _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];

  if (!_pipelineState) {
    RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
  }
}

// Encodes and submits one draw of the current vertex buffer and textures.
// Callers must have acquired _inflight_semaphore (see drawFrame:); it is
// signaled from the command buffer's completion handler.
- (void)render {
  id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
  commandBuffer.label = commandBufferLabel;

  // Capture only the semaphore, not self, so the handler cannot extend the
  // renderer's lifetime or create a retain cycle.
  __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
  [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
    // GPU work completed.
    dispatch_semaphore_signal(block_semaphore);
  }];

  MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
  if (renderPassDescriptor) {  // Valid drawable.
    id<MTLRenderCommandEncoder> renderEncoder =
        [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
    renderEncoder.label = renderEncoderLabel;

    // Set context state.
    [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
    [renderEncoder setRenderPipelineState:_pipelineState];
    [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
    [self uploadTexturesToRenderEncoder:renderEncoder];

    // The quad is drawn as a 4-vertex triangle strip.
    [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
                      vertexStart:0
                      vertexCount:4
                    instanceCount:1];
    [renderEncoder popDebugGroup];
    [renderEncoder endEncoding];

    [commandBuffer presentDrawable:_view.currentDrawable];
  }

  // CPU work is completed, GPU work can be started.
  // Committed even with no encoder so the completion handler always fires
  // and re-signals the semaphore.
  [commandBuffer commit];
}

#pragma mark - RTCMTLRenderer

- (void)drawFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
  @autoreleasepool {
    // Wait until the inflight (curently sent to GPU) command buffer
    // has completed the GPU work.
    dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);

    if ([self setupTexturesForFrame:frame]) {
      [self render];
    } else {
      // Nothing was committed, so no completion handler will signal; release
      // the semaphore here to avoid a permanent stall.
      dispatch_semaphore_signal(_inflight_semaphore);
    }
  }
}

@end
329