blob: 5363aaeda21421c35f7861544702c57c91e9873d [file] [log] [blame]
/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
11#import "RTCMTLRenderer+Private.h"
12
13#import <Metal/Metal.h>
14#import <MetalKit/MetalKit.h>
15
16#import "WebRTC/RTCLogging.h"
17#import "WebRTC/RTCVideoFrame.h"
Peter Hanspers5daaf7d2018-06-01 10:34:37 +020018#import "WebRTC/RTCVideoFrameBuffer.h"
denicijad2088152017-04-28 02:14:54 -070019
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020020#include "api/video/video_rotation.h"
21#include "rtc_base/checks.h"
denicijad2088152017-04-28 02:14:54 -070022
// Shader entry point names. These must match the function names declared in
// the shader source returned by the subclass's -shaderSource (see loadAssets).
static NSString *const vertexFunctionName = @"vertexPassthrough";
static NSString *const fragmentFunctionName = @"fragmentColorConversion";

// Labels attached to the Metal pipeline/command objects for debugging.
static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
static NSString *const commandBufferLabel = @"RTCCommandBuffer";
static NSString *const renderEncoderLabel = @"RTCEncoder";
static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
31
// Computes the texture coordinates given rotation and cropping, writing
// 4 interleaved {x, y, u, v} vertices (16 floats) into |buffer|.
static inline void getCubeVertexData(int cropX,
                                     int cropY,
                                     int cropWidth,
                                     int cropHeight,
                                     size_t frameWidth,
                                     size_t frameHeight,
                                     RTCVideoRotation rotation,
                                     float *buffer) {
  // Normalized crop bounds, in [0..1] texture space.
  // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're
  // skipping 20% of the left/top edge.
  // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're
  // skipping 20% of the right/bottom edge (i.e. render up to 80% of the
  // width/height).
  float texLeft = cropX / (float)frameWidth;
  float texRight = (cropX + cropWidth) / (float)frameWidth;
  float texTop = cropY / (float)frameHeight;
  float texBottom = (cropY + cropHeight) / (float)frameHeight;

  // Texture coordinates (u, v) for the four quad corners, in view-space
  // order bottom-left, bottom-right, top-left, top-right. Rotation is
  // applied by permuting which crop corner maps to which view corner.
  float texCoords[8] = {0};
  switch (rotation) {
    case RTCVideoRotation_0:
      texCoords[0] = texLeft;
      texCoords[1] = texBottom;
      texCoords[2] = texRight;
      texCoords[3] = texBottom;
      texCoords[4] = texLeft;
      texCoords[5] = texTop;
      texCoords[6] = texRight;
      texCoords[7] = texTop;
      break;
    case RTCVideoRotation_90:
      texCoords[0] = texRight;
      texCoords[1] = texBottom;
      texCoords[2] = texRight;
      texCoords[3] = texTop;
      texCoords[4] = texLeft;
      texCoords[5] = texBottom;
      texCoords[6] = texLeft;
      texCoords[7] = texTop;
      break;
    case RTCVideoRotation_180:
      texCoords[0] = texRight;
      texCoords[1] = texTop;
      texCoords[2] = texLeft;
      texCoords[3] = texTop;
      texCoords[4] = texRight;
      texCoords[5] = texBottom;
      texCoords[6] = texLeft;
      texCoords[7] = texBottom;
      break;
    case RTCVideoRotation_270:
      texCoords[0] = texLeft;
      texCoords[1] = texTop;
      texCoords[2] = texLeft;
      texCoords[3] = texBottom;
      texCoords[4] = texRight;
      texCoords[5] = texTop;
      texCoords[6] = texRight;
      texCoords[7] = texBottom;
      break;
  }

  // Interleave the fixed view-space corner positions with the rotated
  // texture coordinates; each output vertex is {x, y, u, v}.
  const float viewCorners[8] = {-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0};
  for (int i = 0; i < 4; ++i) {
    buffer[i * 4 + 0] = viewCorners[i * 2 + 0];
    buffer[i * 4 + 1] = viewCorners[i * 2 + 1];
    buffer[i * 4 + 2] = texCoords[i * 2 + 0];
    buffer[i * 4 + 3] = texCoords[i * 2 + 1];
  }
}
84
// The maximum number of command buffers in flight (submitted to the GPU) at
// any time; enforced via _inflight_semaphore in -render.
// Kept at 1 for now. In the future we might use triple buffering if it
// improves performance.
static const NSInteger kMaxInflightBuffers = 1;
89
@implementation RTCMTLRenderer {
  // The MTKView this renderer draws into; set in -setupWithView:.
  __kindof MTKView *_view;

  // Controller. Throttles CPU-side encoding to kMaxInflightBuffers command
  // buffers; signaled from the command buffer's completion handler.
  dispatch_semaphore_t _inflight_semaphore;

  // Renderer.
  id<MTLDevice> _device;
  id<MTLCommandQueue> _commandQueue;
  id<MTLLibrary> _defaultLibrary;
  id<MTLRenderPipelineState> _pipelineState;

  // Buffers. Holds the 16-float quad written by getCubeVertexData.
  id<MTLBuffer> _vertexBuffer;

  // Values affecting the vertex buffer. Stored for comparison to avoid
  // unnecessary recreation (see -setupTexturesForFrame:).
  int _oldFrameWidth;
  int _oldFrameHeight;
  int _oldCropWidth;
  int _oldCropHeight;
  int _oldCropX;
  int _oldCropY;
  RTCVideoRotation _oldRotation;
}

@synthesize rotationOverride = _rotationOverride;
116
// Initializes the renderer and the semaphore that limits the number of
// in-flight GPU command buffers (see kMaxInflightBuffers).
- (instancetype)init {
  self = [super init];
  if (self) {
    _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
  }
  return self;
}
124
// Attaches |view| as the rendering destination and initializes the Metal
// pipeline for it. Returns NO if Metal setup fails (e.g. no Metal device).
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
  BOOL configured = [self setupWithView:view];
  return configured;
}
128
129#pragma mark - Private
130
// Configures |view| for Metal rendering, compiles the pipeline assets and
// allocates the shared vertex buffer. Returns NO if Metal is unavailable.
- (BOOL)setupWithView:(__kindof MTKView *)view {
  if (![self setupMetal]) {
    return NO;
  }

  _view = view;
  view.device = _device;
  view.preferredFramesPerSecond = 30;
  view.autoResizeDrawable = NO;

  [self loadAssets];

  // Zero-initialized quad; filled lazily on the first frame by
  // -setupTexturesForFrame:.
  float vertexBufferArray[16] = {0};
  _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
                                       length:sizeof(vertexBufferArray)
                                      options:MTLResourceCPUCacheModeWriteCombined];
  return YES;
}
149#pragma mark - Inheritance
150
// Returns the Metal device created in -setupMetal, or nil before setup.
- (id<MTLDevice>)currentMetalDevice {
  return _device;
}
154
// Abstract: returns the Metal shader source to be compiled in -setupMetal.
// It must define the vertexFunctionName/fragmentFunctionName entry points.
// Subclasses must override; the base implementation asserts.
- (NSString *)shaderSource {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
  return nil;
}
159
// Abstract: binds the current frame's textures to |renderEncoder| before the
// draw call in -render. Subclasses must override; the base implementation
// asserts.
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}
163
// Abstract: extracts the frame dimensions and crop rectangle from |frame|
// through the out-parameters. Subclasses override this for their concrete
// frame buffer type; the base implementation asserts.
- (void)getWidth:(int *)width
          height:(int *)height
       cropWidth:(int *)cropWidth
      cropHeight:(int *)cropHeight
           cropX:(int *)cropX
           cropY:(int *)cropY
         ofFrame:(nonnull RTCVideoFrame *)frame {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}
173
// Resolves the effective rotation and crop for |frame| and rewrites the
// vertex buffer if any of them changed since the previous frame. Always
// returns YES here; subclasses extend this to upload the actual texture
// planes and may fail.
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
  // Apply rotation override if set.
  RTCVideoRotation rotation;
  NSValue *rotationOverride = self.rotationOverride;
  if (rotationOverride) {
#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
    if (@available(iOS 11, *)) {
      // -getValue:size: (iOS 11+) validates the destination buffer size.
      [rotationOverride getValue:&rotation size:sizeof(rotation)];
    } else
#endif
    {
      [rotationOverride getValue:&rotation];
    }
  } else {
    rotation = frame.rotation;
  }

  // Frame and crop geometry come from the subclass, which knows the
  // concrete buffer type backing |frame|.
  int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY;
  [self getWidth:&frameWidth
          height:&frameHeight
       cropWidth:&cropWidth
      cropHeight:&cropHeight
           cropX:&cropX
           cropY:&cropY
         ofFrame:frame];

  // Recompute the texture cropping and recreate vertexBuffer if necessary.
  // Comparing against the cached _old* values avoids rewriting the
  // write-combined buffer on every frame.
  if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
      cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
      frameHeight != _oldFrameHeight) {
    getCubeVertexData(cropX,
                      cropY,
                      cropWidth,
                      cropHeight,
                      frameWidth,
                      frameHeight,
                      rotation,
                      (float *)_vertexBuffer.contents);
    _oldCropX = cropX;
    _oldCropY = cropY;
    _oldCropWidth = cropWidth;
    _oldCropHeight = cropHeight;
    _oldRotation = rotation;
    _oldFrameWidth = frameWidth;
    _oldFrameHeight = frameHeight;
  }

  return YES;
}
223
224#pragma mark - GPU methods
225
// Creates the Metal device, command queue and shader library. Returns NO if
// Metal is not supported on this device or the shader source fails to
// compile.
- (BOOL)setupMetal {
  // Set the view to use the default device.
  _device = MTLCreateSystemDefaultDevice();
  if (!_device) {
    return NO;
  }

  // Create a new command queue.
  _commandQueue = [_device newCommandQueue];

  // Load metal library from source.
  NSError *libraryError = nil;
  NSString *shaderSource = [self shaderSource];

  id<MTLLibrary> sourceLibrary =
      [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];

  // Check the returned library rather than the error object: per the Metal
  // API contract, |libraryError| may be populated with non-fatal warnings
  // even when compilation succeeds, so branching on the error alone would
  // incorrectly fail setup.
  if (!sourceLibrary) {
    RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
    return NO;
  }
  _defaultLibrary = sourceLibrary;

  return YES;
}
256
// Builds the render pipeline state from the shader library compiled in
// -setupMetal. On any failure, |_pipelineState| remains nil and the problem
// is logged.
- (void)loadAssets {
  id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
  id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
  // newFunctionWithName: returns nil if the entry point is missing from the
  // subclass's shader source; fail early with a specific message instead of
  // building a descriptor with nil functions.
  if (!vertexFunction || !fragmentFunction) {
    RTCLogError(@"Metal: Failed to load shader functions %@ / %@.",
                vertexFunctionName,
                fragmentFunctionName);
    return;
  }

  MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
  pipelineDescriptor.label = pipelineDescriptorLabel;
  pipelineDescriptor.vertexFunction = vertexFunction;
  pipelineDescriptor.fragmentFunction = fragmentFunction;
  // Match the view's drawable format; no depth attachment is needed for a
  // full-screen quad.
  pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
  pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
  NSError *error = nil;
  _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];

  if (!_pipelineState) {
    RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
  }
}
274
// Encodes and commits one draw of the current frame. Blocks the calling
// thread until the previously submitted command buffer has finished on the
// GPU, so at most kMaxInflightBuffers buffers are ever in flight.
- (void)render {
  // Wait until the inflight (currently sent to GPU) command buffer
  // has completed the GPU work.
  dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);

  id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
  commandBuffer.label = commandBufferLabel;

  // Capture only the semaphore (not self) so the completion handler does not
  // retain the renderer.
  __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
  [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
    // GPU work completed.
    dispatch_semaphore_signal(block_semaphore);
  }];

  MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
  if (renderPassDescriptor) {  // Valid drawable.
    id<MTLRenderCommandEncoder> renderEncoder =
        [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
    renderEncoder.label = renderEncoderLabel;

    // Set context state.
    [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
    [renderEncoder setRenderPipelineState:_pipelineState];
    [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
    [self uploadTexturesToRenderEncoder:renderEncoder];

    // Draw the cropped/rotated quad: 4 vertices as a triangle strip.
    [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
                      vertexStart:0
                      vertexCount:4
                    instanceCount:1];
    [renderEncoder popDebugGroup];
    [renderEncoder endEncoding];

    [commandBuffer presentDrawable:_view.currentDrawable];
  }

  // CPU work is completed, GPU work can be started.
  // Even with no valid drawable the (empty) buffer is committed so the
  // completion handler fires and the semaphore is released.
  [commandBuffer commit];
}
314
315#pragma mark - RTCMTLRenderer
316
// Renders |frame| into the attached view. The autorelease pool bounds the
// lifetime of the per-frame temporaries created while encoding.
- (void)drawFrame:(RTCVideoFrame *)frame {
  @autoreleasepool {
    if (![self setupTexturesForFrame:frame]) {
      return;
    }
    [self render];
  }
}
324
325@end