blob: 44b63de0c72690a3aaa30e8462a92e998fb982ee [file] [log] [blame]
denicijad2088152017-04-28 02:14:54 -07001/*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11#import "RTCMTLRenderer+Private.h"
12
13#import <Metal/Metal.h>
14#import <MetalKit/MetalKit.h>
15
16#import "WebRTC/RTCLogging.h"
17#import "WebRTC/RTCVideoFrame.h"
Peter Hanspersfc4a9c92018-05-23 16:30:00 +020018#import "WebRTC/RTCVideoFrameBuffer.h"
denicijad2088152017-04-28 02:14:54 -070019
Mirko Bonadei92ea95e2017-09-15 06:47:31 +020020#include "api/video/video_rotation.h"
21#include "rtc_base/checks.h"
denicijad2088152017-04-28 02:14:54 -070022
// As defined in shaderSource.
static NSString *const vertexFunctionName = @"vertexPassthrough";
static NSString *const fragmentFunctionName = @"fragmentColorConversion";

// Debug labels attached to the Metal objects created below; visible in
// Xcode GPU frame captures and in Metal validation messages.
static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
static NSString *const commandBufferLabel = @"RTCCommandBuffer";
static NSString *const renderEncoderLabel = @"RTCEncoder";
static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
31
Peter Hanspersfc4a9c92018-05-23 16:30:00 +020032// Computes the texture coordinates given rotation and cropping.
static inline void getCubeVertexData(int cropX,
                                     int cropY,
                                     int cropWidth,
                                     int cropHeight,
                                     size_t frameWidth,
                                     size_t frameHeight,
                                     RTCVideoRotation rotation,
                                     float *buffer) {
  // Normalized crop edges, in [0..1]. For the left/top edge 0.0 means no
  // cropping (e.g. 0.2 skips 20% of that edge); for the right/bottom edge
  // 1.0 means no cropping (e.g. 0.8 keeps 80%).
  float texLeft = cropX / (float)frameWidth;
  float texRight = (cropX + cropWidth) / (float)frameWidth;
  float texTop = cropY / (float)frameHeight;
  float texBottom = (cropY + cropHeight) / (float)frameHeight;

  // Texture (s, t) pairs for the four strip vertices, ordered bottom-left,
  // bottom-right, top-left, top-right in view space. Rotation is applied by
  // permuting which crop corner each vertex samples from.
  float texCoords[8];
  switch (rotation) {
    case RTCVideoRotation_0: {
      float coords[8] = {
          texLeft, texBottom, texRight, texBottom, texLeft, texTop, texRight, texTop};
      memcpy(texCoords, coords, sizeof(coords));
      break;
    }
    case RTCVideoRotation_90: {
      float coords[8] = {
          texRight, texBottom, texRight, texTop, texLeft, texBottom, texLeft, texTop};
      memcpy(texCoords, coords, sizeof(coords));
      break;
    }
    case RTCVideoRotation_180: {
      float coords[8] = {
          texRight, texTop, texLeft, texTop, texRight, texBottom, texLeft, texBottom};
      memcpy(texCoords, coords, sizeof(coords));
      break;
    }
    case RTCVideoRotation_270: {
      float coords[8] = {
          texLeft, texTop, texLeft, texBottom, texRight, texTop, texRight, texBottom};
      memcpy(texCoords, coords, sizeof(coords));
      break;
    }
  }

  // Interleave the fixed clip-space quad positions with the texture
  // coordinates: each output vertex is {x, y, s, t}.
  const float viewCoords[8] = {-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0};
  for (int i = 0; i < 4; ++i) {
    buffer[4 * i + 0] = viewCoords[2 * i + 0];
    buffer[4 * i + 1] = viewCoords[2 * i + 1];
    buffer[4 * i + 2] = texCoords[2 * i + 0];
    buffer[4 * i + 3] = texCoords[2 * i + 1];
  }
}
84
// The max number of command buffers in flight (submitted to GPU).
// For now setting it up to 1.
// In future we might use triple buffering method if it improves performance.
// This is the initial count of _inflight_semaphore, which gates -render.
static const NSInteger kMaxInflightBuffers = 1;
89
@implementation RTCMTLRenderer {
  __kindof MTKView *_view;

  // Controller. Limits the number of command buffers submitted to the GPU at
  // once to kMaxInflightBuffers; -render waits on it, the command buffer's
  // completion handler signals it.
  dispatch_semaphore_t _inflight_semaphore;

  // Renderer. Created in -setupMetal; all nil if no Metal device exists.
  id<MTLDevice> _device;
  id<MTLCommandQueue> _commandQueue;
  id<MTLLibrary> _defaultLibrary;
  id<MTLRenderPipelineState> _pipelineState;

  // Buffers. Interleaved {x, y, s, t} data for a 4-vertex triangle strip,
  // written by getCubeVertexData().
  id<MTLBuffer> _vertexBuffer;

  // Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
  size_t _oldFrameWidth;
  size_t _oldFrameHeight;
  int _oldCropWidth;
  int _oldCropHeight;
  int _oldCropX;
  int _oldCropY;
  RTCVideoRotation _oldRotation;
}

@synthesize rotationOverride = _rotationOverride;

// Metal state is not created here; it is set up lazily when a rendering
// destination is attached via -addRenderingDestination:.
- (instancetype)init {
  if (self = [super init]) {
    _inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
  }

  return self;
}

// Attaches the renderer to |view| and initializes all Metal state.
// Returns NO if Metal is unavailable on this system.
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
  return [self setupWithView:view];
}

#pragma mark - Private

// Configures |view| for rendering and allocates the (initially zeroed)
// vertex buffer. Returns NO if -setupMetal fails.
- (BOOL)setupWithView:(__kindof MTKView *)view {
  BOOL success = NO;
  if ([self setupMetal]) {
    _view = view;
    view.device = _device;
    view.preferredFramesPerSecond = 30;
    view.autoResizeDrawable = NO;

    // 4 vertices x {x, y, s, t}; filled in later by getCubeVertexData().
    float vertexBufferArray[16] = {0};
    _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
                                         length:sizeof(vertexBufferArray)
                                        options:MTLResourceCPUCacheModeWriteCombined];

    [self loadAssets];
    success = YES;
  }
  return success;
}

#pragma mark - Inheritance

- (id<MTLDevice>)currentMetalDevice {
  return _device;
}

// Subclasses must override and return Metal shading-language source that
// defines vertexFunctionName and fragmentFunctionName.
- (NSString *)shaderSource {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
  return nil;
}

// Subclasses must override and bind their frame textures on |renderEncoder|.
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
  RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}

// Refreshes the vertex buffer's crop/rotation data for |frame|.
// NOTE(review): |frame.buffer| is cast to RTCCVPixelBuffer without an
// isKindOfClass: check — this assumes callers never pass another
// RTCVideoFrameBuffer implementation (e.g. RTCI420Buffer); confirm against
// the subclasses' drawFrame paths.
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
  // Apply rotation override if set.
  RTCVideoRotation rotation;
  NSValue *rotationOverride = self.rotationOverride;
  if (rotationOverride) {
    // -getValue:size: exists from iOS 11; fall back to the unchecked
    // -getValue: on older systems.
#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
    if (@available(iOS 11, *)) {
      [rotationOverride getValue:&rotation size:sizeof(rotation)];
    } else
#endif
    {
      [rotationOverride getValue:&rotation];
    }
  } else {
    rotation = frame.rotation;
  }

  RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
  size_t frameWidth = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
  size_t frameHeight = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);

  // Recompute the texture cropping and recreate vertexBuffer if necessary.
  if (pixelBuffer.cropX != _oldCropX || pixelBuffer.cropY != _oldCropY ||
      pixelBuffer.cropWidth != _oldCropWidth || pixelBuffer.cropHeight != _oldCropHeight ||
      rotation != _oldRotation || frameWidth != _oldFrameWidth || frameHeight != _oldFrameHeight) {
    getCubeVertexData(pixelBuffer.cropX,
                      pixelBuffer.cropY,
                      pixelBuffer.cropWidth,
                      pixelBuffer.cropHeight,
                      frameWidth,
                      frameHeight,
                      rotation,
                      (float *)_vertexBuffer.contents);
    _oldCropX = pixelBuffer.cropX;
    _oldCropY = pixelBuffer.cropY;
    _oldCropWidth = pixelBuffer.cropWidth;
    _oldCropHeight = pixelBuffer.cropHeight;
    _oldRotation = rotation;
    _oldFrameWidth = frameWidth;
    _oldFrameHeight = frameHeight;
  }

  return YES;
}

#pragma mark - GPU methods

// Creates the Metal device, command queue, and shader library (compiled from
// the subclass's shaderSource). Returns NO on any failure, leaving the
// renderer unusable.
- (BOOL)setupMetal {
  // Set the view to use the default device.
  _device = MTLCreateSystemDefaultDevice();
  if (!_device) {
    return NO;
  }

  // Create a new command queue.
  _commandQueue = [_device newCommandQueue];

  // Load metal library from source.
  NSError *libraryError = nil;
  NSString *shaderSource = [self shaderSource];

  id<MTLLibrary> sourceLibrary =
      [_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];

  if (libraryError) {
    RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
    return NO;
  }

  if (!sourceLibrary) {
    RTCLogError(@"Metal: Failed to load library. %@", libraryError);
    return NO;
  }
  _defaultLibrary = sourceLibrary;

  return YES;
}

// Builds the render pipeline state from the shader functions named by
// vertexFunctionName/fragmentFunctionName. Failure is logged but not
// otherwise reported to the caller.
- (void)loadAssets {
  id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
  id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];

  MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
  pipelineDescriptor.label = pipelineDescriptorLabel;
  pipelineDescriptor.vertexFunction = vertexFunction;
  pipelineDescriptor.fragmentFunction = fragmentFunction;
  pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
  pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
  NSError *error = nil;
  _pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];

  if (!_pipelineState) {
    RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
  }
}

// Encodes and commits one frame's draw call. Blocks until the previously
// submitted command buffer (if any) has completed on the GPU.
- (void)render {
  // Wait until the inflight (curently sent to GPU) command buffer
  // has completed the GPU work.
  dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);

  id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
  commandBuffer.label = commandBufferLabel;

  // Capture only the semaphore (not self) so the completion handler does not
  // retain the renderer.
  __block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
  [commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
    // GPU work completed.
    dispatch_semaphore_signal(block_semaphore);
  }];

  MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
  if (renderPassDescriptor) { // Valid drawable.
    id<MTLRenderCommandEncoder> renderEncoder =
        [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
    renderEncoder.label = renderEncoderLabel;

    // Set context state.
    [renderEncoder pushDebugGroup:renderEncoderDebugGroup];
    [renderEncoder setRenderPipelineState:_pipelineState];

    [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
    [self uploadTexturesToRenderEncoder:renderEncoder];

    // The cropped quad is drawn as a 4-vertex triangle strip.
    [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
                      vertexStart:0
                      vertexCount:4
                    instanceCount:1];
    [renderEncoder popDebugGroup];
    [renderEncoder endEncoding];

    [commandBuffer presentDrawable:_view.currentDrawable];
  }

  // CPU work is completed, GPU work can be started.
  [commandBuffer commit];
}

#pragma mark - RTCMTLRenderer

// Public entry point: updates vertex/texture state for |frame|, then renders
// it. The autorelease pool bounds per-frame temporary objects.
- (void)drawFrame:(RTCVideoFrame *)frame {
  @autoreleasepool {
    if ([self setupTexturesForFrame:frame]) {
      [self render];
    }
  }
}

@end