Support cropping and rotation override in Metal renderers.
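The Metal renderers now derive their texture coordinates from the RTCCVPixelBuffer crop
rectangle, and RTCMTLRenderer exposes a rotationOverride property that, when set, takes
precedence over the rotation carried by the incoming frame.

Illustrative use of the override (a sketch only; "renderer" stands for any RTCMTLRenderer
instance and the call site depends on the embedding app):

  RTCVideoRotation rotation = RTCVideoRotation_90;
  renderer.rotationOverride =
      [NSValue valueWithBytes:&rotation objCType:@encode(RTCVideoRotation)];
  // Setting it back to nil restores the frame's own rotation.
  renderer.rotationOverride = nil;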
Bug: webrtc:9301
Change-Id: Ic761f0fd6ad6fee74021b84903f1653878453533
Reviewed-on: https://webrtc-review.googlesource.com/80460
Reviewed-by: Anders Carlsson <andersc@webrtc.org>
Commit-Queue: Peter Hanspers <peterhanspers@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23522}
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
index ae55258..03a381f 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLI420Renderer.mm
@@ -90,7 +90,9 @@
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- [super setupTexturesForFrame:frame];
+ if (![super setupTexturesForFrame:frame]) {
+ return NO;
+ }
id<MTLDevice> device = [self currentMetalDevice];
if (!device) {
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
index d8dd7e7..f016713 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLNV12Renderer.mm
@@ -89,7 +89,9 @@
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
- [super setupTexturesForFrame:frame];
+ if (![super setupTexturesForFrame:frame]) {
+ return NO;
+ }
CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
id<MTLTexture> lumaTexture = nil;
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
index e735879..481a7fb 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLRGBRenderer.mm
@@ -88,7 +88,9 @@
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
- [super setupTexturesForFrame:frame];
+ if (![super setupTexturesForFrame:frame]) {
+ return NO;
+ }
CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
id<MTLTexture> gpuTexture = nil;
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
index e279b09..bd1b6b7 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.h
@@ -46,10 +46,16 @@
@end
/**
- * Implementation of RTCMTLRenderer protocol for rendering native nv12 video frames.
+ * Implementation of RTCMTLRenderer protocol.
*/
NS_AVAILABLE(10_11, 9_0)
@interface RTCMTLRenderer : NSObject<RTCMTLRenderer>
+
+/** @abstract A wrapped RTCVideoRotation, or nil.
+ @discussion When not nil, this rotation is used for rendering instead of the frame's own rotation.
+ */
+@property(atomic, nullable) NSValue *rotationOverride;
+
@end
NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
index 68486dc..22f1488 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLRenderer.mm
@@ -15,6 +15,7 @@
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrame.h"
+#import "WebRTC/RTCVideoFrameBuffer.h"
#include "api/video/video_rotation.h"
#include "rtc_base/checks.h"
@@ -28,31 +29,57 @@
static NSString *const renderEncoderLabel = @"RTCEncoder";
static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
-static const float cubeVertexData[64] = {
- -1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0,
+// Computes the texture coordinates given rotation and cropping.
+static inline void getCubeVertexData(int cropX,
+ int cropY,
+ int cropWidth,
+ int cropHeight,
+ size_t frameWidth,
+ size_t frameHeight,
+ RTCVideoRotation rotation,
+ float *buffer) {
+ // The computed values are the adjusted texture coordinates, in [0..1].
+ // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
+ // left/top edge.
+ // For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
+ // right/bottom edge (i.e. render up to 80% of the width/height).
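+ // Example: a 1280x720 frame cropped to its centered 640x360 region (cropX = 320, cropY = 180)
+ // yields cropLeft = 0.25, cropRight = 0.75, cropTop = 0.25 and cropBottom = 0.75.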
+ float cropLeft = cropX / (float)frameWidth;
+ float cropRight = (cropX + cropWidth) / (float)frameWidth;
+ float cropTop = cropY / (float)frameHeight;
+ float cropBottom = (cropY + cropHeight) / (float)frameHeight;
- // rotation = 90, offset = 16.
- -1.0, -1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0,
-
- // rotation = 180, offset = 32.
- -1.0, -1.0, 1.0, 0.0, 1.0, -1.0, 0.0, 0.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0,
-
- // rotation = 270, offset = 48.
- -1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 0.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0,
-};
-
-static inline int offsetForRotation(RTCVideoRotation rotation) {
+ // These arrays map the view coordinates to texture coordinates, taking cropping and rotation
+ // into account. The first two columns are view coordinates, the last two are texture coordinates.
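+ // The four vertices are listed in triangle-strip order (bottom-left, bottom-right, top-left,
+ // top-right in view space), matching the MTLPrimitiveTypeTriangleStrip draw call used by this
+ // renderer.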
switch (rotation) {
- case RTCVideoRotation_0:
- return 0;
- case RTCVideoRotation_90:
- return 16;
- case RTCVideoRotation_180:
- return 32;
- case RTCVideoRotation_270:
- return 48;
+ case RTCVideoRotation_0: {
+ float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
+ 1.0, -1.0, cropRight, cropBottom,
+ -1.0, 1.0, cropLeft, cropTop,
+ 1.0, 1.0, cropRight, cropTop};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_90: {
+ float values[16] = {-1.0, -1.0, cropRight, cropBottom,
+ 1.0, -1.0, cropRight, cropTop,
+ -1.0, 1.0, cropLeft, cropBottom,
+ 1.0, 1.0, cropLeft, cropTop};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_180: {
+ float values[16] = {-1.0, -1.0, cropRight, cropTop,
+ 1.0, -1.0, cropLeft, cropTop,
+ -1.0, 1.0, cropRight, cropBottom,
+ 1.0, 1.0, cropLeft, cropBottom};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
+ case RTCVideoRotation_270: {
+ float values[16] = {-1.0, -1.0, cropLeft, cropTop,
+ 1.0, -1.0, cropLeft, cropBottom,
+ -1.0, 1.0, cropRight, cropTop,
+ 1.0, 1.0, cropRight, cropBottom};
+ memcpy(buffer, &values, sizeof(values));
+ } break;
}
- return 0;
}
// The max number of command buffers in flight (submitted to GPU).
@@ -75,14 +102,20 @@
// Buffers.
id<MTLBuffer> _vertexBuffer;
- // RTC Frame parameters.
- int _offset;
+ // Values affecting the vertex data. Stored for comparison to avoid unnecessary recomputation.
+ size_t _oldFrameWidth;
+ size_t _oldFrameHeight;
+ int _oldCropWidth;
+ int _oldCropHeight;
+ int _oldCropX;
+ int _oldCropY;
+ RTCVideoRotation _oldRotation;
}
+@synthesize rotationOverride = _rotationOverride;
+
- (instancetype)init {
if (self = [super init]) {
- // _offset of 0 is equal to rotation of 0.
- _offset = 0;
_inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
}
@@ -98,9 +131,17 @@
- (BOOL)setupWithView:(__kindof MTKView *)view {
BOOL success = NO;
if ([self setupMetal]) {
- [self setupView:view];
+ _view = view;
+ view.device = _device;
+ view.preferredFramesPerSecond = 30;
+ view.autoResizeDrawable = NO;
+
[self loadAssets];
- [self setupBuffers];
+
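+ // The vertex data is written by the CPU (in getCubeVertexData) and only read by the GPU, so a
+ // write-combined CPU cache mode is a suitable choice for this buffer.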
+ float vertexBufferArray[16] = {0};
+ _vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
+ length:sizeof(vertexBufferArray)
+ options:MTLResourceCPUCacheModeWriteCombined];
success = YES;
}
return success;
@@ -121,7 +162,47 @@
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
- _offset = offsetForRotation(frame.rotation);
+ // Apply rotation override if set.
+ RTCVideoRotation rotation;
+ NSValue *rotationOverride = self.rotationOverride;
+ if (rotationOverride) {
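+ // -getValue:size: validates the destination size but is only available from iOS 11; fall back
+ // to the unchecked -getValue: on older systems.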
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 11, *)) {
+ [rotationOverride getValue:&rotation size:sizeof(rotation)];
+ } else
+#endif
+ {
+ [rotationOverride getValue:&rotation];
+ }
+ } else {
+ rotation = frame.rotation;
+ }
+
+ // Cropping information is only available for RTCCVPixelBuffer-backed frames; other buffer
+ // types (e.g. I420) are rendered uncropped.
+ int cropX = 0;
+ int cropY = 0;
+ int cropWidth = frame.width;
+ int cropHeight = frame.height;
+ size_t frameWidth = frame.width;
+ size_t frameHeight = frame.height;
+ if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
+ RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
+ frameWidth = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
+ frameHeight = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
+ cropX = pixelBuffer.cropX;
+ cropY = pixelBuffer.cropY;
+ cropWidth = pixelBuffer.cropWidth;
+ cropHeight = pixelBuffer.cropHeight;
+ }
+
+ // Recompute the texture coordinates and update the vertex buffer contents if cropping, rotation
+ // or frame size changed since the last frame.
+ if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
+ cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
+ frameHeight != _oldFrameHeight) {
+ getCubeVertexData(cropX,
+ cropY,
+ cropWidth,
+ cropHeight,
+ frameWidth,
+ frameHeight,
+ rotation,
+ (float *)_vertexBuffer.contents);
+ _oldCropX = cropX;
+ _oldCropY = cropY;
+ _oldCropWidth = cropWidth;
+ _oldCropHeight = cropHeight;
+ _oldRotation = rotation;
+ _oldFrameWidth = frameWidth;
+ _oldFrameHeight = frameHeight;
+ }
+
return YES;
}
@@ -158,16 +239,6 @@
return YES;
}
-- (void)setupView:(__kindof MTKView *)view {
- view.device = _device;
-
- view.preferredFramesPerSecond = 30;
- view.autoResizeDrawable = NO;
-
- // We need to keep reference to the view as it's needed down the rendering pipeline.
- _view = view;
-}
-
- (void)loadAssets {
id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
@@ -186,12 +257,6 @@
}
}
-- (void)setupBuffers {
- _vertexBuffer = [_device newBufferWithBytes:cubeVertexData
- length:sizeof(cubeVertexData)
- options:MTLResourceOptionCPUCacheModeDefault];
-}
-
- (void)render {
// Wait until the inflight (currently sent to GPU) command buffer
// has completed the GPU work.
@@ -215,7 +280,7 @@
// Set context state.
[renderEncoder pushDebugGroup:renderEncoderDebugGroup];
[renderEncoder setRenderPipelineState:_pipelineState];
- [renderEncoder setVertexBuffer:_vertexBuffer offset:_offset * sizeof(float) atIndex:0];
+ [renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
[self uploadTexturesToRenderEncoder:renderEncoder];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
diff --git a/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m b/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
index 08594c8..66194db 100644
--- a/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
+++ b/sdk/objc/Framework/Classes/Metal/RTCMTLVideoView.m
@@ -29,17 +29,16 @@
#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
@interface RTCMTLVideoView () <MTKViewDelegate>
-@property(nonatomic, strong) RTCMTLI420Renderer *rendererI420;
-@property(nonatomic, strong) RTCMTLNV12Renderer *rendererNV12;
-@property(nonatomic, strong) RTCMTLRGBRenderer *rendererRGB;
-@property(nonatomic, strong) MTKView *metalView;
-@property(atomic, strong) RTCVideoFrame *videoFrame;
+@property(nonatomic) RTCMTLI420Renderer *rendererI420;
+@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
+@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
+@property(nonatomic) MTKView *metalView;
+@property(atomic) RTCVideoFrame *videoFrame;
+@property(nonatomic) CGSize videoFrameSize;
+@property(nonatomic) int64_t lastFrameTimeNs;
@end
-@implementation RTCMTLVideoView {
- int64_t _lastFrameTimeNs;
- CGSize _videoFrameSize;
-}
+@implementation RTCMTLVideoView
@synthesize delegate = _delegate;
@synthesize rendererI420 = _rendererI420;
@@ -47,6 +46,9 @@
@synthesize rendererRGB = _rendererRGB;
@synthesize metalView = _metalView;
@synthesize videoFrame = _videoFrame;
+@synthesize videoFrameSize = _videoFrameSize;
+@synthesize lastFrameTimeNs = _lastFrameTimeNs;
+@synthesize rotationOverride = _rotationOverride;
- (instancetype)initWithFrame:(CGRect)frameRect {
self = [super initWithFrame:frameRect];
@@ -64,6 +66,22 @@
return self;
}
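+// Rendering is enabled and disabled by resuming or pausing the underlying MTKView, which starts
+// or stops its internal redraw loop (and with it the drawInMTKView: callbacks).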
+- (BOOL)isEnabled {
+ return !self.metalView.paused;
+}
+
+- (void)setEnabled:(BOOL)enabled {
+ self.metalView.paused = !enabled;
+}
+
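+// The content mode is forwarded to the MTKView: with the drawable sized to the video frame,
+// aspect-fit/fill content modes control how that drawable is scaled into the view's bounds.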
+- (UIViewContentMode)videoContentMode {
+ return self.metalView.contentMode;
+}
+
+- (void)setVideoContentMode:(UIViewContentMode)mode {
+ self.metalView.contentMode = mode;
+}
+
#pragma mark - Private
+ (BOOL)isMetalAvailable {
@@ -74,11 +92,6 @@
#endif
}
-+ (MTKView *)createMetalView:(CGRect)frame {
- MTKView *view = [[MTKViewClass alloc] initWithFrame:frame];
- return view;
-}
-
+ (RTCMTLNV12Renderer *)createNV12Renderer {
return [[RTCMTLNV12RendererClass alloc] init];
}
@@ -94,33 +107,22 @@
- (void)configure {
NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not available on this device");
- _metalView = [RTCMTLVideoView createMetalView:self.bounds];
- [self configureMetalView];
+ self.metalView = [[MTKViewClass alloc] initWithFrame:self.bounds];
+ self.metalView.delegate = self;
+ self.metalView.contentMode = UIViewContentModeScaleAspectFill;
+ [self addSubview:self.metalView];
+ self.videoFrameSize = CGSizeZero;
}
-- (void)configureMetalView {
- if (_metalView) {
- _metalView.delegate = self;
- [self addSubview:_metalView];
- _metalView.contentMode = UIViewContentModeScaleAspectFit;
- _videoFrameSize = CGSizeZero;
- }
-}
-
-- (void)setVideoContentMode:(UIViewContentMode)mode {
- _metalView.contentMode = mode;
-}
-
-#pragma mark - Private
-
- (void)layoutSubviews {
[super layoutSubviews];
+
CGRect bounds = self.bounds;
- _metalView.frame = bounds;
- if (!CGSizeEqualToSize(_videoFrameSize, CGSizeZero)) {
- _metalView.drawableSize = _videoFrameSize;
+ self.metalView.frame = bounds;
+ if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) {
+ self.metalView.drawableSize = [self drawableSize];
} else {
- _metalView.drawableSize = bounds.size;
+ self.metalView.drawableSize = bounds.size;
}
}
@@ -130,10 +132,11 @@
NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
RTCVideoFrame *videoFrame = self.videoFrame;
// Skip rendering if we've already rendered this frame.
- if (!videoFrame || videoFrame.timeStampNs == _lastFrameTimeNs) {
+ if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) {
return;
}
+ RTCMTLRenderer *renderer;
if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
@@ -146,7 +149,7 @@
return;
}
}
- [self.rendererRGB drawFrame:videoFrame];
+ renderer = self.rendererRGB;
} else {
if (!self.rendererNV12) {
self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
@@ -156,7 +159,7 @@
return;
}
}
- [self.rendererNV12 drawFrame:videoFrame];
+ renderer = self.rendererNV12;
}
} else {
if (!self.rendererI420) {
@@ -167,25 +170,73 @@
return;
}
}
- [self.rendererI420 drawFrame:videoFrame];
+ renderer = self.rendererI420;
}
- _lastFrameTimeNs = videoFrame.timeStampNs;
+
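+ // Propagate the current override on every draw so that changes take effect on the next frame.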
+ renderer.rotationOverride = self.rotationOverride;
+
+ [renderer drawFrame:videoFrame];
+ self.lastFrameTimeNs = videoFrame.timeStampNs;
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
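+// Returns the rotation actually used for rendering: the override when set, otherwise the
+// rotation carried by the current frame.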
+- (RTCVideoRotation)frameRotation {
+ if (self.rotationOverride) {
+ RTCVideoRotation rotation;
+#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
+ if (@available(iOS 11, *)) {
+ [self.rotationOverride getValue:&rotation size:sizeof(rotation)];
+ } else
+#endif
+ {
+ [self.rotationOverride getValue:&rotation];
+ }
+ return rotation;
+ }
+
+ return self.videoFrame.rotation;
+}
+
+- (CGSize)drawableSize {
+ // The frame size is reported in the frame's own orientation; swap width and height when the
+ // rotation used for rendering differs from the frame's rotation by 90 or 270 degrees.
+ CGSize videoFrameSize = self.videoFrameSize;
+ RTCVideoRotation frameRotation = [self frameRotation];
+
+ BOOL useLandscape =
+ (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
+ BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
+ (self.videoFrame.rotation == RTCVideoRotation_180);
+
+ if (useLandscape == sizeIsLandscape) {
+ return videoFrameSize;
+ } else {
+ return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
+ }
+}
+
#pragma mark - RTCVideoRenderer
- (void)setSize:(CGSize)size {
- self.metalView.drawableSize = size;
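+ // Capture the view weakly so the asynchronously dispatched block does not extend its lifetime.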
+ __weak RTCMTLVideoView *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
- _videoFrameSize = size;
- [self.delegate videoView:self didChangeVideoSize:size];
+ RTCMTLVideoView *strongSelf = weakSelf;
+
+ strongSelf.videoFrameSize = size;
+ CGSize drawableSize = [strongSelf drawableSize];
+
+ strongSelf.metalView.drawableSize = drawableSize;
+ [strongSelf setNeedsLayout];
+ [strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
});
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
+ if (!self.isEnabled) {
+ return;
+ }
+
if (frame == nil) {
RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
return;