Add CoreVideoFrameBuffer.
- Makes the VideoToolbox H264 decoder output CoreVideoFrameBuffer
- Makes the iOS renderer convert the frame buffer to I420 when it is not already I420
BUG=
Review URL: https://codereview.webrtc.org/1853503003
Cr-Commit-Position: refs/heads/master@{#12224}
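For context, the CPU fallback at the heart of CoreVideoFrameBuffer is an NV12-to-I420 copy. Below is a minimal Objective-C++ sketch of what such a NativeToI420Buffer() conversion can look like, assuming an NV12 (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) pixel buffer and libyuv; the helper name NV12PixelBufferToI420 is illustrative, not the shipped implementation:

```objc
#include <CoreVideo/CoreVideo.h>

#include "libyuv/convert.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/include/video_frame_buffer.h"

// Hypothetical helper illustrating the NV12 -> I420 copy; the real
// CoreVideoFrameBuffer::NativeToI420Buffer() may differ in detail.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> NV12PixelBufferToI420(
    CVPixelBufferRef pixel_buffer) {
  CVPixelBufferLockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
  const int width = static_cast<int>(CVPixelBufferGetWidth(pixel_buffer));
  const int height = static_cast<int>(CVPixelBufferGetHeight(pixel_buffer));
  rtc::scoped_refptr<webrtc::I420Buffer> buffer(
      new rtc::RefCountedObject<webrtc::I420Buffer>(width, height));
  // Plane 0 is Y, plane 1 is interleaved UV in an NV12 pixel buffer.
  libyuv::NV12ToI420(
      static_cast<const uint8_t*>(
          CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0)),
      static_cast<int>(CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0)),
      static_cast<const uint8_t*>(
          CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1)),
      static_cast<int>(CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1)),
      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
      width, height);
  CVPixelBufferUnlockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
  return buffer;
}
```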
diff --git a/webrtc/api/objc/RTCEAGLVideoView.m b/webrtc/api/objc/RTCEAGLVideoView.m
index e664ede..58fd108 100644
--- a/webrtc/api/objc/RTCEAGLVideoView.m
+++ b/webrtc/api/objc/RTCEAGLVideoView.m
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import "RTCEAGLVideoView.h"
+#import "webrtc/api/objc/RTCEAGLVideoView.h"
#import <GLKit/GLKit.h>
-#import "RTCVideoFrame.h"
-#import "RTCOpenGLVideoRenderer.h"
+#import "webrtc/api/objc/RTCOpenGLVideoRenderer.h"
+#import "webrtc/api/objc/RTCVideoFrame.h"
// RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen
// refreshes, which should be 30fps. We wrap the display link in order to avoid
@@ -210,6 +210,10 @@
}
- (void)renderFrame:(RTCVideoFrame *)frame {
+ // Generate the i420 frame on the video send thread instead of the main thread.
+ // TODO(tkchin): Remove this once RTCEAGLVideoView supports uploading
+ // CVPixelBuffer textures.
+ [frame convertBufferIfNeeded];
self.videoFrame = frame;
}
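The TODO above points at a zero-copy alternative: uploading the decoded CVPixelBuffer straight to GLES via a CVOpenGLESTextureCache instead of converting to I420 on the CPU. A rough sketch of that path, which is not part of this change; CreateLumaTexture is a hypothetical helper:

```objc
#import <CoreVideo/CoreVideo.h>
#import <CoreVideo/CVOpenGLESTextureCache.h>
#import <OpenGLES/ES2/gl.h>

// Hypothetical sketch only: the helper and its cache management are not
// part of this CL.
static CVOpenGLESTextureRef CreateLumaTexture(CVOpenGLESTextureCacheRef cache,
                                              CVPixelBufferRef pixelBuffer) {
  CVOpenGLESTextureRef texture = NULL;
  // Plane 0 of an NV12 buffer is the Y plane; bind it as a luminance texture.
  CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(
      kCFAllocatorDefault, cache, pixelBuffer, NULL, GL_TEXTURE_2D,
      GL_LUMINANCE,
      (GLsizei)CVPixelBufferGetWidthOfPlane(pixelBuffer, 0),
      (GLsizei)CVPixelBufferGetHeightOfPlane(pixelBuffer, 0),
      GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &texture);
  return ret == kCVReturnSuccess ? texture : NULL;  // Caller must CFRelease.
}
```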
diff --git a/webrtc/api/objc/RTCVideoFrame+Private.h b/webrtc/api/objc/RTCVideoFrame+Private.h
index 873d3eb..52f532c 100644
--- a/webrtc/api/objc/RTCVideoFrame+Private.h
+++ b/webrtc/api/objc/RTCVideoFrame+Private.h
@@ -16,6 +16,9 @@
@interface RTCVideoFrame ()
+@property(nonatomic, readonly)
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer;
+
- (instancetype)initWithNativeFrame:(const cricket::VideoFrame *)nativeFrame
NS_DESIGNATED_INITIALIZER;
diff --git a/webrtc/api/objc/RTCVideoFrame.h b/webrtc/api/objc/RTCVideoFrame.h
index 791e41c..b44bf73 100644
--- a/webrtc/api/objc/RTCVideoFrame.h
+++ b/webrtc/api/objc/RTCVideoFrame.h
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@@ -30,8 +31,19 @@
@property(nonatomic, readonly) int32_t uPitch;
@property(nonatomic, readonly) int32_t vPitch;
+/** Timestamp in nanoseconds. */
+@property(nonatomic, readonly) int64_t timeStamp;
+
+/** The native handle of the frame: a CVPixelBufferRef on iOS, or NULL if the
+ * frame is not backed by a pixel buffer.
+ */
+@property(nonatomic, readonly) CVPixelBufferRef nativeHandle;
+
- (instancetype)init NS_UNAVAILABLE;
+/** If the frame is backed by a CVPixelBuffer, converts it into a backing I420
+ * buffer. Accessing any of the YUV plane or pitch properties triggers the
+ * conversion if needed.
+ */
+- (void)convertBufferIfNeeded;
+
@end
NS_ASSUME_NONNULL_END
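A sketch of how a renderer could consume this surface once both backings are handled; drawFrame:, renderPixelBuffer:, and renderI420Y:... are hypothetical selectors, not part of this change:

```objc
// Illustrative consumer of the new RTCVideoFrame surface.
- (void)drawFrame:(RTCVideoFrame *)frame {
  if (frame.nativeHandle) {
    // Zero-copy path: the frame is backed by a CVPixelBuffer.
    [self renderPixelBuffer:frame.nativeHandle];
  } else {
    // CPU path: the plane accessors return already-converted I420 data.
    [self renderI420Y:frame.yPlane u:frame.uPlane v:frame.vPlane
               yPitch:frame.yPitch uPitch:frame.uPitch vPitch:frame.vPitch];
  }
}
```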
diff --git a/webrtc/api/objc/RTCVideoFrame.mm b/webrtc/api/objc/RTCVideoFrame.mm
index d70ab37..95f4ac8 100644
--- a/webrtc/api/objc/RTCVideoFrame.mm
+++ b/webrtc/api/objc/RTCVideoFrame.mm
@@ -16,6 +16,7 @@
@implementation RTCVideoFrame {
rtc::scoped_ptr<cricket::VideoFrame> _videoFrame;
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> _i420Buffer;
}
- (size_t)width {
@@ -38,30 +39,65 @@
}
- (const uint8_t *)yPlane {
- const cricket::VideoFrame *const_frame = _videoFrame.get();
- return const_frame->GetYPlane();
+ if (!self.i420Buffer) {
+ return nullptr;
+ }
+ return self.i420Buffer->data(webrtc::kYPlane);
}
- (const uint8_t *)uPlane {
- const cricket::VideoFrame *const_frame = _videoFrame.get();
- return const_frame->GetUPlane();
+ if (!self.i420Buffer) {
+ return nullptr;
+ }
+ return self.i420Buffer->data(webrtc::kUPlane);
}
- (const uint8_t *)vPlane {
- const cricket::VideoFrame *const_frame = _videoFrame.get();
- return const_frame->GetVPlane();
+ if (!self.i420Buffer) {
+ return nullptr;
+ }
+ return self.i420Buffer->data(webrtc::kVPlane);
}
- (int32_t)yPitch {
- return _videoFrame->GetYPitch();
+ if (!self.i420Buffer) {
+ return 0;
+ }
+ return self.i420Buffer->stride(webrtc::kYPlane);
}
- (int32_t)uPitch {
- return _videoFrame->GetUPitch();
+ if (!self.i420Buffer) {
+ return 0;
+ }
+ return self.i420Buffer->stride(webrtc::kUPlane);
}
- (int32_t)vPitch {
- return _videoFrame->GetVPitch();
+ if (!self.i420Buffer) {
+ return 0;
+ }
+ return self.i420Buffer->stride(webrtc::kVPlane);
+}
+
+- (int64_t)timeStamp {
+ return _videoFrame->GetTimeStamp();
+}
+
+- (CVPixelBufferRef)nativeHandle {
+ return static_cast<CVPixelBufferRef>(_videoFrame->GetNativeHandle());
+}
+
+- (void)convertBufferIfNeeded {
+ if (!_i420Buffer) {
+ if (_videoFrame->GetNativeHandle()) {
+ // Convert to I420.
+ _i420Buffer = _videoFrame->GetVideoFrameBuffer()->NativeToI420Buffer();
+ } else {
+ // Should already be I420.
+ _i420Buffer = _videoFrame->GetVideoFrameBuffer();
+ }
+ }
}
#pragma mark - Private
@@ -75,4 +111,9 @@
return self;
}
+- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)i420Buffer {
+ [self convertBufferIfNeeded];
+ return _i420Buffer;
+}
+
@end
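Finally, a sketch of the intended call pattern for the lazy conversion; decodeQueue, videoView, and frame here are assumed variables, not part of this change:

```objc
// Do the potentially expensive CVPixelBuffer -> I420 conversion off the
// main thread, so later plane accesses on the main thread are cheap reads.
dispatch_async(decodeQueue, ^{
  [frame convertBufferIfNeeded];  // Conversion happens here, at most once.
  dispatch_async(dispatch_get_main_queue(), ^{
    [videoView renderFrame:frame];
  });
});
```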