Add CoreVideoFrameBuffer.
- Makes the VT H.264 decoder output a CoreVideoFrameBuffer
- Makes the iOS renderer convert the frame buffer if it is not I420
BUG=
Review URL: https://codereview.webrtc.org/1853503003
Cr-Commit-Position: refs/heads/master@{#12224}
diff --git a/webrtc/api/objc/RTCVideoFrame.h b/webrtc/api/objc/RTCVideoFrame.h
index 791e41c..b44bf73 100644
--- a/webrtc/api/objc/RTCVideoFrame.h
+++ b/webrtc/api/objc/RTCVideoFrame.h
@@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@@ -30,8 +31,19 @@
@property(nonatomic, readonly) int32_t uPitch;
@property(nonatomic, readonly) int32_t vPitch;
+/** Timestamp in nanoseconds. */
+@property(nonatomic, readonly) int64_t timeStamp;
+
+/** The native handle should be a pixel buffer on iOS. */
+@property(nonatomic, readonly) CVPixelBufferRef nativeHandle;
+
- (instancetype)init NS_UNAVAILABLE;
+/** If the frame is backed by a CVPixelBuffer, creates a backing i420 frame.
+ * Accessing the YUV plane properties will call this method if needed.
+ */
+- (void)convertBufferIfNeeded;
+
@end
NS_ASSUME_NONNULL_END