/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
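// Note: kDefaultFormat above must stay in sync with kDefaultPreset
// (640x480, 30 fps, NV12); Start() rejects any other format.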

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) BOOL isRunning;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
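// Note: toggling useBackCamera swaps the session input asynchronously on the
// capture session queue, so the very next frames may still come from the old
// camera.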

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
}

@synthesize captureSession = _captureSession;
@synthesize isRunning = _isRunning;
@synthesize useBackCamera = _useBackCamera;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                        object:nil
                         queue:nil
                    usingBlock:^(NSNotification *notification) {
      RTCLogError(@"Capture session error: %@", notification.userInfo);
    }];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!_isRunning);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// Called from WebRTC thread.
- (void)start {
  if (_isRunning) {
    return;
  }
  _isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!_isRunning) {
    return;
  }
  _isRunning = NO;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
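    // Detach the sample buffer delegate before stopping the session so no
    // frames are delivered after the capturer reports itself stopped.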
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  }];
}

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!_isRunning) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
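  // On iOS 7+, opt the session out of the app-wide AVAudioSession, presumably
  // so starting video capture does not disturb the audio session that WebRTC
  // configures separately.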
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
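    // With alwaysDiscardsLateVideoFrames set to NO, late frames are queued
    // and delivered rather than dropped, leaving frame pacing to the rest of
    // the WebRTC pipeline.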
    dispatch_queue_t queue =
        [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession];
    [videoDataOutput setSampleBufferDelegate:self queue:queue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
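    // The landscape cases are intentionally crossed: UIDeviceOrientation
    // describes how the device is held, while AVCaptureVideoOrientation
    // describes the resulting video, and the two enums name opposite sides
    // for the same physical rotation.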
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
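    // beginConfiguration/commitConfiguration batches the input swap into a
    // single atomic change, so the session never runs briefly with no camera
    // attached.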
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

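// A minimal usage sketch (an illustration, not code from this file), assuming
// the capturer is driven directly rather than through a video source:
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   capturer.Start(kDefaultFormat);   // async; returns CS_STARTING
//   capturer.SetUseBackCamera(true);  // optional camera switch
//   capturer.Stop();
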
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

430void AVFoundationVideoCapturer::Stop() {
tkchin89717aa2016-03-31 17:14:04 -0700431 [_capturer stop];
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800432 SetCaptureFormat(NULL);
433 _startThread = nullptr;
434}
435
436bool AVFoundationVideoCapturer::IsRunning() {
437 return _capturer.isRunning;
438}
439
440AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
441 return _capturer.captureSession;
442}
443
hjona1cf3662016-03-14 20:55:22 -0700444bool AVFoundationVideoCapturer::CanUseBackCamera() const {
445 return _capturer.canUseBackCamera;
446}
447
void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time) {
  RTC_DCHECK(_startThread->IsCurrent());

  // Base address must be locked to access frame data.
  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
  if (ret != kCVReturnSuccess) {
    // Balance the retain taken in CaptureSampleBuffer before bailing out.
    CVBufferRelease(image_buffer);
    return;
  }

  static size_t const kYPlaneIndex = 0;
  static size_t const kUVPlaneIndex = 1;
  uint8_t* y_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kYPlaneIndex));
  size_t y_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_width =
      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
  size_t uv_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
  size_t uv_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
                      uv_plane_bytes_per_row * uv_plane_height;
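  // For NV12 the interleaved UV plane has half the rows of the Y plane, so
  // with the default 640x480 format and tightly packed rows frame_size is
  // 640 * 480 + 640 * 240 = 460800 bytes.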

  // Sanity check assumption that planar bytes are contiguous.
  uint8_t* uv_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kUVPlaneIndex));
  RTC_DCHECK(uv_plane_address ==
             y_plane_address + y_plane_height * y_plane_bytes_per_row);

  // Stuff data into a cricket::CapturedFrame.
  cricket::CapturedFrame frame;
  frame.width = y_plane_width;
  frame.height = y_plane_height;
  frame.pixel_width = 1;
  frame.pixel_height = 1;
  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
  frame.time_stamp = capture_time;
  frame.data = y_plane_address;
  frame.data_size = frame_size;

  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, &frame);

  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
  CVBufferRelease(image_buffer);
}

}  // namespace webrtc