blob: d5252177dd5184b43854b37a262a809e548fbc6f [file] [log] [blame]
Zeke Chin57cc74e2015-05-05 07:52:31 -07001/*
2 * libjingle
3 * Copyright 2015 Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28#include "talk/app/webrtc/objc/avfoundationvideocapturer.h"
29
30#include "webrtc/base/bind.h"
tkchin89717aa2016-03-31 17:14:04 -070031#include "webrtc/base/checks.h"
Niels Möller505945a2016-03-17 12:20:41 +010032#include "webrtc/base/thread.h"
Zeke Chin57cc74e2015-05-05 07:52:31 -070033
34#import <AVFoundation/AVFoundation.h>
35#import <Foundation/Foundation.h>
36#import <UIKit/UIKit.h>
37
tkchin9eeb6242016-04-27 01:54:20 -070038#import "RTCDispatcher+Private.h"
39#import "RTCLogging.h"
hayscedd8fef2015-12-08 11:08:39 -080040
// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
// Capture format matching kDefaultPreset: 640x480 NV12 at 30fps. NV12 is used
// because the iOS capture pipeline vends biplanar YCbCr natively (see
// -videoDataOutput); conversion to I420 happens downstream.
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
48
// This class used to capture frames using AVFoundation APIs on iOS. It is meant
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this
// because other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

// Created eagerly in init so the app can attach e.g. an
// AVCaptureVideoPreviewLayer before capture starts.
@property(nonatomic, readonly) AVCaptureSession *captureSession;
// YES between -start and -stop; toggled synchronously on the calling thread.
@property(nonatomic, readonly) BOOL isRunning;
// YES iff a usable rear-facing camera input was created during session setup.
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
72
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  // Set once a real orientation-change notification has arrived since -start;
  // gates the FaceUp/FaceDown/Unknown cases in -updateOrientation.
  BOOL _orientationHasChanged;
}

@synthesize captureSession = _captureSession;
@synthesize isRunning = _isRunning;
@synthesize useBackCamera = _useBackCamera;
// This is called from the thread that creates the video source, which is likely
// the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    // NOTE(review): the opaque observer token returned by
    // -addObserverForName:object:queue:usingBlock: is discarded here, and
    // -[NSNotificationCenter removeObserver:] with `self` in dealloc does NOT
    // unregister block-based observers — this registration likely outlives the
    // object. Confirm and consider storing/removing the token.
    [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                        object:nil
                         queue:nil
                    usingBlock:^(NSNotification *notification) {
      RTCLogError(@"Capture session error: %@", notification.userInfo);
    }];
  }
  return self;
}
114
- (void)dealloc {
  // Owner must have called -stop before releasing the last reference.
  RTC_DCHECK(!_isRunning);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}
120
// Explicit getter for the readonly property; backed by _captureSession, which
// is created exactly once in -setupCaptureSession.
- (AVCaptureSession *)captureSession {
  return _captureSession;
}
124
// Called from any thread (likely main thread). The back camera is usable iff
// an input for it was successfully created during session setup.
- (BOOL)canUseBackCamera {
  BOOL hasBackCameraInput = (_backCameraInput != nil);
  return hasBackCameraInput;
}
129
// Called from any thread (likely main thread). Synchronized against
// -setUseBackCamera:, which mutates the flag under the same lock.
- (BOOL)useBackCamera {
  BOOL usingBackCamera = NO;
  @synchronized(self) {
    usingBackCamera = _useBackCamera;
  }
  return usingBackCamera;
}
136
// Called from any thread (likely main thread). Switches the active camera
// input if a back camera exists; otherwise logs (when a switch was requested)
// and leaves the session untouched.
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      // The trailing space matters: the two adjacent literals concatenate into
      // one message (previously rendered "…used;not switching.").
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    // Actual session reconfiguration happens async on the capture queue.
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}
154
// Called from WebRTC thread.
- (void)start {
  if (_isRunning) {
    return;
  }
  // Flip the flag synchronously so the caller (and the sample-buffer delegate)
  // observes the new state before the async session work below runs.
  _isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // Reset orientation tracking and seed the connection's orientation before
    // the session starts delivering frames.
    _orientationHasChanged = NO;
    [self updateOrientation];
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}
170
// Called from same thread as start.
- (void)stop {
  if (!_isRunning) {
    return;
  }
  // Mark stopped synchronously so frames still in flight on the capture queue
  // are dropped by the sample-buffer delegate.
  _isRunning = NO;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // Detach the delegate first so no callbacks fire during teardown.
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  }];
}
184
185#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
186
// Called on the capture session queue for every frame the output produces.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  // Drop frames delivered after -stop flipped the flag.
  if (!_isRunning) {
    return;
  }
  // _capturer owns this object, so the raw pointer is valid for our lifetime.
  _capturer->CaptureSampleBuffer(sampleBuffer);
}
196
// Called on the capture session queue when the pipeline drops a frame (e.g.
// because downstream processing could not keep up).
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
202
203#pragma mark - Private
204
// Builds _captureSession: default preset, NV12 video data output, and the
// front (plus, when present, back) camera inputs. Returns NO if any required
// piece cannot be created. Called exactly once, from init.
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    // Don't take over the app's shared audio session (iOS 7+ API; guarded at
    // both compile time and run time).
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up. The back camera is optional (see -canUseBackCamera).
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  // Only one camera input is attached at a time;
  // -updateSessionInputForUseBackCamera: swaps them later.
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
  _captureSession = captureSession;
  return YES;
}
248
// Lazily creates and caches the video data output.
// Fix: the original allocated AVCaptureVideoDataOutput twice in a row and
// discarded the first instance; the redundant allocation is removed.
- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    // Deliver every frame; WebRTC handles pacing and drops downstream.
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    dispatch_queue_t queue =
        [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession];
    [videoDataOutput setSampleBufferDelegate:self queue:queue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}
268
// Returns the first video capture device at |position|, or nil if this
// hardware has no camera there.
- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *device in devices) {
    if (device.position != position) {
      continue;
    }
    return device;
  }
  return nil;
}
279
// Lazily creates and caches the front-camera input. Returns nil (without
// caching) when the device or its input cannot be created, so a later call
// may retry.
- (AVCaptureDeviceInput *)frontCameraInput {
  if (_frontCameraInput) {
    return _frontCameraInput;
  }
  AVCaptureDevice *device =
      [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
  if (!device) {
    RTCLogWarning(@"Failed to find front capture device.");
    return nil;
  }
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
  if (!input) {
    RTCLogError(@"Failed to create front camera input: %@",
                error.localizedDescription);
    return nil;
  }
  _frontCameraInput = input;
  return _frontCameraInput;
}
301
// Lazily creates and caches the back-camera input; nil if unavailable.
// Fix: both log messages previously said "front" (copy/paste from
// -frontCameraInput) even though this is the back-camera path.
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}
323
// UIDeviceOrientationDidChangeNotification handler. Hops to the capture
// session queue, since that is where the video connection is mutated.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
331
// Called from capture session queue.
// Maps the current UIDeviceOrientation onto the video connection's
// AVCaptureVideoOrientation so captured frames are upright.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    // Landscape left/right are deliberately crossed: UIDeviceOrientation and
    // AVCaptureVideoOrientation define left/right from opposite viewpoints.
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Flat/unknown carries no hint: default to portrait only on the very
      // first update after -start, otherwise keep the last orientation set.
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
}
364
// Update the current session input to match what's stored in _useBackCamera.
// Runs async on the capture session queue.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // Batch the swap so the session reconfigures atomically.
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    // The new input's connection does not inherit the previous orientation.
    [self updateOrientation];
    [_captureSession commitConfiguration];
  }];
}
387
388@end
389
390namespace webrtc {
391
// Message IDs for frames posted from the capture queue to the WebRTC start
// thread via rtc::Thread::Post (see CaptureSampleBuffer / OnMessage).
enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};
395
// Payload for kMessageTypeFrame: a retained CVImageBufferRef plus its capture
// time in nanoseconds. The receiver (OnFrameMessage) releases the buffer.
struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};
402
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  // The ObjC helper keeps a raw back-pointer to |this|; it is owned here and
  // released in the destructor, so it never outlives us.
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}
412
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  // Under ARC this drops the last strong reference to the ObjC helper.
  _capturer = nil;
}
416
// Begins capture. Only kDefaultFormat is accepted; returns CS_FAILED when the
// helper is missing, already running, or the format is unsupported.
cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}
446
// Stops capture and clears the recorded start thread. Safe if not running.
void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}
452
// True while the underlying AVCaptureSession has been asked to run.
bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}
456
// Exposes the session so the app can attach a preview layer.
AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}
460
// True iff the device has a usable rear-facing camera.
bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}
464
// Forwards the camera selection to the ObjC helper (no-op if unavailable).
void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}
468
// Current camera selection as stored by the ObjC helper.
bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}
472
// Called on the capture session queue for each delivered sample buffer.
// Retains the pixel buffer and posts it to the thread Start() ran on, which
// is where WebRTC expects to receive frames.
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  // Ignore malformed, multi-sample, or not-yet-ready buffers.
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}
493
494void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
495 switch (msg->message_id) {
496 case kMessageTypeFrame: {
497 rtc::TypedMessageData<AVFoundationFrame>* data =
498 static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
499 const AVFoundationFrame& frame = data->data();
500 OnFrameMessage(frame.image_buffer, frame.capture_time);
501 delete data;
502 break;
503 }
504 }
505}
506
507void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
508 int64_t capture_time) {
509 RTC_DCHECK(_startThread->IsCurrent());
510
Zeke Chin57cc74e2015-05-05 07:52:31 -0700511 // Base address must be unlocked to access frame data.
tkchin89717aa2016-03-31 17:14:04 -0700512 CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
513 CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
Zeke Chin57cc74e2015-05-05 07:52:31 -0700514 if (ret != kCVReturnSuccess) {
515 return;
516 }
517
518 static size_t const kYPlaneIndex = 0;
519 static size_t const kUVPlaneIndex = 1;
tkchin89717aa2016-03-31 17:14:04 -0700520 uint8_t* y_plane_address =
521 static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
522 kYPlaneIndex));
523 size_t y_plane_height =
524 CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
525 size_t y_plane_width =
526 CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
527 size_t y_plane_bytes_per_row =
528 CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
529 size_t uv_plane_height =
530 CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
531 size_t uv_plane_bytes_per_row =
532 CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
533 size_t frame_size = y_plane_bytes_per_row * y_plane_height +
534 uv_plane_bytes_per_row * uv_plane_height;
Zeke Chin57cc74e2015-05-05 07:52:31 -0700535
536 // Sanity check assumption that planar bytes are contiguous.
tkchin89717aa2016-03-31 17:14:04 -0700537 uint8_t* uv_plane_address =
538 static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
539 kUVPlaneIndex));
540 RTC_DCHECK(uv_plane_address ==
541 y_plane_address + y_plane_height * y_plane_bytes_per_row);
Zeke Chin57cc74e2015-05-05 07:52:31 -0700542
543 // Stuff data into a cricket::CapturedFrame.
Zeke Chin57cc74e2015-05-05 07:52:31 -0700544 cricket::CapturedFrame frame;
tkchin89717aa2016-03-31 17:14:04 -0700545 frame.width = y_plane_width;
546 frame.height = y_plane_height;
Zeke Chin57cc74e2015-05-05 07:52:31 -0700547 frame.pixel_width = 1;
548 frame.pixel_height = 1;
Peter Boström0c4e06b2015-10-07 12:23:21 +0200549 frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
tkchin89717aa2016-03-31 17:14:04 -0700550 frame.time_stamp = capture_time;
551 frame.data = y_plane_address;
552 frame.data_size = frame_size;
Zeke Chin57cc74e2015-05-05 07:52:31 -0700553
Zeke Chin57cc74e2015-05-05 07:52:31 -0700554 // This will call a superclass method that will perform the frame conversion
555 // to I420.
tkchin89717aa2016-03-31 17:14:04 -0700556 SignalFrameCaptured(this, &frame);
557
558 CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
559 CVBufferRelease(image_buffer);
Zeke Chin57cc74e2015-05-05 07:52:31 -0700560}
561
562} // namespace webrtc