/*
 *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

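// Lazily creates the serial queue that sample buffers are delivered on. The
// queue targets the high-priority global queue so capture callbacks are
// serviced promptly.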
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  }];
}

#pragma mark iOS notifications

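// Hops to the capture session queue so the video connection's orientation can
// be updated there.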
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

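// Logs why the session was interrupted (e.g. the app moved to the background
// or another client took the capture device).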
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

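// A media-services reset is treated as recoverable; any other runtime error
// goes through the fatal-error path below.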
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#pragma mark - Private

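// Creates the capture session, applies the 640x480 preset, and attaches the
// NV12 video data output plus the front (and, when available, back) camera
// inputs. Returns NO if any required piece cannot be set up.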
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera, give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

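// Returns the first capture device at the requested position (front or back),
// or nil if no such camera exists.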
- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

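// Lazily creates and caches the front camera input. Returns nil if the device
// or its input cannot be created.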
- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

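// Lazily creates and caches the back camera input. Returns nil if the device
// or its input cannot be created.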
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

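// Bundles a CVImageBufferRef (retained by the poster) with its capture time
// so a frame can be posted from the capture queue to the WebRTC start thread.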
struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

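// A minimal usage sketch (illustrative only; in practice the capturer is
// owned and driven by a WebRTC video source, and Start/Stop must run on an
// rtc::Thread so frames can be posted back to it):
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   capturer.Start(cricket::VideoFormat(
//       640, 480, cricket::VideoFormat::FpsToInterval(30),
//       cricket::FOURCC_NV12));
//   // ... frames are signaled to the base class / registered sinks ...
//   capturer.Stop();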
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

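// Called on the capture queue for every delivered sample buffer. Retains the
// pixel buffer and posts it to the thread Start() was called on; the buffer
// is released once the frame has been signaled.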
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

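// Runs on _startThread: unwraps the posted frame data and forwards it to
// OnFrameMessage().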
void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

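// Wraps the NV12 pixel buffer in a cricket::CapturedFrame and signals it to
// the base class, which performs the conversion to I420. NV12 stores a
// full-resolution Y plane followed by an interleaved half-resolution CbCr
// plane, so the frame size is y_stride * y_height + uv_stride * uv_height.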
void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time) {
  RTC_DCHECK(_startThread->IsCurrent());

  // Base address must be unlocked to access frame data.
  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
  if (ret != kCVReturnSuccess) {
    return;
  }

  static size_t const kYPlaneIndex = 0;
  static size_t const kUVPlaneIndex = 1;
  uint8_t* y_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kYPlaneIndex));
  size_t y_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_width =
      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
  size_t uv_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
  size_t uv_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
      uv_plane_bytes_per_row * uv_plane_height;

  // Sanity check assumption that planar bytes are contiguous.
  uint8_t* uv_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kUVPlaneIndex));
  RTC_DCHECK(uv_plane_address ==
             y_plane_address + y_plane_height * y_plane_bytes_per_row);

  // Stuff data into a cricket::CapturedFrame.
  cricket::CapturedFrame frame;
  frame.width = y_plane_width;
  frame.height = y_plane_height;
  frame.pixel_width = 1;
  frame.pixel_height = 1;
  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
  frame.time_stamp = capture_time;
  frame.data = y_plane_address;
  frame.data_size = frame_size;

  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, &frame);

  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
  CVBufferRelease(image_buffer);
}

}  // namespace webrtc