blob: 5708346ae2bccdee06da02c5eea8d2a13d37d453 [file] [log] [blame]
Jon Hjelle7ac8bab2016-01-21 11:44:55 -08001/*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
tkchin9eeb6242016-04-27 01:54:20 -070011#include "avfoundationvideocapturer.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080012
13#import <AVFoundation/AVFoundation.h>
14#import <Foundation/Foundation.h>
adam.fedorfc22e032016-06-08 17:24:37 -070015#if TARGET_OS_IPHONE
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080016#import <UIKit/UIKit.h>
adam.fedorfc22e032016-06-08 17:24:37 -070017#endif
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080018
tkchin9eeb6242016-04-27 01:54:20 -070019#import "RTCDispatcher+Private.h"
20#import "WebRTC/RTCLogging.h"
tkchind7629102016-07-28 14:52:55 -070021#if TARGET_OS_IPHONE
22#import "WebRTC/UIDevice+RTCDevice.h"
23#endif
tkchin9eeb6242016-04-27 01:54:20 -070024
magjed2ab012c2016-08-25 03:25:04 -070025#include "libyuv/rotate.h"
26
tkchin9eeb6242016-04-27 01:54:20 -070027#include "webrtc/base/bind.h"
28#include "webrtc/base/checks.h"
29#include "webrtc/base/thread.h"
magjed39607c92016-07-14 08:12:17 -070030#include "webrtc/common_video/include/corevideo_frame_buffer.h"
magjed2ab012c2016-08-25 03:25:04 -070031#include "webrtc/common_video/rotation.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080032
// Pairs an AVCaptureSession preset constant with the pixel dimensions it
// produces, so cricket::VideoFormats can be matched against presets.
struct AVCaptureSessionPresetResolution {
  NSString *sessionPreset;
  int width;
  int height;
};

#if TARGET_OS_IPHONE
// Candidate presets on iOS; actual support is checked at runtime with
// -[AVCaptureSession canSetSessionPreset:].
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset1280x720, 1280, 720},
  { AVCaptureSessionPreset1920x1080, 1920, 1080},
};
#else // macOS
// Candidate presets on macOS (includes sizes not exposed on iOS).
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset320x240, 320, 240},
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset960x540, 960, 540},
  { AVCaptureSessionPreset1280x720, 1280, 720},
};
#endif
55
// Mapping from cricket::VideoFormat to AVCaptureSession presets.
// Returns the preset whose dimensions match |format| in either orientation,
// or AVCaptureSessionPreset640x480 when nothing matches.
static NSString *GetSessionPresetForVideoFormat(
    const cricket::VideoFormat& format) {
  for (const auto& preset : kAvailablePresets) {
    // A preset can serve either orientation, so compare both ways.
    const bool landscapeMatch =
        format.width == preset.width && format.height == preset.height;
    const bool portraitMatch =
        format.width == preset.height && format.height == preset.width;
    if (landscapeMatch || portraitMatch) {
      return preset.sessionPreset;
    }
  }
  // If no matching preset is found, use a default one.
  return AVCaptureSessionPreset640x480;
}
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080069
// This class used to capture frames using AVFoundation APIs on iOS. It is meant
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this
// because other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

// Created once in -initWithCapturer: and retained until dealloc.
@property(nonatomic, readonly) AVCaptureSession *captureSession;
// Serial queue on which sample buffers are delivered (created lazily).
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
// YES when a rear-facing capture input was created during setup.
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
// Device backing the currently selected (front/back) camera input.
- (AVCaptureDevice *)getActiveCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
96
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  // Rotation attached to outgoing frames; updated from device orientation on
  // iOS, always kVideoRotation_0 on macOS.
  webrtc::VideoRotation _rotation;
  // Set after one automatic recovery attempt so fatal errors are not retried
  // in a loop. Reset when the session reports it started running.
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  // Guards _isRunning (custom isRunning accessors below); hasStarted relies
  // on its atomic property synthesis instead.
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800115
// This is called from the thread that creates the video source, which is likely
// the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    // iOS-only: track device rotation, session interruptions (e.g. phone
    // calls), and foregrounding so capture can be kept alive/restarted.
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    // Session lifecycle/error notifications apply on both platforms.
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}
163
- (void)dealloc {
  // Capture must already have been stopped before this object goes away.
  RTC_DCHECK(!self.hasStarted);
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  [center removeObserver:self];
  _capturer = nullptr;
}
169
- (AVCaptureSession *)captureSession {
  return _captureSession;
}

// Returns the device backing whichever camera input is currently selected.
- (AVCaptureDevice *)getActiveCaptureDevice {
  if (self.useBackCamera) {
    return _backCameraInput.device;
  }
  return _frontCameraInput.device;
}

// Lazily creates the serial queue on which sample buffers are delivered.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue) {
    return _frameQueue;
  }
  dispatch_queue_t queue =
      dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                            DISPATCH_QUEUE_SERIAL);
  dispatch_set_target_queue(
      queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  _frameQueue = queue;
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}
201
// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                    "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// isRunning is guarded by _crit rather than @synchronized to keep the
// accessors cheap on the capture path.
- (BOOL)isRunning {
  rtc::CritScope lock(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope lock(&_crit);
  _isRunning = isRunning;
}
229
// Called from WebRTC thread.
// Begins capture asynchronously on the capture-session dispatcher thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  // Set before dispatching so repeated start calls become no-ops.
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    // Default to portrait orientation on iPhone. This will be reset in
    // updateOrientation unless orientation is unknown/faceup/facedown.
    _rotation = webrtc::kVideoRotation_90;
#else
    // No rotation on Mac.
    _rotation = webrtc::kVideoRotation_0;
#endif
    [self updateOrientation];
#if TARGET_OS_IPHONE
    // Needed for deviceOrientationDidChange: notifications to fire.
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}
254
// Called from same thread as start.
// Stops capture asynchronously on the capture-session dispatcher thread.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // Detach the delegate first so no further buffers are delivered.
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}
273
#pragma mark iOS notifications

#if TARGET_OS_IPHONE
// Recomputes _rotation on the capture-session thread when the device rotates.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Delivered on frameQueue. Forwards each captured buffer, tagged with the
// current rotation, to the owning C++ capturer.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
302
#pragma mark - AVCaptureSession notifications

// Logs a human-readable reason when the capture session is interrupted
// (e.g. device grabbed by another client, app backgrounded).
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  // The interruption-reason key only exists in iOS 9+ SDKs.
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}
330
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

// Routes a session runtime error to the recovery paths: on iOS a
// media-services reset is treated as non-fatal (just restart the session);
// everything else takes the single-retry fatal path.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}
354
// Session began running: mark state and re-arm the fatal-error retry.
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

// Retries recovery exactly once; a second fatal error is only logged.
- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (_hasRetriedOnFatalError) {
      RTCLogError(@"Previous fatal error recovery failed.");
      return;
    }
    RTCLogWarning(@"Attempting to recover from fatal capture error.");
    [self handleNonFatalError];
    _hasRetriedOnFatalError = YES;
  }];
}

// Restarts the session, but only if a start is still outstanding.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!self.hasStarted) {
      return;
    }
    RTCLog(@"Restarting capture session after error.");
    [self.captureSession startRunning];
  }];
}
393
#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// The session can stop while the app is inactive; if a start is still
// outstanding when the app becomes active again, restart capture.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted && !self.captureSession.isRunning) {
      RTCLog(@"Restarting capture session on active.");
      [self.captureSession startRunning];
    }
  }];
}

#endif  // TARGET_OS_IPHONE
409
#pragma mark - Private

// Creates the capture session and wires up the video data output and camera
// inputs. Returns NO if no front camera exists or the session rejects the
// output/inputs. Called once from -initWithCapturer:.
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    // Keep video capture from taking over the app's shared audio session
    // (property is available from iOS 7).
    captureSession.usesApplicationAudioSession = NO;
  }
#endif

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs. The back camera is optional; it only has to be addable
  // when it exists.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}
451
// Lazily creates the NV12 video data output fed to the capture session.
- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    // Deliver late frames instead of discarding them.
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}
468
// Returns the first video capture device at |position|, or nil if none.
- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *device in devices) {
    if (device.position == position) {
      return device;
    }
  }
  return nil;
}
479
// Lazily creates the front-camera input (the default video device on macOS).
// Returns nil if the device is missing or the input cannot be created.
- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    // macOS has no positioned cameras; use the default video device.
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}
506
// Lazily creates the back-camera input. Returns nil (and logs) when the
// device is missing or the input cannot be created.
// Fix: the failure logs were copy-pasted from -frontCameraInput and wrongly
// reported the *front* camera; they now name the back camera.
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}
528
// Caps the device's capture rate by setting its minimum frame duration.
// Takes the required configuration lock; logs and bails out on failure.
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  if ([device lockForConfiguration:&error]) {
    device.activeVideoMinFrameDuration = minFrameDuration;
    [device unlockForConfiguration];
  } else {
    RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription);
  }
}
539
// Called from capture session queue.
// Maps the current UIDevice orientation onto the rotation attached to
// outgoing frames; no-op on macOS.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore; keep the last known rotation.
      break;
  }
#endif
}
564
// Update the current session input to match what's stored in _useBackCamera.
// Fixes: the frame-duration step dereferenced _capturer->GetCaptureFormat()
// unconditionally — that pointer is null until Start() has set a capture
// format, so switching cameras before capture started could crash. It also
// messaged newInput.device when no new input existed. Both are now guarded.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];

    // Reapply the frame-rate cap on the newly active device. Only possible
    // once a capture format has been set (i.e. after Start()).
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    if (newInput && format) {
      const auto fps = cricket::VideoFormat::IntervalToFps(format->interval);
      [self setMinFrameDuration:CMTimeMake(1, fps) forDevice:newInput.device];
    }
  }];
}
590
591@end
592
namespace webrtc {

// Message IDs for posts between threads.
// NOTE(review): kMessageTypeFrame is not referenced anywhere in this file —
// possibly legacy; confirm against the rest of the target before removing.
enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};
598
// Creates the ObjC capture pipeline and advertises the subset of
// kAvailablePresets that this device's session can actually use.
AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
  // Set our supported formats. This matches kAvailablePresets.
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  std::vector<cricket::VideoFormat> supported_formats;
  int framerate = 30;

#if TARGET_OS_IPHONE
  // iPhone 4S: disable the video adapter and halve the frame rate
  // (presumably a performance limitation of that device — confirm).
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    set_enable_video_adapter(false);
    framerate = 15;
  }
#endif

  // Only advertise presets the session accepts; all are NV12 (see
  // -videoDataOutput).
  for (const auto preset : kAvailablePresets) {
    if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
      const auto format = cricket::VideoFormat(
          preset.width,
          preset.height,
          cricket::VideoFormat::FpsToInterval(framerate),
          cricket::FOURCC_NV12);
      supported_formats.push_back(format);
    }
  }

  SetSupportedFormats(supported_formats);
}
627
AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  // Dropping the last reference releases the ObjC capturer (ARC).
  _capturer = nil;
}

// Configures the session preset for |format| and kicks off capture.
// Returns CS_STARTING on success, CS_FAILED when the capturer is missing,
// already running, or the preset is unsupported.
cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
  RTC_DCHECK(desiredPreset);

  [_capturer.captureSession beginConfiguration];
  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
    LOG(LS_ERROR) << "Unsupported video format.";
    [_capturer.captureSession commitConfiguration];
    return cricket::CaptureState::CS_FAILED;
  }
  _capturer.captureSession.sessionPreset = desiredPreset;
  [_capturer.captureSession commitConfiguration];

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  // Adjust the framerate for all capture devices.
  const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
  AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
  [_capturer setMinFrameDuration:CMTimeMake(1, fps) forDevice:activeDevice];

  return cricket::CaptureState::CS_STARTING;
}
669
// Stops capture and clears the stored capture format.
void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(nullptr);
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

// Thin forwarding accessors for the camera-selection flag.
void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}
694
// Converts a captured CMSampleBuffer into a webrtc frame and passes it to
// OnFrame. Invalid/not-ready buffers are dropped. The frame is cropped and
// scaled per AdaptFrame; when apply_rotation() is set, the pixels are
// physically rotated so downstream consumers need not handle rotation.
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sample_buffer, webrtc::VideoRotation rotation) {
  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
      !CMSampleBufferIsValid(sample_buffer) ||
      !CMSampleBufferDataIsReady(sample_buffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (image_buffer == NULL) {
    return;
  }

  // Wrap the CoreVideo pixel buffer in a webrtc frame buffer.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  // A false return from AdaptFrame means this frame should be dropped.
  if (!AdaptFrame(captured_width, captured_height,
                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    return;
  }

  // Only convert when cropping/scaling changed the frame or a physical
  // rotation is required; otherwise the NV12 buffer is forwarded as-is.
  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height ||
      (apply_rotation() && rotation != webrtc::kVideoRotation_0)) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) {
      buffer = scaled_buffer;
    } else {
      // Applying rotation is only supported for legacy reasons and performance
      // is not critical here.
      // 90/270-degree rotations swap the output width and height.
      rtc::scoped_refptr<webrtc::I420Buffer> rotated_buffer(
          (rotation == webrtc::kVideoRotation_180)
              ? I420Buffer::Create(adapted_width, adapted_height)
              : I420Buffer::Create(adapted_height, adapted_width));
      libyuv::I420Rotate(
          scaled_buffer->DataY(), scaled_buffer->StrideY(),
          scaled_buffer->DataU(), scaled_buffer->StrideU(),
          scaled_buffer->DataV(), scaled_buffer->StrideV(),
          rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
          rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
          rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
          crop_width, crop_height,
          static_cast<libyuv::RotationMode>(rotation));
      buffer = rotated_buffer;
    }
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);
}
764
765} // namespace webrtc