/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include "libyuv/rotate.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/common_video/rotation.h"

struct AVCaptureSessionPresetResolution {
  NSString *sessionPreset;
  int width;
  int height;
};

#if TARGET_OS_IPHONE
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset1280x720, 1280, 720},
  { AVCaptureSessionPreset1920x1080, 1920, 1080},
};
#else  // macOS
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset320x240, 320, 240},
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset960x540, 960, 540},
  { AVCaptureSessionPreset1280x720, 1280, 720},
};
#endif

// Mapping from cricket::VideoFormat to AVCaptureSession presets.
static NSString *GetSessionPresetForVideoFormat(
    const cricket::VideoFormat& format) {
  for (const auto preset : kAvailablePresets) {
    // Check both orientations.
    if ((format.width == preset.width && format.height == preset.height) ||
        (format.width == preset.height && format.height == preset.width)) {
      return preset.sessionPreset;
    }
  }
  // If no matching preset is found, use a default one.
  return AVCaptureSessionPreset640x480;
}
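// For example, requests for both 1280x720 and 720x1280 map to
// AVCaptureSessionPreset1280x720, and a format that matches no preset in
// either orientation falls back to AVCaptureSessionPreset640x480.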

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
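
// A rough usage sketch (an illustration, not code in this file): the C++
// AVFoundationVideoCapturer defined below owns one of these objects and
// drives it from WebRTC threads, e.g.
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   capturer.Start(format);  // eventually calls -start on this class
//   // ... frames flow back through CaptureSampleBuffer() ...
//   capturer.Stop();         // eventually calls -stop on this class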

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  webrtc::VideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
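  // Protects _isRunning, which is read and written from multiple threads.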
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
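    // Run the serial frame queue at high priority so frame delivery keeps
    // pace with capture.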
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    // Default to portrait orientation on iPhone. This will be reset in
    // updateOrientation unless orientation is unknown/faceup/facedown.
    _rotation = webrtc::kVideoRotation_90;
#else
    // No rotation on Mac.
    _rotation = webrtc::kVideoRotation_0;
#endif
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
    && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted && !self.captureSession.isRunning) {
      RTCLog(@"Restarting capture session on active.");
      [self.captureSession startRunning];
    }
  }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  captureSession.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

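// Caps the device's capture rate by raising its minimum frame duration;
// e.g. CMTimeMake(1, 30) limits capture to 30 fps.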
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  if (![device lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device for configuration. Error: %@",
                error.localizedDescription);
    return;
  }
  device.activeVideoMinFrameDuration = minFrameDuration;
  [device unlockForConfiguration];
}

// Called from capture session queue.
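// The switch below treats UIDeviceOrientationLandscapeRight as the unrotated
// sensor orientation (kVideoRotation_0), so portrait frames need a 90 degree
// rotation downstream.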
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800538- (void)updateOrientation {
adam.fedorfc22e032016-06-08 17:24:37 -0700539#if TARGET_OS_IPHONE
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800540 switch ([UIDevice currentDevice].orientation) {
541 case UIDeviceOrientationPortrait:
magjed2ab012c2016-08-25 03:25:04 -0700542 _rotation = webrtc::kVideoRotation_90;
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800543 break;
544 case UIDeviceOrientationPortraitUpsideDown:
magjed2ab012c2016-08-25 03:25:04 -0700545 _rotation = webrtc::kVideoRotation_270;
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800546 break;
547 case UIDeviceOrientationLandscapeLeft:
magjed2ab012c2016-08-25 03:25:04 -0700548 _rotation = webrtc::kVideoRotation_180;
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800549 break;
550 case UIDeviceOrientationLandscapeRight:
magjed2ab012c2016-08-25 03:25:04 -0700551 _rotation = webrtc::kVideoRotation_0;
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800552 break;
553 case UIDeviceOrientationFaceUp:
554 case UIDeviceOrientationFaceDown:
555 case UIDeviceOrientationUnknown:
magjed2ab012c2016-08-25 03:25:04 -0700556 // Ignore.
557 break;
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800558 }
adam.fedorfc22e032016-06-08 17:24:37 -0700559#endif
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800560}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];

    const auto fps = cricket::VideoFormat::IntervalToFps(
        _capturer->GetCaptureFormat()->interval);
    [self setMinFrameDuration:CMTimeMake(1, fps) forDevice:newInput.device];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
  // Set our supported formats. This matches kAvailablePresets.
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  std::vector<cricket::VideoFormat> supported_formats;
  int framerate = 30;

#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    set_enable_video_adapter(false);
    framerate = 15;
  }
#endif

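  // Advertise only the presets that this device's capture session accepts,
  // all as NV12 since that is what the video data output emits.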
  for (const auto preset : kAvailablePresets) {
    if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
      const auto format = cricket::VideoFormat(
          preset.width,
          preset.height,
          cricket::VideoFormat::FpsToInterval(framerate),
          cricket::FOURCC_NV12);
      supported_formats.push_back(format);
    }
  }

  SetSupportedFormats(supported_formats);
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
  RTC_DCHECK(desiredPreset);

  [_capturer.captureSession beginConfiguration];
  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
    LOG(LS_ERROR) << "Unsupported video format.";
    [_capturer.captureSession commitConfiguration];
    return cricket::CaptureState::CS_FAILED;
  }
  _capturer.captureSession.sessionPreset = desiredPreset;
  [_capturer.captureSession commitConfiguration];

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns asynchronously.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  // Adjust the frame rate on the currently active capture device.
  const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
  AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
  [_capturer setMinFrameDuration:CMTimeMake(1, fps) forDevice:activeDevice];

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sample_buffer, webrtc::VideoRotation rotation) {
  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
      !CMSampleBufferIsValid(sample_buffer) ||
      !CMSampleBufferDataIsReady(sample_buffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (image_buffer == NULL) {
    return;
  }

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

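  // CoreVideoFrameBuffer wraps the pixel buffer without copying; conversion
  // to I420 happens below only if cropping, scaling, or rotation is needed.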
  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

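  // Let the base class's video adapter pick the output resolution and decide
  // whether to keep this frame; AdaptFrame() returns false when the frame
  // should be dropped.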
  if (!AdaptFrame(captured_width, captured_height,
                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    return;
  }

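  // Crop and scale to the adapted size, and physically rotate the frame when
  // the sink cannot handle rotated frames (i.e. apply_rotation() is set).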
magjed39607c92016-07-14 08:12:17 -0700727 if (adapted_width != captured_width || crop_width != captured_width ||
magjed2ab012c2016-08-25 03:25:04 -0700728 adapted_height != captured_height || crop_height != captured_height ||
729 (apply_rotation() && rotation != webrtc::kVideoRotation_0)) {
magjed39607c92016-07-14 08:12:17 -0700730 // TODO(magjed): Avoid converting to I420.
731 rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
732 _buffer_pool.CreateBuffer(adapted_width, adapted_height));
733 scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
734 crop_y, crop_width, crop_height);
magjed2ab012c2016-08-25 03:25:04 -0700735 if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) {
736 buffer = scaled_buffer;
737 } else {
738 // Applying rotation is only supported for legacy reasons and performance
739 // is not critical here.
nisseedebf452016-09-15 07:20:40 -0700740 rtc::scoped_refptr<webrtc::I420Buffer> rotated_buffer(
741 (rotation == webrtc::kVideoRotation_180)
742 ? I420Buffer::Create(adapted_width, adapted_height)
743 : I420Buffer::Create(adapted_height, adapted_width));
744 libyuv::I420Rotate(
745 scaled_buffer->DataY(), scaled_buffer->StrideY(),
746 scaled_buffer->DataU(), scaled_buffer->StrideU(),
747 scaled_buffer->DataV(), scaled_buffer->StrideV(),
748 rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
749 rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
750 rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
751 crop_width, crop_height,
752 static_cast<libyuv::RotationMode>(rotation));
753 buffer = rotated_buffer;
magjed2ab012c2016-08-25 03:25:04 -0700754 }
magjed39607c92016-07-14 08:12:17 -0700755 }
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800756
magjed2ab012c2016-08-25 03:25:04 -0700757 OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
Sergey Ulanov19ee1e6eb2016-08-01 13:35:55 -0700758 translated_camera_time_us, 0),
magjed39607c92016-07-14 08:12:17 -0700759 captured_width, captured_height);
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800760}
761
762} // namespace webrtc