blob: 052cdf8057eddcb1b956871600f8a9b7d035ff3a [file] [log] [blame]
Jon Hjelle7ac8bab2016-01-21 11:44:55 -08001/*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
tkchin9eeb6242016-04-27 01:54:20 -070011#include "avfoundationvideocapturer.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080012
13#import <AVFoundation/AVFoundation.h>
14#import <Foundation/Foundation.h>
adam.fedorfc22e032016-06-08 17:24:37 -070015#if TARGET_OS_IPHONE
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080016#import <UIKit/UIKit.h>
adam.fedorfc22e032016-06-08 17:24:37 -070017#endif
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080018
tkchin9eeb6242016-04-27 01:54:20 -070019#import "RTCDispatcher+Private.h"
20#import "WebRTC/RTCLogging.h"
tkchind7629102016-07-28 14:52:55 -070021#if TARGET_OS_IPHONE
22#import "WebRTC/UIDevice+RTCDevice.h"
23#endif
tkchin9eeb6242016-04-27 01:54:20 -070024
magjed2ab012c2016-08-25 03:25:04 -070025#include "libyuv/rotate.h"
26
tkchin9eeb6242016-04-27 01:54:20 -070027#include "webrtc/base/bind.h"
28#include "webrtc/base/checks.h"
29#include "webrtc/base/thread.h"
magjed39607c92016-07-14 08:12:17 -070030#include "webrtc/common_video/include/corevideo_frame_buffer.h"
magjed2ab012c2016-08-25 03:25:04 -070031#include "webrtc/common_video/rotation.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080032
// Associates an AVCaptureSession preset constant with its pixel dimensions so
// that cricket::VideoFormat requests can be mapped onto session presets.
struct AVCaptureSessionPresetResolution {
  NSString *sessionPreset;
  int width;
  int height;
};

// Presets probed at construction time; iOS and macOS support different sets.
#if TARGET_OS_IPHONE
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset1280x720, 1280, 720},
  { AVCaptureSessionPreset1920x1080, 1920, 1080},
};
#else  // macOS
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset320x240, 320, 240},
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset960x540, 960, 540},
  { AVCaptureSessionPreset1280x720, 1280, 720},
};
#endif

// Mapping from cricket::VideoFormat to AVCaptureSession presets.
// Accepts the requested resolution in either orientation (WxH or HxW);
// falls back to 640x480 when no preset matches.
static NSString *GetSessionPresetForVideoFormat(
    const cricket::VideoFormat& format) {
  for (const auto preset : kAvailablePresets) {
    // Check both orientations
    if ((format.width == preset.width && format.height == preset.height) ||
        (format.width == preset.height && format.height == preset.width)) {
      return preset.sessionPreset;
    }
  }
  // If no matching preset is found, use a default one.
  return AVCaptureSessionPreset640x480;
}
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080069
// This class used to capture frames using AVFoundation APIs on iOS. It is meant
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this
// because other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

// Underlying session; exposed so the app can attach e.g. a preview layer.
@property(nonatomic, readonly) AVCaptureSession *captureSession;
// Serial queue on which sample buffers are delivered.
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
// YES when a rear-facing camera input was successfully created.
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(atomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
// Device backing the currently selected (front or back) camera input.
- (AVCaptureDevice *)getActiveCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
96
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  // Rotation to attach to outgoing frames; updated from device orientation
  // (iOS) and fixed at 0 on macOS.
  webrtc::VideoRotation _rotation;
  // Set after the first recovery attempt so a fatal error is only retried once.
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;

@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;
// This is called from the thread that creates the video source, which is likely
// the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  self = [super init];
  if (self) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *notificationCenter =
        [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    // iOS-only observers: orientation changes, session interruptions and
    // foregrounding of the application.
    [notificationCenter addObserver:self
                           selector:@selector(deviceOrientationDidChange:)
                               name:UIDeviceOrientationDidChangeNotification
                             object:nil];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionInterruption:)
                               name:AVCaptureSessionWasInterruptedNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionInterruptionEnded:)
                               name:AVCaptureSessionInterruptionEndedNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleApplicationDidBecomeActive:)
                               name:UIApplicationDidBecomeActiveNotification
                             object:[UIApplication sharedApplication]];
#endif
    // Session lifecycle observers, registered on all platforms.
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionRuntimeError:)
                               name:AVCaptureSessionRuntimeErrorNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionDidStartRunning:)
                               name:AVCaptureSessionDidStartRunningNotification
                             object:_captureSession];
    [notificationCenter addObserver:self
                           selector:@selector(handleCaptureSessionDidStopRunning:)
                               name:AVCaptureSessionDidStopRunningNotification
                             object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  // A matching -stop must have been issued before destruction.
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}
171
// Exposes the underlying session (e.g. for AVCaptureVideoPreviewLayer).
- (AVCaptureSession *)captureSession {
  return _captureSession;
}

// Returns the device of whichever camera input is currently selected.
- (AVCaptureDevice *)getActiveCaptureDevice {
  if (self.useBackCamera) {
    return _backCameraInput.device;
  }
  return _frontCameraInput.device;
}

// Lazily creates the serial delivery queue, targeted at the high-priority
// global queue so frame callbacks are serviced promptly.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue) {
    return _frameQueue;
  }
  _frameQueue =
      dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                            DISPATCH_QUEUE_SERIAL);
  dispatch_set_target_queue(
      _frameQueue,
      dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput ? YES : NO;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}
203
204// Called from any thread (likely main thread).
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800205- (void)setUseBackCamera:(BOOL)useBackCamera {
hjona1cf3662016-03-14 20:55:22 -0700206 if (!self.canUseBackCamera) {
tkchin89717aa2016-03-31 17:14:04 -0700207 if (useBackCamera) {
208 RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
209 "not switching.");
210 }
hjona1cf3662016-03-14 20:55:22 -0700211 return;
212 }
tkchin89717aa2016-03-31 17:14:04 -0700213 @synchronized(self) {
214 if (_useBackCamera == useBackCamera) {
215 return;
216 }
217 _useBackCamera = useBackCamera;
218 [self updateSessionInputForUseBackCamera:useBackCamera];
219 }
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800220}
221
// Called from WebRTC thread. Begins capture; safe to call repeatedly (a
// second call while started is a no-op).
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    // Default to portrait orientation on iPhone. This will be reset in
    // updateOrientation unless orientation is unknown/faceup/facedown.
    _rotation = webrtc::kVideoRotation_90;
#else
    // No rotation on Mac.
    _rotation = webrtc::kVideoRotation_0;
#endif
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    [self.captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  self.hasStarted = NO;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}
265
#pragma mark iOS notifications

#if TARGET_OS_IPHONE
// Recomputes the frame rotation whenever the device orientation changes.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Delivered on the frame queue for every captured frame; forwards the buffer
// to the owning C++ capturer together with the current rotation.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    // Dropped deliberately: stop was requested, so the C++ side may be gone.
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
}

// A frame was dropped by AVFoundation (e.g. because delivery fell behind).
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
294
#pragma mark - AVCaptureSession notifications

// Logs why the session was interrupted (camera claimed by another client,
// backgrounding, etc.). The reason key only exists on iOS 9+ SDKs.
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
    && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

// A runtime error occurred. Media-services resets are treated as recoverable
// on iOS; everything else goes through the fatal-error path.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}
347
// Session transitioned to running; record state and re-arm fatal-error retry.
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

// Attempts one recovery from a fatal capture error; subsequent fatal errors
// are only logged.
- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (_hasRetriedOnFatalError) {
      RTCLogError(@"Previous fatal error recovery failed.");
    } else {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    }
  }];
}

// Restarts the session, but only if a start is still outstanding.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// The app returned to the foreground; resume capture if it should be running
// but the session was stopped while backgrounded.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted && !self.captureSession.isRunning) {
      RTCLog(@"Restarting capture session on active.");
      [self.captureSession startRunning];
    }
  }];
}

#endif  // TARGET_OS_IPHONE
403
#pragma mark - Private

// Builds the capture session with its video output and camera inputs.
// Returns NO (leaving _captureSession nil) when no usable front camera or
// output can be configured. A back camera is optional.
- (BOOL)setupCaptureSession {
  AVCaptureSession *session = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  session.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![session canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [session addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![session canAddInput:frontCameraInput] ||
      (backCameraInput && ![session canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *initialInput =
      self.useBackCamera ? backCameraInput : frontCameraInput;
  [session addInput:initialInput];

  _captureSession = session;
  return YES;
}
441
// Lazily creates the NV12 video data output and binds it to the frame queue.
- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    output.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
          @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    // Keep late frames so WebRTC's own adaptation decides what to drop.
    output.alwaysDiscardsLateVideoFrames = NO;
    [output setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = output;
  }
  return _videoDataOutput;
}

// Returns the first video device at |position|, or nil when none exists.
- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}
469
// Lazily creates the front-camera input. On macOS the default video device is
// used since positional lookup is an iOS concept. Returns nil on failure.
- (AVCaptureDeviceInput *)frontCameraInput {
  if (_frontCameraInput) {
    return _frontCameraInput;
  }
#if TARGET_OS_IPHONE
  AVCaptureDevice *device =
      [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
  AVCaptureDevice *device =
      [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
  if (!device) {
    RTCLogWarning(@"Failed to find front capture device.");
    return nil;
  }
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
  if (!input) {
    RTCLogError(@"Failed to create front camera input: %@",
                error.localizedDescription);
    return nil;
  }
  _frontCameraInput = input;
  return _frontCameraInput;
}
496
// Lazily creates the back-camera input. Returns nil when the device has no
// rear camera or the input cannot be created (callers treat this as
// "back camera unavailable").
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      // Fixed: previously logged "front capture device" in the back-camera
      // path, which made failure logs misleading.
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}
518
// Applies |minFrameDuration| (the inverse of max frame rate) to |device|,
// locking it for configuration; logs and bails if the lock cannot be taken.
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  BOOL locked = [device lockForConfiguration:&error];
  if (!locked) {
    RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription);
    return;
  }
  device.activeVideoMinFrameDuration = minFrameDuration;
  [device unlockForConfiguration];
}
529
// Called from capture session queue. Maps the UIDevice orientation onto the
// rotation applied to outgoing frames; face-up/face-down/unknown keep the
// previously computed rotation. No-op on macOS.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
  switch (orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}
554
// Update the current session input to match what's stored in _useBackCamera,
// then restore the configured frame rate on the newly active device.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];

    // Fixed: GetCaptureFormat() is null until Start() has been called, so
    // dereferencing ->interval unconditionally could crash when the camera
    // is switched before capture starts. Guard before reapplying the fps.
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    if (format && newInput.device) {
      const auto fps = cricket::VideoFormat::IntervalToFps(format->interval);
      [self setMinFrameDuration:CMTimeMake(1, fps) forDevice:newInput.device];
    }
  }];
}
580
581@end
582
583namespace webrtc {
584
// Message identifiers for AVFoundationVideoCapturer; only frame delivery
// is defined.
enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};
588
Magnus Jedvert0bade0d2016-09-01 15:15:00 +0200589AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
kthelgason4a85abb2016-08-19 01:24:46 -0700590 // Set our supported formats. This matches kAvailablePresets.
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800591 _capturer =
592 [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
kthelgason4a85abb2016-08-19 01:24:46 -0700593
594 std::vector<cricket::VideoFormat> supported_formats;
595 int framerate = 30;
596
597#if TARGET_OS_IPHONE
598 if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
599 set_enable_video_adapter(false);
600 framerate = 15;
601 }
602#endif
603
604 for (const auto preset : kAvailablePresets) {
605 if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
606 const auto format = cricket::VideoFormat(
607 preset.width,
608 preset.height,
609 cricket::VideoFormat::FpsToInterval(framerate),
610 cricket::FOURCC_NV12);
611 supported_formats.push_back(format);
612 }
613 }
614
615 SetSupportedFormats(supported_formats);
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800616}
617
618AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
619 _capturer = nil;
620}
621
622cricket::CaptureState AVFoundationVideoCapturer::Start(
623 const cricket::VideoFormat& format) {
624 if (!_capturer) {
625 LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
626 return cricket::CaptureState::CS_FAILED;
627 }
628 if (_capturer.isRunning) {
629 LOG(LS_ERROR) << "The capturer is already running.";
630 return cricket::CaptureState::CS_FAILED;
631 }
kthelgason4a85abb2016-08-19 01:24:46 -0700632
633 NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
634 RTC_DCHECK(desiredPreset);
635
636 [_capturer.captureSession beginConfiguration];
637 if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
638 LOG(LS_ERROR) << "Unsupported video format.";
639 [_capturer.captureSession commitConfiguration];
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800640 return cricket::CaptureState::CS_FAILED;
641 }
kthelgason4a85abb2016-08-19 01:24:46 -0700642 _capturer.captureSession.sessionPreset = desiredPreset;
643 [_capturer.captureSession commitConfiguration];
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800644
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800645 SetCaptureFormat(&format);
646 // This isn't super accurate because it takes a while for the AVCaptureSession
647 // to spin up, and this call returns async.
648 // TODO(tkchin): make this better.
tkchin89717aa2016-03-31 17:14:04 -0700649 [_capturer start];
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800650 SetCaptureState(cricket::CaptureState::CS_RUNNING);
651
kthelgason4a85abb2016-08-19 01:24:46 -0700652 // Adjust the framerate for all capture devices.
653 const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
654 AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
655 [_capturer setMinFrameDuration:CMTimeMake(1, fps)forDevice:activeDevice];
656
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800657 return cricket::CaptureState::CS_STARTING;
658}
659
660void AVFoundationVideoCapturer::Stop() {
tkchin89717aa2016-03-31 17:14:04 -0700661 [_capturer stop];
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800662 SetCaptureFormat(NULL);
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800663}
664
665bool AVFoundationVideoCapturer::IsRunning() {
666 return _capturer.isRunning;
667}
668
669AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
670 return _capturer.captureSession;
671}
672
hjona1cf3662016-03-14 20:55:22 -0700673bool AVFoundationVideoCapturer::CanUseBackCamera() const {
674 return _capturer.canUseBackCamera;
675}
676
Jon Hjelle7ac8bab2016-01-21 11:44:55 -0800677void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
678 _capturer.useBackCamera = useBackCamera;
679}
680
681bool AVFoundationVideoCapturer::GetUseBackCamera() const {
682 return _capturer.useBackCamera;
683}
684
// Receives one captured CMSampleBuffer (on the frame queue), wraps it in a
// VideoFrameBuffer, lets the base class adapt (scale/crop/drop), optionally
// applies rotation, and delivers the result via OnFrame().
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sample_buffer, webrtc::VideoRotation rotation) {
  // Reject buffers that are invalid, not yet ready, or hold != 1 sample.
  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
      !CMSampleBufferIsValid(sample_buffer) ||
      !CMSampleBufferDataIsReady(sample_buffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (image_buffer == NULL) {
    return;
  }

  // Zero-copy wrap of the CoreVideo pixel buffer (NV12).
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  // AdaptFrame decides the output size/crop and translates the camera
  // timestamp; a false return means this frame should be dropped.
  if (!AdaptFrame(captured_width, captured_height,
                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    return;
  }

  // Only convert/copy when scaling, cropping, or rotation is required;
  // otherwise the NV12 buffer is forwarded as-is.
  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height ||
      (apply_rotation() && rotation != webrtc::kVideoRotation_0)) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) {
      buffer = scaled_buffer;
    } else {
      // Applying rotation is only supported for legacy reasons and performance
      // is not critical here.
      // 90/270-degree rotations swap the destination width and height.
      rtc::scoped_refptr<webrtc::I420Buffer> rotated_buffer(
          (rotation == webrtc::kVideoRotation_180)
              ? I420Buffer::Create(adapted_width, adapted_height)
              : I420Buffer::Create(adapted_height, adapted_width));
      libyuv::I420Rotate(
          scaled_buffer->DataY(), scaled_buffer->StrideY(),
          scaled_buffer->DataU(), scaled_buffer->StrideU(),
          scaled_buffer->DataV(), scaled_buffer->StrideV(),
          rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
          rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
          rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
          crop_width, crop_height,
          static_cast<libyuv::RotationMode>(rotation));
      buffer = rotated_buffer;
    }
  }

  // Hand the (possibly rotated/scaled) frame to the cricket pipeline.
  OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);
}
754
755} // namespace webrtc