blob: 7025d888ec7fc3ce625c9546ac56febfae1163d7 [file] [log] [blame]
Jon Hjelle7ac8bab2016-01-21 11:44:55 -08001/*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
tkchin9eeb6242016-04-27 01:54:20 -070011#include "avfoundationvideocapturer.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080012
13#import <AVFoundation/AVFoundation.h>
14#import <Foundation/Foundation.h>
adam.fedorfc22e032016-06-08 17:24:37 -070015#if TARGET_OS_IPHONE
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080016#import <UIKit/UIKit.h>
adam.fedorfc22e032016-06-08 17:24:37 -070017#endif
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080018
tkchin9eeb6242016-04-27 01:54:20 -070019#import "RTCDispatcher+Private.h"
20#import "WebRTC/RTCLogging.h"
tkchind7629102016-07-28 14:52:55 -070021#if TARGET_OS_IPHONE
22#import "WebRTC/UIDevice+RTCDevice.h"
23#endif
tkchin9eeb6242016-04-27 01:54:20 -070024
25#include "webrtc/base/bind.h"
26#include "webrtc/base/checks.h"
27#include "webrtc/base/thread.h"
magjed39607c92016-07-14 08:12:17 -070028#include "webrtc/common_video/include/corevideo_frame_buffer.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080029
// Associates an AVCaptureSession preset string with the capture resolution
// it produces.
struct AVCaptureSessionPresetResolution {
  NSString *sessionPreset;
  int width;
  int height;
};

#if TARGET_OS_IPHONE
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset1280x720, 1280, 720},
  { AVCaptureSessionPreset1920x1080, 1920, 1080},
};
#else  // macOS
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset320x240, 320, 240},
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset960x540, 960, 540},
  { AVCaptureSessionPreset1280x720, 1280, 720},
};
#endif

// Maps a cricket::VideoFormat to the AVCaptureSession preset whose resolution
// matches it in either orientation (WxH or HxW). Falls back to the 640x480
// preset when no entry in kAvailablePresets matches.
static NSString *GetSessionPresetForVideoFormat(
    const cricket::VideoFormat& format) {
  for (const auto preset : kAvailablePresets) {
    // Accept both orientations of the requested format.
    if ((format.width == preset.width && format.height == preset.height) ||
        (format.width == preset.height && format.height == preset.width)) {
      return preset.sessionPreset;
    }
  }
  // No exact match; use a safe default preset.
  return AVCaptureSessionPreset640x480;
}
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080066
// This class used to capture frames using AVFoundation APIs on iOS. It is meant
// to be owned by an instance of AVFoundationVideoCapturer. The reason for this
// because other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
93
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  // Guards _isRunning, which is written on the capture session queue and read
  // from WebRTC threads.
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;
// This is called from the thread that creates the video source, which is likely
// the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}
166
- (AVCaptureSession *)captureSession {
  return _captureSession;
}

// Returns the device backing whichever camera input is currently selected.
- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

// Lazily creates the serial queue sample buffers are delivered on. The queue
// targets the high-priority global queue.
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}
186
// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                    "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// isRunning is read/written across threads, so both accessors take _crit.
- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}
226
// Called from WebRTC thread. Starts the session asynchronously on the capture
// session dispatcher queue; hasStarted flips synchronously so repeated calls
// are no-ops.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}
263
#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Delivered on frameQueue; forwards the buffer to the owning C++ capturer
// unless capture has already been stopped.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
293
#pragma mark - AVCaptureSession notifications

// Logs the interruption reason when available (iOS 9+ SDKs expose the key).
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

// On iOS, a media-services reset is treated as recoverable; anything else is
// routed through the fatal-error path. macOS always takes the fatal path.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}
345
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

// Attempts one restart after a fatal session error; if a previous retry is
// still outstanding, only logs the failure.
- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

// Restarts the session, but only if a start request is still outstanding.
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// The session can be stopped by the OS while backgrounded; restart it when the
// app becomes active again if capture is still supposed to be on.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted && !self.captureSession.isRunning) {
      RTCLog(@"Restarting capture session on active.");
      [self.captureSession startRunning];
    }
  }];
}

#endif  // TARGET_OS_IPHONE
400
#pragma mark - Private

// Builds the capture session: attaches the NV12 video output and the camera
// input selected by useBackCamera. Returns NO if the output cannot be added
// or no front camera exists.
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}
442
// Lazily creates the video data output that delivers NV12 buffers on
// frameQueue.
- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

// Returns the first video device at |position|, or nil if none is found.
- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}
470
// Lazily creates the front camera input. On macOS the default video device
// stands in for the "front" camera. Returns nil on failure.
- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}
497
// Lazily creates the back camera input. Returns nil if the device has no
// rear-facing camera or the input cannot be created.
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      // Log messages previously said "front" here — copy-paste bug fixed.
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}
519
// Applies |minFrameDuration| to |device| (capping its frame rate), taking the
// required configuration lock for the duration of the change.
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  if (![device lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription);
    return;
  }
  device.activeVideoMinFrameDuration = minFrameDuration;
  [device unlockForConfiguration];
}
530
// Called from capture session queue. Mirrors the device orientation onto the
// video connection (iOS only).
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
#if TARGET_OS_IPHONE
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      // NOTE: the left/right swap below is intentional in the original code;
      // presumably because capture orientation is expressed from the camera's
      // point of view.
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Keep the previous orientation unless this is the initial update.
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
#endif
}
565
// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];

    // Reapply the configured frame rate to the newly active device. Guard
    // against a null capture format (GetCaptureFormat() returns null before
    // Start() has negotiated one) and a missing input, both of which would
    // previously have crashed or misconfigured the device.
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    if (newInput && format) {
      const auto fps = cricket::VideoFormat::IntervalToFps(format->interval);
      [self setMinFrameDuration:CMTimeMake(1, fps) forDevice:newInput.device];
    }
  }];
}
591
592@end
593
namespace webrtc {

// Message ids posted to the start thread.
enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

// Payload posted across threads: a retained pixel buffer plus its capture
// timestamp in nanoseconds.
struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};
606
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kAvailablePresets.
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  std::vector<cricket::VideoFormat> supported_formats;
  int framerate = 30;

#if TARGET_OS_IPHONE
  // The iPhone 4S cannot keep up with adaptation at 30 fps.
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    set_enable_video_adapter(false);
    framerate = 15;
  }
#endif

  // Advertise every preset the session actually supports as an NV12 format.
  for (const auto preset : kAvailablePresets) {
    if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
      const auto format = cricket::VideoFormat(
          preset.width,
          preset.height,
          cricket::VideoFormat::FpsToInterval(framerate),
          cricket::FOURCC_NV12);
      supported_formats.push_back(format);
    }
  }

  SetSupportedFormats(supported_formats);
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}
640
// Configures the session preset for |format|, records the thread frames must
// be posted back to, and starts capture asynchronously.
cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
  RTC_DCHECK(desiredPreset);

  [_capturer.captureSession beginConfiguration];
  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
    LOG(LS_ERROR) << "Unsupported video format.";
    [_capturer.captureSession commitConfiguration];
    return cricket::CaptureState::CS_FAILED;
  }
  _capturer.captureSession.sessionPreset = desiredPreset;
  [_capturer.captureSession commitConfiguration];

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  // Adjust the framerate for all capture devices.
  const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
  AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
  [_capturer setMinFrameDuration:CMTimeMake(1, fps) forDevice:activeDevice];

  return cricket::CaptureState::CS_STARTING;
}

// Stops capture and clears the negotiated format and start thread.
void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}
693
// Thin forwarding accessors onto the ObjC capturer object.

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}
709
// Called by the ObjC capturer when a sample buffer is delivered. Validates the
// buffer, then retains its pixel buffer and posts it to the start thread.
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

// Runs on _startThread; unwraps the posted frame message and dispatches it.
void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}
743
// Runs on _startThread. Wraps |image_buffer| (already retained by the poster)
// in a frame buffer, adapts/crops it if the adapter requests, delivers it via
// OnFrame, and releases the pixel buffer in every exit path.
void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time_ns) {
  RTC_DCHECK(_startThread->IsCurrent());

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  // AdaptFrame returning false means the frame should be dropped.
  if (!AdaptFrame(captured_width, captured_height,
                  capture_time_ns / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    CVBufferRelease(image_buffer);
    return;
  }

  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    buffer = scaled_buffer;
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);

  CVBufferRelease(image_buffer);
}
787
788} // namespace webrtc