/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include "libyuv/rotate.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/common_video/rotation.h"

// TODO(denicija): add support for higher frame rates.
// See https://crbug.com/webrtc/6355 for more info.
static const int kFramesPerSecond = 30;

static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}

static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) {
  return range.minFrameRate <= fps && range.maxFrameRate >= fps;
}

// Returns a filtered array of device formats based on predefined constraints
// our stack imposes.
static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats(
    const AVCaptureDevice *device,
    int supportedFps) {
  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats =
      [NSMutableArray array];

  for (AVCaptureDeviceFormat *format in device.formats) {
    // Filter out media subtypes that we currently don't support in the stack.
    FourCharCode mediaSubType =
        CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (!IsMediaSubTypeSupported(mediaSubType)) {
      continue;
    }

    // Filter out frame rate ranges that we currently don't support in the
    // stack.
    for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
      if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
        [eligibleDeviceFormats addObject:format];
        break;
      }
    }
  }

  return [eligibleDeviceFormats copy];
}

// Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat(
    const AVCaptureDevice *device,
    const cricket::VideoFormat &videoFormat) {
  AVCaptureDeviceFormat *desiredDeviceFormat = nil;
  NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
      GetEligibleDeviceFormats(device, videoFormat.framerate());

  for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
    CMVideoDimensions dimension =
        CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
    FourCharCode mediaSubType =
        CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);

    if (videoFormat.width == dimension.width &&
        videoFormat.height == dimension.height) {
      if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
        // This is the preferred format, so there is no need to wait for a
        // better option.
        return deviceFormat;
      } else {
        // This is a good candidate, but let's wait for something better.
        desiredDeviceFormat = deviceFormat;
      }
    }
  }

  return desiredDeviceFormat;
}

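// Illustrative sketch (not part of the original file): requesting the device
// format for 640x480 at 30 fps. The result may be nil if nothing matches.
//
//   cricket::VideoFormat videoFormat(
//       640, 480, cricket::VideoFormat::FpsToInterval(30),
//       cricket::FOURCC_NV12);
//   AVCaptureDeviceFormat *deviceFormat =
//       GetDeviceFormatForVideoFormat(device, videoFormat);
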
// Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for a given
// input device.
static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
    AVCaptureDevice *device) {
  std::set<cricket::VideoFormat> supportedFormats;

  NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
      GetEligibleDeviceFormats(device, kFramesPerSecond);

  for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
    CMVideoDimensions dimension =
        CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
    cricket::VideoFormat format = cricket::VideoFormat(
        dimension.width, dimension.height,
        cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
        cricket::FOURCC_NV12);
    supportedFormats.insert(format);
  }

  return supportedFormats;
}

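// Illustrative sketch (not part of the original file): logging the
// cricket-level formats a device supports at the pinned 30 fps rate.
//
//   std::set<cricket::VideoFormat> formats =
//       GetSupportedVideoFormatsForDevice(device);
//   for (const cricket::VideoFormat &f : formats) {
//     LOG(LS_INFO) << "Supported: " << f.ToString();
//   }
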
// Sets device format for the provided capture device. Returns YES/NO depending
// on success.
// TODO(denicija): When this file is split this static method should be
// reconsidered. Perhaps adding a category on AVCaptureDevice would be better.
static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device,
                                      AVCaptureSession *session,
                                      const cricket::VideoFormat &format) {
  AVCaptureDeviceFormat *deviceFormat =
      GetDeviceFormatForVideoFormat(device, format);
  const int fps = cricket::VideoFormat::IntervalToFps(format.interval);

  NSError *error = nil;
  BOOL success = YES;
  [session beginConfiguration];
  if ([device lockForConfiguration:&error]) {
    @try {
      device.activeFormat = deviceFormat;
      device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
    } @catch (NSException *exception) {
      RTCLogError(
          @"Failed to set active format!\n User info:%@",
          exception.userInfo);
      success = NO;
    }

    [device unlockForConfiguration];
  } else {
    RTCLogError(
        @"Failed to lock device %@. Error: %@",
        device, error.userInfo);
    success = NO;
  }
  [session commitConfiguration];

  return success;
}

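// Illustrative sketch (not part of the original file): applying a capture
// format to the active device. `device` and `session` are assumed to come
// from an already-configured capturer.
//
//   cricket::VideoFormat format(
//       1280, 720, cricket::VideoFormat::FpsToInterval(30),
//       cricket::FOURCC_NV12);
//   if (!SetFormatForCaptureDevice(device, session, format)) {
//     RTCLogError(@"Could not apply 1280x720@30 to the capture device.");
//   }
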
// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason for
// this is that other webrtc objects own cricket::VideoCapturer, which is not
// reference counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(atomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;

- (nullable AVCaptureDevice *)frontCaptureDevice;
- (nullable AVCaptureDevice *)backCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

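// Illustrative lifecycle sketch (not part of the original file), assuming
// `capturer` is a live webrtc::AVFoundationVideoCapturer:
//
//   RTCAVFoundationVideoCapturerInternal *internal =
//       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:capturer];
//   [internal start];                // async; session spins up on its own queue
//   if (internal.canUseBackCamera) {
//     internal.useBackCamera = YES;  // switches inputs on the session queue
//   }
//   [internal stop];                 // must balance start before dealloc
//
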
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  webrtc::VideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

- (AVCaptureDevice *)frontCaptureDevice {
  return _frontCameraInput.device;
}

- (AVCaptureDevice *)backCaptureDevice {
  return _backCameraInput.device;
}

- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    // Default to portrait orientation on iPhone. This will be reset in
    // updateOrientation unless orientation is unknown/faceup/facedown.
    _rotation = webrtc::kVideoRotation_90;
#else
    // No rotation on Mac.
    _rotation = webrtc::kVideoRotation_0;
#endif
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
    && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

431- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
adam.fedorfc22e032016-06-08 17:24:37 -0700432 NSError *error =
433 [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
haysc7a11ae32016-07-29 12:03:51 -0700434 RTCLogError(@"Capture session runtime error: %@", error);
Zeke Chin52516802016-06-03 11:59:22 -0700435
436 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
437 block:^{
adam.fedorfc22e032016-06-08 17:24:37 -0700438#if TARGET_OS_IPHONE
Zeke Chin52516802016-06-03 11:59:22 -0700439 if (error.code == AVErrorMediaServicesWereReset) {
440 [self handleNonFatalError];
441 } else {
442 [self handleFatalError];
443 }
adam.fedorfc22e032016-06-08 17:24:37 -0700444#else
445 [self handleFatalError];
446#endif
Zeke Chin52516802016-06-03 11:59:22 -0700447 }];
448}
449
450- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
451 RTCLog(@"Capture session started.");
denicijaedbae5e2016-09-30 00:21:11 -0700452
Zeke Chin52516802016-06-03 11:59:22 -0700453 self.isRunning = YES;
454 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
455 block:^{
456 // If we successfully restarted after an unknown error, allow future
457 // retries on fatal errors.
458 _hasRetriedOnFatalError = NO;
459 }];
460}
461
462- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
463 RTCLog(@"Capture session stopped.");
464 self.isRunning = NO;
465}
466
467- (void)handleFatalError {
468 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
469 block:^{
470 if (!_hasRetriedOnFatalError) {
471 RTCLogWarning(@"Attempting to recover from fatal capture error.");
472 [self handleNonFatalError];
473 _hasRetriedOnFatalError = YES;
474 } else {
475 RTCLogError(@"Previous fatal error recovery failed.");
476 }
477 }];
478}
479
480- (void)handleNonFatalError {
481 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
482 block:^{
483 if (self.hasStarted) {
484 RTCLog(@"Restarting capture session after error.");
485 [self.captureSession startRunning];
486 }
487 }];
488}
489
tkchin11840252016-08-24 12:05:56 -0700490#if TARGET_OS_IPHONE
491
492#pragma mark - UIApplication notifications
493
494- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
495 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
496 block:^{
497 if (self.hasStarted && !self.captureSession.isRunning) {
498 RTCLog(@"Restarting capture session on active.");
499 [self.captureSession startRunning];
500 }
501 }];
502}
503
504#endif // TARGET_OS_IPHONE
505
#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  captureSession.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}

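// Illustrative sketch (not part of the original file): an application can
// attach a preview layer to the session created above before capture starts.
// `capturerInternal` and `view` are assumed to exist in the host app.
//
//   AVCaptureVideoPreviewLayer *previewLayer =
//       [AVCaptureVideoPreviewLayer
//           layerWithSession:capturerInternal.captureSession];
//   previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//   previewLayer.frame = view.bounds;
//   [view.layer addSublayer:previewLayer];
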
- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    AVCaptureDevice *newDevice = newInput.device;
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    if (format) {
      // GetCaptureFormat() may return null if capture has not started;
      // only update the device format when one is set.
      SetFormatForCaptureDevice(newDevice, _captureSession, *format);
    }
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  std::set<cricket::VideoFormat> front_camera_video_formats =
      GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);

  std::set<cricket::VideoFormat> back_camera_video_formats =
      GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);

  std::vector<cricket::VideoFormat> intersection_video_formats;
  if (back_camera_video_formats.empty()) {
    intersection_video_formats.assign(front_camera_video_formats.begin(),
                                      front_camera_video_formats.end());
  } else if (front_camera_video_formats.empty()) {
    intersection_video_formats.assign(back_camera_video_formats.begin(),
                                      back_camera_video_formats.end());
  } else {
    std::set_intersection(
        front_camera_video_formats.begin(), front_camera_video_formats.end(),
        back_camera_video_formats.begin(), back_camera_video_formats.end(),
        std::back_inserter(intersection_video_formats));
  }
  SetSupportedFormats(intersection_video_formats);
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
  AVCaptureSession* session = _capturer.captureSession;

  if (!SetFormatForCaptureDevice(device, session, format)) {
    return cricket::CaptureState::CS_FAILED;
  }

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns asynchronously.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

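// Illustrative sketch (not part of the original file): driving the capturer
// from C++. In the real stack this is done by webrtc's VideoCapturer plumbing.
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   cricket::VideoFormat format(
//       640, 480, cricket::VideoFormat::FpsToInterval(30),
//       cricket::FOURCC_NV12);
//   if (capturer.Start(format) == cricket::CaptureState::CS_STARTING) {
//     // Frames arrive via OnFrame() until Stop() is called.
//     capturer.Stop();
//   }
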
void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sample_buffer, webrtc::VideoRotation rotation) {
  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
      !CMSampleBufferIsValid(sample_buffer) ||
      !CMSampleBufferDataIsReady(sample_buffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (image_buffer == NULL) {
    return;
  }

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(captured_width, captured_height,
                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    return;
  }

  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height ||
      (apply_rotation() && rotation != webrtc::kVideoRotation_0)) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) {
      buffer = scaled_buffer;
    } else {
      // Applying rotation is only supported for legacy reasons and performance
      // is not critical here.
      rtc::scoped_refptr<webrtc::I420Buffer> rotated_buffer(
          (rotation == webrtc::kVideoRotation_180)
              ? I420Buffer::Create(adapted_width, adapted_height)
              : I420Buffer::Create(adapted_height, adapted_width));
      libyuv::I420Rotate(
          scaled_buffer->DataY(), scaled_buffer->StrideY(),
          scaled_buffer->DataU(), scaled_buffer->StrideU(),
          scaled_buffer->DataV(), scaled_buffer->StrideV(),
          rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
          rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
          rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
          crop_width, crop_height,
          static_cast<libyuv::RotationMode>(rotation));
      buffer = rotated_buffer;
    }
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);
}

}  // namespace webrtc