/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include <algorithm>
#include <iterator>
#include <set>
#include <vector>

#include "libyuv/rotate.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/common_video/rotation.h"

// TODO(denicija): add support for higher frame rates.
// See http://crbug/webrtc/6355 for more info.
static const int kFramesPerSecond = 30;

static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}
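
// Note that kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, the subtype that
// GetDeviceFormatForVideoFormat below treats as the preferred match, is not in
// this filter, so in practice the "preferred format" branch there can only be
// taken if this list is extended.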

static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) {
  return range.minFrameRate <= fps && range.maxFrameRate >= fps;
}

// Returns a filtered array of device formats based on predefined constraints
// our stack imposes.
static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats(
    const AVCaptureDevice *device,
    int supportedFps) {
  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats =
      [NSMutableArray array];

  for (AVCaptureDeviceFormat *format in device.formats) {
    // Filter out subTypes that we currently don't support in the stack.
    FourCharCode mediaSubType =
        CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (!IsMediaSubTypeSupported(mediaSubType)) {
      continue;
    }

    // Filter out frame rate ranges that we currently don't support in the
    // stack.
    for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
      if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
        [eligibleDeviceFormats addObject:format];
        break;
      }
    }
  }

  return [eligibleDeviceFormats copy];
}

75// Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
76static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat(
77 const AVCaptureDevice *device,
78 const cricket::VideoFormat &videoFormat) {
79 AVCaptureDeviceFormat *desiredDeviceFormat = nil;
80 NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
81 GetEligibleDeviceFormats(device, videoFormat.framerate());
82
83 for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
84 CMVideoDimensions dimension =
85 CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
86 FourCharCode mediaSubType =
87 CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);
88
89 if (videoFormat.width == dimension.width &&
90 videoFormat.height == dimension.height) {
91 if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
92 // This is the preferred format so no need to wait for better option.
93 return deviceFormat;
94 } else {
95 // This is good candidate, but let's wait for something better.
96 desiredDeviceFormat = deviceFormat;
97 }
98 }
99 }
100
101 return desiredDeviceFormat;
102}

// Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for a given
// input device.
static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
    AVCaptureDevice *device) {
  std::set<cricket::VideoFormat> supportedFormats;

  NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
      GetEligibleDeviceFormats(device, kFramesPerSecond);

  for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
    CMVideoDimensions dimension =
        CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
    cricket::VideoFormat format = cricket::VideoFormat(
        dimension.width, dimension.height,
        cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
        cricket::FOURCC_NV12);
    supportedFormats.insert(format);
  }

  return supportedFormats;
}

// Sets device format for the provided capture device. Returns YES/NO depending
// on success.
// TODO(denicija): When this file is split this static method should be
// reconsidered. Perhaps adding a category on AVCaptureDevice would be better.
static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device,
                                      AVCaptureSession *session,
                                      const cricket::VideoFormat &format) {
  AVCaptureDeviceFormat *deviceFormat =
      GetDeviceFormatForVideoFormat(device, format);
  const int fps = cricket::VideoFormat::IntervalToFps(format.interval);

  NSError *error = nil;
  BOOL success = YES;
  [session beginConfiguration];
  if ([device lockForConfiguration:&error]) {
    @try {
      device.activeFormat = deviceFormat;
      device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
    } @catch (NSException *exception) {
      RTCLogError(
          @"Failed to set active format!\n User info:%@",
          exception.userInfo);
      success = NO;
    }

    [device unlockForConfiguration];
  } else {
    RTCLogError(
        @"Failed to lock device %@. Error: %@",
        device, error.userInfo);
    success = NO;
  }
  [session commitConfiguration];

  return success;
}
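
// Illustrative use only (hypothetical call site; the real callers are Start()
// and updateSessionInputForUseBackCamera: below):
//   cricket::VideoFormat format(640, 480,
//       cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
//       cricket::FOURCC_NV12);
//   SetFormatForCaptureDevice(device, session, format);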

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not reference counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(atomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;

- (nullable AVCaptureDevice *)frontCaptureDevice;
- (nullable AVCaptureDevice *)backCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  webrtc::VideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

- (AVCaptureDevice *)frontCaptureDevice {
  return _frontCameraInput.device;
}

- (AVCaptureDevice *)backCaptureDevice {
  return _backCameraInput.device;
}

- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}
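
// The queue above must be serial: AVCaptureVideoDataOutput requires a serial
// dispatch queue for -setSampleBufferDelegate:queue: so that frames are
// delivered in capture order. Targeting the high-priority global queue lowers
// delivery latency without making the queue concurrent.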

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                     " not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    // Default to portrait orientation on iPhone. This will be reset in
    // updateOrientation unless orientation is unknown/faceup/facedown.
    _rotation = webrtc::kVideoRotation_90;
#else
    // No rotation on Mac.
    _rotation = webrtc::kVideoRotation_0;
#endif
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
    && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted && !self.captureSession.isRunning) {
      RTCLog(@"Restarting capture session on active.");
      [self.captureSession startRunning];
    }
  }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  captureSession.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera, give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420, but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0
                                                : webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180
                                                : webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}
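
// The landscape cases flip between 0 and 180 degrees depending on the active
// camera, presumably because the front- and back-facing sensors are mounted
// with opposite landscape orientations relative to the device.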

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be a no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    AVCaptureDevice *newDevice = newInput.device;
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    SetFormatForCaptureDevice(newDevice, _captureSession, *format);
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  std::set<cricket::VideoFormat> front_camera_video_formats =
      GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);

  std::set<cricket::VideoFormat> back_camera_video_formats =
      GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);

  std::vector<cricket::VideoFormat> intersection_video_formats;
  if (back_camera_video_formats.empty()) {
    intersection_video_formats.assign(front_camera_video_formats.begin(),
                                      front_camera_video_formats.end());
  } else if (front_camera_video_formats.empty()) {
    intersection_video_formats.assign(back_camera_video_formats.begin(),
                                      back_camera_video_formats.end());
  } else {
    std::set_intersection(
        front_camera_video_formats.begin(), front_camera_video_formats.end(),
        back_camera_video_formats.begin(), back_camera_video_formats.end(),
        std::back_inserter(intersection_video_formats));
  }
  SetSupportedFormats(intersection_video_formats);
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
  AVCaptureSession* session = _capturer.captureSession;

  if (!SetFormatForCaptureDevice(device, session, format)) {
    return cricket::CaptureState::CS_FAILED;
  }

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sample_buffer, VideoRotation rotation) {
  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
      !CMSampleBufferIsValid(sample_buffer) ||
      !CMSampleBufferDataIsReady(sample_buffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (image_buffer == NULL) {
    return;
  }

  const int captured_width = CVPixelBufferGetWidth(image_buffer);
  const int captured_height = CVPixelBufferGetHeight(image_buffer);

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(captured_width, captured_height,
                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    return;
  }

  rtc::scoped_refptr<VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<CoreVideoFrameBuffer>(
          image_buffer,
          adapted_width, adapted_height,
          crop_width, crop_height,
          crop_x, crop_y);

  // Applying rotation is only supported for legacy reasons, and performance is
  // not critical here.
  if (apply_rotation() && rotation != kVideoRotation_0) {
    buffer = buffer->NativeToI420Buffer();
    // A 90- or 270-degree rotation swaps width and height, so the destination
    // buffer is allocated with the dimensions transposed in those cases.
    rtc::scoped_refptr<I420Buffer> rotated_buffer =
        (rotation == kVideoRotation_180)
            ? I420Buffer::Create(adapted_width, adapted_height)
            : I420Buffer::Create(adapted_height, adapted_width);
    libyuv::I420Rotate(
        buffer->DataY(), buffer->StrideY(),
        buffer->DataU(), buffer->StrideU(),
        buffer->DataV(), buffer->StrideV(),
        rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
        rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
        rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
        buffer->width(), buffer->height(),
        static_cast<libyuv::RotationMode>(rotation));
    buffer = rotated_buffer;
  }

  OnFrame(webrtc::VideoFrame(buffer, rotation, translated_camera_time_us),
          captured_width, captured_height);
}
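
// OnFrame() is inherited from cricket::VideoCapturer and forwards the frame
// into the WebRTC pipeline; the last two arguments are the unadapted capture
// dimensions.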

}  // namespace webrtc