/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include "libyuv/rotate.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"
#include "webrtc/common_video/rotation.h"

// TODO(denicija): add support for higher frame rates.
// See http://crbug.com/webrtc/6355 for more info.
static const int kFramesPerSecond = 30;

static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
  return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
          mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}

static inline BOOL IsFrameRateWithinRange(int fps, AVFrameRateRange *range) {
  return range.minFrameRate <= fps && range.maxFrameRate >= fps;
}

// Returns a filtered array of device formats based on predefined constraints
// that our stack imposes.
static NSArray<AVCaptureDeviceFormat *> *GetEligibleDeviceFormats(
    const AVCaptureDevice *device,
    int supportedFps) {
  NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats =
      [NSMutableArray array];

  for (AVCaptureDeviceFormat *format in device.formats) {
    // Filter out subTypes that we currently don't support in the stack.
    FourCharCode mediaSubType =
        CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (!IsMediaSubTypeSupported(mediaSubType)) {
      continue;
    }

    // Filter out frame rate ranges that we currently don't support in the
    // stack.
    for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
      if (IsFrameRateWithinRange(supportedFps, frameRateRange)) {
        [eligibleDeviceFormats addObject:format];
        break;
      }
    }
  }

  return [eligibleDeviceFormats copy];
}

// Mapping from cricket::VideoFormat to AVCaptureDeviceFormat.
static AVCaptureDeviceFormat *GetDeviceFormatForVideoFormat(
    const AVCaptureDevice *device,
    const cricket::VideoFormat &videoFormat) {
  AVCaptureDeviceFormat *desiredDeviceFormat = nil;
  NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
      GetEligibleDeviceFormats(device, videoFormat.framerate());

  for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
    CMVideoDimensions dimension =
        CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
    FourCharCode mediaSubType =
        CMFormatDescriptionGetMediaSubType(deviceFormat.formatDescription);

    if (videoFormat.width == dimension.width &&
        videoFormat.height == dimension.height) {
      if (mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
        // This is the preferred format, so there is no need to wait for a
        // better option.
        return deviceFormat;
      } else {
        // This is a good candidate, but let's wait for something better.
        desiredDeviceFormat = deviceFormat;
      }
    }
  }

  return desiredDeviceFormat;
}

// Mapping from AVCaptureDeviceFormat to cricket::VideoFormat for a given
// input device.
static std::set<cricket::VideoFormat> GetSupportedVideoFormatsForDevice(
    AVCaptureDevice *device) {
  std::set<cricket::VideoFormat> supportedFormats;

  NSArray<AVCaptureDeviceFormat *> *eligibleFormats =
      GetEligibleDeviceFormats(device, kFramesPerSecond);

  for (AVCaptureDeviceFormat *deviceFormat in eligibleFormats) {
    CMVideoDimensions dimension =
        CMVideoFormatDescriptionGetDimensions(deviceFormat.formatDescription);
    cricket::VideoFormat format = cricket::VideoFormat(
        dimension.width, dimension.height,
        cricket::VideoFormat::FpsToInterval(kFramesPerSecond),
        cricket::FOURCC_NV12);
    supportedFormats.insert(format);
  }

  return supportedFormats;
}
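
// For illustration: an eligible 640x480 device format is mapped to
// cricket::VideoFormat(640, 480, cricket::VideoFormat::FpsToInterval(30),
// cricket::FOURCC_NV12).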

// Sets the device format for the provided capture device. Returns YES on
// success, NO otherwise.
// TODO(denicija): When this file is split this static method should be
// reconsidered. Perhaps adding a category on AVCaptureDevice would be better.
static BOOL SetFormatForCaptureDevice(AVCaptureDevice *device,
                                      AVCaptureSession *session,
                                      const cricket::VideoFormat &format) {
  AVCaptureDeviceFormat *deviceFormat =
      GetDeviceFormatForVideoFormat(device, format);
  const int fps = cricket::VideoFormat::IntervalToFps(format.interval);

  NSError *error = nil;
  BOOL success = YES;
  [session beginConfiguration];
  if ([device lockForConfiguration:&error]) {
    @try {
      device.activeFormat = deviceFormat;
      device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
    } @catch (NSException *exception) {
      RTCLogError(
          @"Failed to set active format!\nUser info: %@",
          exception.userInfo);
      success = NO;
    }

    [device unlockForConfiguration];
  } else {
    RTCLogError(
        @"Failed to lock device %@. Error: %@",
        device, error.userInfo);
    success = NO;
  }
  [session commitConfiguration];

  return success;
}
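
// A rough sketch of the category-based alternative mentioned in the TODO
// above. The selector below is hypothetical and does not exist in the
// current API:
//
//   @interface AVCaptureDevice (RTCVideoFormat)
//   - (BOOL)rtc_setFormat:(const cricket::VideoFormat &)format
//                 session:(AVCaptureSession *)session;
//   @end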

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not reference counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(atomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (AVCaptureDevice *)getActiveCaptureDevice;

- (nullable AVCaptureDevice *)frontCaptureDevice;
- (nullable AVCaptureDevice *)backCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
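
// Typical lifecycle as driven by the owning C++ object (a sketch; the real
// call sites are in AVFoundationVideoCapturer below). Note that -stop must
// run before the object goes away, since -dealloc checks for an unmatched
// start:
//
//   _capturer = [[RTCAVFoundationVideoCapturerInternal alloc]
//       initWithCapturer:this];
//   [_capturer start];
//   // Frames arrive via the sample buffer delegate on the frame queue.
//   [_capturer stop];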

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  webrtc::VideoRotation _rotation;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;

@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

- (AVCaptureDevice *)frontCaptureDevice {
  return _frontCameraInput.device;
}

- (AVCaptureDevice *)backCaptureDevice {
  return _backCameraInput.device;
}

- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
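    // Target the high-priority global queue so that frame delivery callbacks
    // are not delayed behind default-priority work.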
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                    " not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    // Default to portrait orientation on iPhone. This will be reset in
    // updateOrientation unless orientation is unknown/faceup/facedown.
    _rotation = webrtc::kVideoRotation_90;
#else
    // No rotation on Mac.
    _rotation = webrtc::kVideoRotation_0;
#endif
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) \
    && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted && !self.captureSession.isRunning) {
      RTCLog(@"Restarting capture session on active.");
      [self.captureSession startRunning];
    }
  }];
}

#endif  // TARGET_OS_IPHONE

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  captureSession.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera,
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
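    // NO gives the delegate more time to process each frame before it is
    // discarded, at the cost of potentially higher memory usage.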
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
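  // Note: the mapping below implies the sensor's natural orientation matches
  // UIDeviceOrientationLandscapeRight (no rotation needed), so portrait
  // frames are tagged with a 90 degree rotation for consumers to apply.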
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation = webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation = webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore.
      break;
  }
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // OK to remove this even if it's not attached; it will be a no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    AVCaptureDevice *newDevice = newInput.device;
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    SetFormatForCaptureDevice(newDevice, _captureSession, *format);
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  std::set<cricket::VideoFormat> front_camera_video_formats =
      GetSupportedVideoFormatsForDevice([_capturer frontCaptureDevice]);

  std::set<cricket::VideoFormat> back_camera_video_formats =
      GetSupportedVideoFormatsForDevice([_capturer backCaptureDevice]);

  std::vector<cricket::VideoFormat> intersection_video_formats;
  if (back_camera_video_formats.empty()) {
    intersection_video_formats.assign(front_camera_video_formats.begin(),
                                      front_camera_video_formats.end());
  } else if (front_camera_video_formats.empty()) {
    intersection_video_formats.assign(back_camera_video_formats.begin(),
                                      back_camera_video_formats.end());
  } else {
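    // Both sets iterate in sorted order, which satisfies
    // std::set_intersection's requirement of sorted input ranges.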
    std::set_intersection(
        front_camera_video_formats.begin(), front_camera_video_formats.end(),
        back_camera_video_formats.begin(), back_camera_video_formats.end(),
        std::back_inserter(intersection_video_formats));
  }
  SetSupportedFormats(intersection_video_formats);
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  AVCaptureDevice* device = [_capturer getActiveCaptureDevice];
  AVCaptureSession* session = _capturer.captureSession;

  if (!SetFormatForCaptureDevice(device, session, format)) {
    return cricket::CaptureState::CS_FAILED;
  }

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns asynchronously.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sample_buffer, webrtc::VideoRotation rotation) {
  if (CMSampleBufferGetNumSamples(sample_buffer) != 1 ||
      !CMSampleBufferIsValid(sample_buffer) ||
      !CMSampleBufferDataIsReady(sample_buffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
  if (image_buffer == NULL) {
    return;
  }

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

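  // AdaptFrame() lets the base class decide whether to deliver this frame
  // and, if so, how it should be cropped and scaled; it returns false when
  // the frame should be dropped.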
  if (!AdaptFrame(captured_width, captured_height,
                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    return;
  }

  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height ||
      (apply_rotation() && rotation != webrtc::kVideoRotation_0)) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    if (!apply_rotation() || rotation == webrtc::kVideoRotation_0) {
      buffer = scaled_buffer;
    } else {
      // Applying rotation is only supported for legacy reasons and
      // performance is not critical here.
      rtc::scoped_refptr<webrtc::I420Buffer> rotated_buffer(
          (rotation == webrtc::kVideoRotation_180)
              ? I420Buffer::Create(adapted_width, adapted_height)
              : I420Buffer::Create(adapted_height, adapted_width));
      libyuv::I420Rotate(
          scaled_buffer->DataY(), scaled_buffer->StrideY(),
          scaled_buffer->DataU(), scaled_buffer->StrideU(),
          scaled_buffer->DataV(), scaled_buffer->StrideV(),
          rotated_buffer->MutableDataY(), rotated_buffer->StrideY(),
          rotated_buffer->MutableDataU(), rotated_buffer->StrideU(),
          rotated_buffer->MutableDataV(), rotated_buffer->StrideV(),
          crop_width, crop_height,
          static_cast<libyuv::RotationMode>(rotation));
      buffer = rotated_buffer;
    }
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);
}

}  // namespace webrtc