/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
// iPhone4S is too slow to handle 30fps.
static cricket::VideoFormat const kIPhone4SFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(15),
                         cricket::FOURCC_NV12);
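// Note: cricket::VideoFormat::FpsToInterval() converts a frame rate into a
// frame interval in nanoseconds, e.g. FpsToInterval(30) == 10^9 / 30.
// Supporting another preset would mean pairing it with a matching format; a
// hypothetical sketch (not supported by this file as-is):
//   static NSString *const kHDPreset = AVCaptureSessionPreset1280x720;
//   static cricket::VideoFormat const kHDFormat =
//       cricket::VideoFormat(1280,
//                            720,
//                            cricket::VideoFormat::FpsToInterval(30),
//                            cricket::FOURCC_NV12);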

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
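
// Usage sketch (illustrative only): the C++ AVFoundationVideoCapturer below
// owns a single instance of this class and drives it roughly as follows.
//   RTCAVFoundationVideoCapturerInternal *internal =
//       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:capturer];
//   [internal start];              // Session startup completes asynchronously.
//   internal.useBackCamera = YES;  // No-op unless canUseBackCamera is YES.
//   [internal stop];               // Must balance the earlier start.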

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}
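
// Example (illustrative): because the session exists from init onwards, a
// client can attach a preview layer before capture starts, e.g.:
//   AVCaptureVideoPreviewLayer *previewLayer =
//       [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];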

- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
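    // Targeting the high-priority global queue keeps frame delivery prompt
    // without creating a dedicated thread; the serial queue above still
    // guarantees in-order callbacks.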
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

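// isRunning is set from the AVCaptureSession notification handlers below and
// read from the WebRTC thread via AVFoundationVideoCapturer::IsRunning(), so
// access is guarded by a critical section.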
- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

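// Error recovery: a fatal error triggers at most one automatic restart,
// tracked by _hasRetriedOnFatalError; the flag is reset once the session
// reports that it is running again in handleCaptureSessionDidStartRunning.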
- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:input.device];
  }
#endif
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
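    // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is CoreVideo's NV12
    // layout (a Y plane followed by an interleaved CbCr plane), matching
    // cricket::FOURCC_NV12 in the formats above.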
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

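// Clamping the minimum frame duration caps the maximum frame rate, e.g.
// CMTimeMake(1, 15) allows at most 15 fps; the device may still deliver
// fewer frames (for example in low light).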
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  if (![device lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device for configuration. Error: %@",
                error.localizedDescription);
    return;
  }
  device.activeVideoMinFrameDuration = minFrameDuration;
  [device unlockForConfiguration];
}

// Called from capture session queue.
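// Note that UIKit and AVFoundation use opposite senses of "landscape": with
// the device in UIDeviceOrientationLandscapeLeft the camera output is
// AVCaptureVideoOrientationLandscapeRight, and vice versa, so the mapping
// below intentionally swaps them.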
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
#if TARGET_OS_IPHONE
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
#if TARGET_OS_IPHONE
    if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
      [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:newInput.device];
    }
#endif
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};
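
// Frames hop from the capture queue to the WebRTC thread that started
// capture, carried as AVFoundationFrame messages. The CVImageBufferRef is
// retained in CaptureSampleBuffer() and released at the end of
// OnFrameMessage(), keeping it alive across the thread hop.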

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supported_formats;
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    supported_formats.push_back(cricket::VideoFormat(kIPhone4SFormat));
  } else {
    supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
  }
#else
  supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
#endif
  SetSupportedFormats(supported_formats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat && format != kIPhone4SFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time_ns) {
  RTC_DCHECK(_startThread->IsCurrent());

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

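  // AdaptFrame() applies the base class's frame-adaptation logic: it returns
  // false if this frame should be dropped, and otherwise fills in the crop
  // rectangle, the adapted output size, and a translated capture timestamp.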
  if (!AdaptFrame(captured_width, captured_height,
                  capture_time_ns / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    CVBufferRelease(image_buffer);
    return;
  }

  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    buffer = scaled_buffer;
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
                                    translated_camera_time_us),
          captured_width, captured_height);

  CVBufferRelease(image_buffer);
}

}  // namespace webrtc