/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static NSString *const kIPhone4SPreset = AVCaptureSessionPreset352x288;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
// iPhone4S is too slow to handle 30fps.
static cricket::VideoFormat const kIPhone4SFormat =
    cricket::VideoFormat(352,
                         288,
                         cricket::VideoFormat::FpsToInterval(15),
                         cricket::FOURCC_NV12);

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
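
// A minimal usage sketch, mirroring what AVFoundationVideoCapturer below
// actually does: the C++ capturer creates this object once in its constructor
// and toggles capture from a WebRTC thread.
//
//   _capturer =
//       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
//   [_capturer start];  // Asynchronously starts the AVCaptureSession.
//   ...
//   [_capturer stop];   // Must be balanced before dealloc.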

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

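// Lazily creates the serial dispatch queue on which sample buffers are
// delivered. It targets the high-priority global queue so frame processing
// is scheduled ahead of default-priority work.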
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                     " not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

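// |_isRunning| is written from the capture session notification handlers and
// read from arbitrary threads via IsRunning(), so access is serialized with a
// critical section.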
- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

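// Called on self.frameQueue, the queue registered with
// -setSampleBufferDelegate:queue:, once per captured frame.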
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  NSString *preset = kDefaultPreset;
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    preset = kIPhone4SPreset;
  }
#endif
  if (![captureSession canSetSessionPreset:preset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = preset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:input.device];
  }
#endif
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

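// Caps the device's frame rate by setting activeVideoMinFrameDuration, which
// requires locking the device for configuration first. For example,
// CMTimeMake(1, 15) limits capture to 15fps, as used for the iPhone 4S above.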
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  if (![device lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device for configuration. Error: %@",
                error.localizedDescription);
    return;
  }
  device.activeVideoMinFrameDuration = minFrameDuration;
  [device unlockForConfiguration];
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
#if TARGET_OS_IPHONE
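  // Note that UIDeviceOrientationLandscapeLeft maps to
  // AVCaptureVideoOrientationLandscapeRight (and vice versa) because the two
  // enums define landscape in opposite directions relative to the device.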
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
#if TARGET_OS_IPHONE
    if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
      [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:newInput.device];
    }
#endif
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

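// Frame data marshalled from the capture queue to |_startThread|. The image
// buffer is retained in CaptureSampleBuffer() and released in
// OnFrameMessage() once the frame has been delivered.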
struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches preset.
  std::vector<cricket::VideoFormat> supported_formats;
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    supported_formats.push_back(cricket::VideoFormat(kIPhone4SFormat));
    set_enable_video_adapter(false);
  } else {
    supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
  }
#else
  supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
#endif
  SetSupportedFormats(supported_formats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat && format != kIPhone4SFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

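// Called on the capture queue for each frame. Validates the sample buffer,
// retains its pixel buffer, and posts it to |_startThread|, where frames are
// expected to be delivered.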
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

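// Runs on |_startThread|. Wraps the retained pixel buffer in a
// CoreVideoFrameBuffer, lets the base class adapter decide on cropping and
// scaling, and forwards the result to OnFrame().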
void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time_ns) {
  RTC_DCHECK(_startThread->IsCurrent());

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(captured_width, captured_height,
                  capture_time_ns / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    CVBufferRelease(image_buffer);
    return;
  }

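  // Only convert to I420 and scale when the adapter actually requested a
  // change; otherwise the native NV12 buffer is forwarded untouched.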
  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    buffer = scaled_buffer;
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);

  CVBufferRelease(image_buffer);
}

}  // namespace webrtc