/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static NSString *const kIPhone4SPreset = AVCaptureSessionPreset352x288;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
// iPhone4S is too slow to handle 30fps.
static cricket::VideoFormat const kIPhone4SFormat =
    cricket::VideoFormat(352,
                         288,
                         cricket::VideoFormat::FpsToInterval(15),
                         cricket::FOURCC_NV12);

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
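
// Illustrative sketch of the intended usage, where `capturer` is a valid
// webrtc::AVFoundationVideoCapturer* (as passed from the C++ constructor
// below):
//
//   RTCAVFoundationVideoCapturerInternal *internal =
//       [[RTCAVFoundationVideoCapturerInternal alloc]
//           initWithCapturer:capturer];
//   internal.useBackCamera = YES;  // No-op unless canUseBackCamera is YES.
//   [internal start];
//   // Frames are delivered through capturer->CaptureSampleBuffer().
//   [internal stop];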

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All
    // objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}
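
// Because the session exists from init onward, an application can attach a
// preview before capture starts. Illustrative sketch, with `internal` a
// hypothetical instance of this class:
//
//   AVCaptureVideoPreviewLayer *previewLayer =
//       [AVCaptureVideoPreviewLayer layerWithSession:internal.captureSession];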

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
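    // Target the high-priority global queue so that work submitted to this
    // serial queue is scheduled ahead of default-priority work.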
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  NSString *preset = kDefaultPreset;
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    preset = kIPhone4SPreset;
  }
#endif
  if (![captureSession canSetSessionPreset:preset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = preset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:input.device];
  }
#endif
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

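// Caps the device's frame rate: activeVideoMinFrameDuration is the reciprocal
// of the maximum frame rate, so CMTimeMake(1, 15) limits capture to 15 fps.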
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  if (![device lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device for configuration. Error: %@",
                error.localizedDescription);
    return;
  }
  device.activeVideoMinFrameDuration = minFrameDuration;
  [device unlockForConfiguration];
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
#if TARGET_OS_IPHONE
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
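  // The landscape cases below are deliberately crossed: UIDeviceOrientation
  // is defined from the device's point of view, while
  // AVCaptureVideoOrientation follows interface orientation, which is its
  // mirror image in landscape.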
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
#if TARGET_OS_IPHONE
    if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
      [self setMinFrameDuration:CMTimeMake(1, 15) forDevice:newInput.device];
    }
#endif
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

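// A frame is posted to the start thread as an AVFoundationFrame message. The
// image buffer is retained in CaptureSampleBuffer before posting and released
// in OnFrameMessage once the frame has been handled (or dropped).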
struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. These match the capture session presets above.
  std::vector<cricket::VideoFormat> supported_formats;
#if TARGET_OS_IPHONE
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    supported_formats.push_back(cricket::VideoFormat(kIPhone4SFormat));
  } else {
    supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
  }
#else
  supported_formats.push_back(cricket::VideoFormat(kDefaultFormat));
#endif
  SetSupportedFormats(supported_formats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat && format != kIPhone4SFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns asynchronously.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}
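
// Illustrative sketch: capture is normally driven by WebRTC's
// cricket::VideoCapturer machinery, but a direct call sequence would look
// roughly like this, with `capturer` a hypothetical instance created on an
// rtc::Thread:
//
//   capturer.Start(cricket::VideoFormat(
//       640, 480, cricket::VideoFormat::FpsToInterval(30),
//       cricket::FOURCC_NV12));  // Must equal a supported format.
//   ...
//   capturer.Stop();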

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time_ns) {
  RTC_DCHECK(_startThread->IsCurrent());

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(captured_width, captured_height,
                  capture_time_ns / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    CVBufferRelease(image_buffer);
    return;
  }

  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    buffer = scaled_buffer;
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);

  CVBufferRelease(image_buffer);
}

}  // namespace webrtc