/*
 *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"
#include "webrtc/common_video/include/corevideo_frame_buffer.h"

// TODO(tkchin): support other formats.
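// Note: kDefaultPreset and kDefaultFormat describe the same capture
// configuration (640x480 NV12 frames at 30fps) and are kept in sync.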
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not reference counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object is owned by the
// capturer.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
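
// Threading note: -start, -stop and the property accessors may be called from
// any thread; session start/stop and input reconfiguration are dispatched to
// RTCDispatcher's capture-session queue.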

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

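// Lazily creates the delegate queue for the video data output. The queue is
// serial, so sample buffers arrive in order, and targets the high-priority
// global queue so capture callbacks are serviced promptly.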
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

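// isRunning is written from the capture session notification handlers and
// read from WebRTC threads, so access is guarded by a critical section.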
- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

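// Called on self.frameQueue for each captured frame. Forwards the sample
// buffer to the owning C++ capturer, which retains the pixel buffer and
// reposts it to the WebRTC thread that started capture.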
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

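// A fatal error triggers a single restart attempt; the retry flag is reset in
// handleCaptureSessionDidStartRunning once a restart succeeds, so a later
// fatal error gets one retry of its own.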
- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
#if TARGET_OS_IPHONE
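  // UIDeviceOrientation and AVCaptureVideoOrientation define landscape from
  // opposite points of view (device rotation vs. camera rotation), so
  // LandscapeLeft maps to LandscapeRight and vice versa.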
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
  }];
}

@end

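// A minimal usage sketch for the C++ capturer defined below, assuming it is
// driven directly rather than through webrtc's video source plumbing (which
// is how it is used in practice); |capturer| is an illustrative name:
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   if (capturer.Start(kDefaultFormat) != cricket::CaptureState::CS_FAILED) {
//     capturer.SetUseBackCamera(true);  // Optional; warns if no back camera.
//     ...
//     capturer.Stop();
//   }
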
namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

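// Runs on _startThread, servicing the frame message posted from
// CaptureSampleBuffer above.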
void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time_ns) {
  RTC_DCHECK(_startThread->IsCurrent());

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

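  // Ask the base class how to adapt this frame: it returns the output size,
  // the crop rectangle to apply first, and a translated timestamp. A false
  // return means the frame should be dropped.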
  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(captured_width, captured_height,
                  capture_time_ns / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    CVBufferRelease(image_buffer);
    return;
  }

  if (adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    buffer = scaled_buffer;
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
                                    translated_camera_time_us),
          captured_width, captured_height);

  CVBufferRelease(image_buffer);
}

}  // namespace webrtc