blob: ddeedb5ab6ffe19aedcc48627b571bd59880cc7d [file] [log] [blame]
Jon Hjelle7ac8bab2016-01-21 11:44:55 -08001/*
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
tkchin9eeb6242016-04-27 01:54:20 -070011#include "avfoundationvideocapturer.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080012
13#import <AVFoundation/AVFoundation.h>
14#import <Foundation/Foundation.h>
adam.fedorfc22e032016-06-08 17:24:37 -070015#if TARGET_OS_IPHONE
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080016#import <UIKit/UIKit.h>
adam.fedorfc22e032016-06-08 17:24:37 -070017#endif
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080018
tkchin9eeb6242016-04-27 01:54:20 -070019#import "RTCDispatcher+Private.h"
20#import "WebRTC/RTCLogging.h"
tkchind7629102016-07-28 14:52:55 -070021#if TARGET_OS_IPHONE
22#import "WebRTC/UIDevice+RTCDevice.h"
23#endif
tkchin9eeb6242016-04-27 01:54:20 -070024
25#include "webrtc/base/bind.h"
26#include "webrtc/base/checks.h"
27#include "webrtc/base/thread.h"
magjed39607c92016-07-14 08:12:17 -070028#include "webrtc/common_video/include/corevideo_frame_buffer.h"
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080029
// Pairs an AVCaptureSession preset identifier with the pixel dimensions it
// delivers, so cricket::VideoFormat requests can be mapped onto presets.
struct AVCaptureSessionPresetResolution {
  NSString *sessionPreset;
  int width;
  int height;
};

#if TARGET_OS_IPHONE
// Presets we are willing to use on iOS devices.
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset1280x720, 1280, 720},
  { AVCaptureSessionPreset1920x1080, 1920, 1080},
};
#else  // macOS
// Presets we are willing to use on macOS.
static const AVCaptureSessionPresetResolution kAvailablePresets[] = {
  { AVCaptureSessionPreset320x240, 320, 240},
  { AVCaptureSessionPreset352x288, 352, 288},
  { AVCaptureSessionPreset640x480, 640, 480},
  { AVCaptureSessionPreset960x540, 960, 540},
  { AVCaptureSessionPreset1280x720, 1280, 720},
};
#endif

// Maps a cricket::VideoFormat onto an AVCaptureSession preset. A preset
// matches if its dimensions equal the requested ones in either orientation
// (WxH or HxW). Falls back to 640x480 when nothing matches.
static NSString *GetSessionPresetForVideoFormat(
    const cricket::VideoFormat& format) {
  for (const auto& preset : kAvailablePresets) {
    const BOOL sameOrientation =
        format.width == preset.width && format.height == preset.height;
    const BOOL flippedOrientation =
        format.width == preset.height && format.height == preset.width;
    if (sameOrientation || flippedOrientation) {
      return preset.sessionPreset;
    }
  }
  // No matching preset found; use a sensible default.
  return AVCaptureSessionPreset640x480;
}
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080066
// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

// The capture session created at init and retained for the object's lifetime.
@property(nonatomic, readonly) AVCaptureSession *captureSession;
// Serial queue on which sample buffers are delivered.
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
// YES when a rear-facing camera input exists and can be selected.
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.
@property(nonatomic, assign) BOOL isRunning;  // Whether the capture session is running.
@property(atomic, assign) BOOL hasStarted;  // Whether we have an unmatched start.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
// Returns the capture device backing the currently selected camera input.
- (AVCaptureDevice *)getActiveCaptureDevice;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end
93
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  // stop must have been called (and matched) before this object goes away;
  // otherwise the async stop block could message a dead C++ capturer.
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

// Returns the capture device backing the currently selected camera input.
- (AVCaptureDevice *)getActiveCaptureDevice {
  return self.useBackCamera ? _backCameraInput.device : _frontCameraInput.device;
}

// Lazily creates the serial delivery queue for sample buffers. Targeted at
// the high-priority global queue so capture callbacks are serviced promptly.
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                    "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Called on the frame queue for every captured frame; forwards the sample
// buffer to the owning C++ capturer while we have an unmatched start.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    // A media-services reset is recoverable by simply restarting the session.
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

// Attempts a single restart after a fatal error; subsequent fatal errors are
// only logged until a successful restart clears _hasRetriedOnFatalError.
- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#pragma mark - Private

// Creates the capture session and attaches the video output and the default
// camera input. Returns NO if any required piece cannot be configured.
// Called once from init.
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}

// Lazily creates the video data output configured for NV12 delivery.
- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

// Returns the first video device at |position|, or nil if none exists.
- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

// Lazily creates the front camera input. Returns nil (after logging) when no
// front camera exists or the input cannot be created.
- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

// Lazily creates the back camera input. Returns nil (after logging) when no
// back camera exists or the input cannot be created.
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      // Fixed copy-paste error: this path concerns the *back* camera.
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      // Fixed copy-paste error: this path concerns the *back* camera.
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Sets the minimum frame duration (i.e. caps the frame rate) on |device|,
// taking and releasing the device configuration lock. Failure is logged and
// otherwise ignored.
- (void)setMinFrameDuration:(CMTime)minFrameDuration
                  forDevice:(AVCaptureDevice *)device {
  NSError *error = nil;
  if (![device lockForConfiguration:&error]) {
    RTCLogError(@"Failed to lock device for configuration. Error: %@", error.localizedDescription);
    return;
  }
  device.activeVideoMinFrameDuration = minFrameDuration;
  [device unlockForConfiguration];
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
#if TARGET_OS_IPHONE
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Face up/down and unknown give no useful orientation; only apply the
      // portrait default on the very first call.
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];

    // Reapply the configured frame rate cap to the newly active device.
    // GetCaptureFormat() is null when the capturer is not started, and
    // dereferencing it here would crash if the camera is switched before
    // Start(); guard both the format and the new input.
    const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
    if (format && newInput) {
      const auto fps = cricket::VideoFormat::IntervalToFps(format->interval);
      [self setMinFrameDuration:CMTimeMake(1, fps) forDevice:newInput.device];
    }
  }];
}

@end
573
namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

// Payload posted from the capture callback to the WebRTC start thread. The
// image buffer is retained by the sender and released by the receiver.
struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];

  // Advertise the subset of kAvailablePresets that this device supports.
  int framerate = 30;
#if TARGET_OS_IPHONE
  // The iPhone 4S cannot keep up with 30 fps; also disable the adapter.
  if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
    set_enable_video_adapter(false);
    framerate = 15;
  }
#endif

  std::vector<cricket::VideoFormat> supported_formats;
  for (const auto& preset : kAvailablePresets) {
    if ([_capturer.captureSession canSetSessionPreset:preset.sessionPreset]) {
      supported_formats.push_back(cricket::VideoFormat(
          preset.width,
          preset.height,
          cricket::VideoFormat::FpsToInterval(framerate),
          cricket::FOURCC_NV12));
    }
  }
  SetSupportedFormats(supported_formats);
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Select and apply the session preset matching the requested format.
  NSString *desiredPreset = GetSessionPresetForVideoFormat(format);
  RTC_DCHECK(desiredPreset);
  [_capturer.captureSession beginConfiguration];
  if (![_capturer.captureSession canSetSessionPreset:desiredPreset]) {
    LOG(LS_ERROR) << "Unsupported video format.";
    [_capturer.captureSession commitConfiguration];
    return cricket::CaptureState::CS_FAILED;
  }
  _capturer.captureSession.sessionPreset = desiredPreset;
  [_capturer.captureSession commitConfiguration];

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  // Cap the frame rate of the active capture device to the requested fps.
  const auto fps = cricket::VideoFormat::IntervalToFps(format.interval);
  AVCaptureDevice *activeDevice = [_capturer getActiveCaptureDevice];
  [_capturer setMinFrameDuration:CMTimeMake(1, fps) forDevice:activeDevice];

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

// Called on the capture queue: validates the sample buffer, retains its image
// buffer and posts it to the start thread for delivery.
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  const bool valid_single_sample =
      CMSampleBufferGetNumSamples(sampleBuffer) == 1 &&
      CMSampleBufferIsValid(sampleBuffer) &&
      CMSampleBufferDataIsReady(sampleBuffer);
  if (!valid_single_sample) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

// Runs on the start thread: wraps the pixel buffer, adapts (crop/scale) it if
// requested, delivers it via OnFrame, and releases the retained buffer.
void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time_ns) {
  RTC_DCHECK(_startThread->IsCurrent());

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);

  const int captured_width = buffer->width();
  const int captured_height = buffer->height();

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(captured_width, captured_height,
                  capture_time_ns / rtc::kNumNanosecsPerMicrosec,
                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                  &crop_width, &crop_height, &crop_x, &crop_y,
                  &translated_camera_time_us)) {
    // Frame dropped by the adapter; still release the retained buffer.
    CVBufferRelease(image_buffer);
    return;
  }

  const bool needs_scaling =
      adapted_width != captured_width || crop_width != captured_width ||
      adapted_height != captured_height || crop_height != captured_height;
  if (needs_scaling) {
    // TODO(magjed): Avoid converting to I420.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        _buffer_pool.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->CropAndScaleFrom(buffer->NativeToI420Buffer(), crop_x,
                                    crop_y, crop_width, crop_height);
    buffer = scaled_buffer;
  }

  OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
                                    translated_camera_time_us, 0),
          captured_width, captured_height);

  CVBufferRelease(image_buffer);
}

}  // namespace webrtc