/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
38@interface RTCAVFoundationVideoCapturerInternal : NSObject
39 <AVCaptureVideoDataOutputSampleBufferDelegate>
40
41@property(nonatomic, readonly) AVCaptureSession *captureSession;
Zeke Chin52516802016-06-03 11:59:22 -070042@property(nonatomic, readonly) dispatch_queue_t frameQueue;
hjona1cf3662016-03-14 20:55:22 -070043@property(nonatomic, readonly) BOOL canUseBackCamera;
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080044@property(nonatomic, assign) BOOL useBackCamera; // Defaults to NO.
Zeke Chin52516802016-06-03 11:59:22 -070045@property(nonatomic, assign) BOOL isRunning; // Whether the capture session is running.
46@property(atomic, assign) BOOL hasStarted; // Whether we have an unmatched start.
Jon Hjelle7ac8bab2016-01-21 11:44:55 -080047
// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;
@synthesize hasStarted = _hasStarted;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
#endif
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

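// Lazily creates the serial queue on which sample buffer callbacks are
// delivered. The queue targets the high-priority global queue so frames are
// serviced promptly without dedicating a thread to each capturer.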
- (dispatch_queue_t)frameQueue {
  if (!_frameQueue) {
    _frameQueue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video",
                              DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(
        _frameQueue,
        dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
  }
  return _frameQueue;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                     " not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

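// isRunning is guarded by an rtc::CriticalSection because it is written from
// the capture session notification handlers and read from arbitrary WebRTC
// threads.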
- (BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  return _isRunning;
}

- (void)setIsRunning:(BOOL)isRunning {
  rtc::CritScope cs(&_crit);
  _isRunning = isRunning;
}

// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  self.hasStarted = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = NO;
    [self updateOrientation];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
#if TARGET_OS_IPHONE
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
  }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!self.hasStarted) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason =
      notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}

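// A runtime error is treated as fatal unless (on iOS) it is
// AVErrorMediaServicesWereReset, which signals that the system media services
// were restarted; in that case the session can simply be started again.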
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error =
      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
    if (error.code == AVErrorMediaServicesWereReset) {
      [self handleNonFatalError];
    } else {
      [self handleFatalError];
    }
#else
    [self handleFatalError];
#endif
  }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");
  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    // If we successfully restarted after an unknown error, allow future
    // retries on fatal errors.
    _hasRetriedOnFatalError = NO;
  }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}

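// Retries the session exactly once after a fatal error. The retry flag is
// cleared in handleCaptureSessionDidStartRunning, so a successful recovery
// re-arms this path for future errors.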
- (void)handleFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (!_hasRetriedOnFatalError) {
      RTCLogWarning(@"Attempting to recover from fatal capture error.");
      [self handleNonFatalError];
      _hasRetriedOnFatalError = YES;
    } else {
      RTCLogError(@"Previous fatal error recovery failed.");
    }
  }];
}

- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    if (self.hasStarted) {
      RTCLog(@"Restarting capture session after error.");
      [self.captureSession startRunning];
    }
  }];
}

#pragma mark - Private

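// Configures the capture session with the default 640x480 preset, the NV12
// video data output, and the front camera input (plus the back camera input
// when one exists). Returns NO if any required piece is unavailable.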
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
    AVCaptureDevice *frontCameraDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
#if TARGET_OS_IPHONE
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
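    // The landscape cases are intentionally crossed: UIDeviceOrientation is
    // defined relative to the device, while AVCaptureVideoOrientation is
    // defined relative to the captured scene, so the two enums are mirrored.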
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
#endif
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // Ok to remove this even if it's not attached. Will be no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

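// Message payload posted from the capture queue to the start thread. The
// image_buffer is retained by the sender and released in OnFrameMessage once
// the frame has been delivered.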
struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

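// A minimal usage sketch (assuming the caller runs on an rtc::Thread, since
// Start records rtc::Thread::Current() as the frame delivery thread):
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   capturer.Start(cricket::VideoFormat(
//       640, 480, cricket::VideoFormat::FpsToInterval(30),
//       cricket::FOURCC_NV12));  // Must match kDefaultFormat.
//   ...
//   capturer.Stop();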
AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the AVCaptureSession
  // to spin up, and this call returns async.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

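// Called on the capture queue for each delivered sample buffer. The pixel
// buffer is retained here and released on the start thread in OnFrameMessage
// after the frame has been signaled.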
void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

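// Runs on the thread that called Start. Locks the NV12 pixel buffer and wraps
// it in a cricket::CapturedFrame without copying; a superclass handler
// converts the frame to I420 when it is signaled.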
void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time) {
  RTC_DCHECK(_startThread->IsCurrent());

  // Base address must be locked to access frame data.
  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
  if (ret != kCVReturnSuccess) {
    return;
  }

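  // NV12 layout: plane 0 is the full-resolution Y plane and plane 1 is the
  // half-resolution interleaved CbCr plane. frame_size assumes the two planes
  // are contiguous in memory, which the RTC_DCHECK below verifies.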
  static size_t const kYPlaneIndex = 0;
  static size_t const kUVPlaneIndex = 1;
  uint8_t* y_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kYPlaneIndex));
  size_t y_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_width =
      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
  size_t uv_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
  size_t uv_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
      uv_plane_bytes_per_row * uv_plane_height;

  // Sanity check assumption that planar bytes are contiguous.
  uint8_t* uv_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kUVPlaneIndex));
  RTC_DCHECK(uv_plane_address ==
             y_plane_address + y_plane_height * y_plane_bytes_per_row);

  // Stuff data into a cricket::CapturedFrame.
  cricket::CapturedFrame frame;
  frame.width = y_plane_width;
  frame.height = y_plane_height;
  frame.pixel_width = 1;
  frame.pixel_height = 1;
  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
  frame.time_stamp = capture_time;
  frame.data = y_plane_address;
  frame.data_size = frame_size;

  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, &frame);

  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
  CVBufferRelease(image_buffer);
}

}  // namespace webrtc