/*
 *  Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
#import "RTCAVFoundationVideoCapturerInternal.h"

#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "avfoundationformatmapper.h"
@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  // Rotation forwarded with every sample buffer; recomputed in
  // updateOrientation on the capture-session queue.
  webrtc::VideoRotation _rotation;
  // Set after one recovery attempt from a fatal capture error; cleared when a
  // session (re)starts successfully, so we only retry once per failure.
  BOOL _hasRetriedOnFatalError;
  BOOL _isRunning;
  BOOL _hasStarted;
  rtc::CriticalSection _crit;
}

@synthesize captureSession = _captureSession;
@synthesize frameQueue = _frameQueue;
@synthesize useBackCamera = _useBackCamera;

// Synthesized explicitly so the backing ivars can also be accessed directly
// within this implementation.
@synthesize isRunning = _isRunning;
@synthesize hasStarted = _hasStarted;
43
// This is called from the thread that creates the video source, which is likely
// the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer for e.g. AVCapturePreviewLayer. All objects
    // created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
    // iOS-only observers: device rotation, session interruptions (e.g. an
    // incoming call), and app activation (used to restart a stopped session).
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruption:)
                   name:AVCaptureSessionWasInterruptedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleApplicationDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:[UIApplication sharedApplication]];
#endif
    // Cross-platform observers for session lifecycle and runtime errors.
    [center addObserver:self
               selector:@selector(handleCaptureSessionRuntimeError:)
                   name:AVCaptureSessionRuntimeErrorNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStartRunning:)
                   name:AVCaptureSessionDidStartRunningNotification
                 object:_captureSession];
    [center addObserver:self
               selector:@selector(handleCaptureSessionDidStopRunning:)
                   name:AVCaptureSessionDidStopRunningNotification
                 object:_captureSession];
  }
  return self;
}
91
- (void)dealloc {
  // The capturer must be stopped before this object is released; otherwise the
  // async stop block could message a deallocated object.
  RTC_DCHECK(!self.hasStarted);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}
97
// Explicit getter for the synthesized captureSession property.
- (AVCaptureSession *)captureSession {
  return _captureSession;
}
101
// Returns the device backing whichever camera input is currently selected.
- (AVCaptureDevice *)getActiveCaptureDevice {
  if (self.useBackCamera) {
    return _backCameraInput.device;
  }
  return _frontCameraInput.device;
}
105
// Device behind the front-camera input, or nil if the input was never created.
- (nullable AVCaptureDevice *)frontCaptureDevice {
  return _frontCameraInput.device;
}
109
// Device behind the back-camera input, or nil if the input was never created.
- (nullable AVCaptureDevice *)backCaptureDevice {
  return _backCameraInput.device;
}
113
// Lazily creates the serial queue on which sample buffers are delivered,
// targeting the high-priority global queue.
- (dispatch_queue_t)frameQueue {
  if (_frameQueue == nil) {
    dispatch_queue_t queue =
        dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
    dispatch_set_target_queue(queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    _frameQueue = queue;
  }
  return _frameQueue;
}
123
// Called from any thread (likely main thread).
// YES when a back-camera input was successfully created during setup.
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}
128
// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  BOOL useBack = NO;
  @synchronized(self) {
    useBack = _useBackCamera;
  }
  return useBack;
}
135
// Called from any thread (likely main thread).
// Switches the active camera input; a no-op when the value is unchanged or no
// back camera is available (warns only if the caller asked for the back one).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (![self canUseBackCamera]) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used;"
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (useBackCamera == _useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}
153
// Called from WebRTC thread.
- (void)start {
  if (self.hasStarted) {
    return;
  }
  // Mark started synchronously; the session itself is started asynchronously
  // on the capture-session dispatcher queue.
  self.hasStarted = YES;
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
#if TARGET_OS_IPHONE
                      // Default to portrait orientation on iPhone. This will be reset in
                      // updateOrientation unless orientation is unknown/faceup/facedown.
                      _rotation = webrtc::kVideoRotation_90;
#else
                      // No rotation on Mac.
                      _rotation = webrtc::kVideoRotation_0;
#endif
                      [self updateOrientation];
#if TARGET_OS_IPHONE
                      // Needed for deviceOrientationDidChange: notifications to fire.
                      [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
                      AVCaptureSession *captureSession = self.captureSession;
                      [captureSession startRunning];
                    }];
}
179
// Called from same thread as start.
- (void)stop {
  if (!self.hasStarted) {
    return;
  }
  self.hasStarted = NO;
  // Due to this async block, it's possible that the ObjC object outlives the
  // C++ one. In order to not invoke functions on the C++ object, we set
  // hasStarted immediately instead of dispatching it async.
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      // Detach the delegate so no further frames are delivered.
                      [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
                      [_captureSession stopRunning];
#if TARGET_OS_IPHONE
                      [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
                    }];
}
199
#pragma mark iOS notifications

#if TARGET_OS_IPHONE
// Recomputes the frame rotation on the capture-session queue whenever the
// device reports a new orientation.
- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [self updateOrientation];
                               }];
}
#endif
210
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

// Delivered on the frame queue for each captured frame; forwards the buffer to
// the owning C++ capturer together with the current rotation.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (self.hasStarted) {
    _capturer->CaptureSampleBuffer(sampleBuffer, _rotation);
  }
}
222
// Delegate callback for frames the output discarded; only logs the event.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}
228
#pragma mark - AVCaptureSession notifications

// Logs a human-readable reason when the session is interrupted (e.g. the
// device being claimed by another client). Recovery is not implemented yet.
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
  NSString *reasonString = nil;
// AVCaptureSessionInterruptionReasonKey is only available with the iOS 9+ SDK.
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
    __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
  NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
  if (reason) {
    switch (reason.intValue) {
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
        reasonString = @"VideoDeviceNotAvailableInBackground";
        break;
      case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
        reasonString = @"AudioDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
        reasonString = @"VideoDeviceInUseByAnotherClient";
        break;
      case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
        reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
        break;
    }
  }
#endif
  RTCLog(@"Capture session interrupted: %@", reasonString);
  // TODO(tkchin): Handle this case.
}
256
// Logged only; no restart logic is attached to interruption-ended yet.
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
  RTCLog(@"Capture session interruption ended.");
  // TODO(tkchin): Handle this case.
}
261
// A runtime error means the session stopped itself. On iOS a media-services
// reset is treated as recoverable; everything else takes the fatal path.
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
  NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
  RTCLogError(@"Capture session runtime error: %@", error);

  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
#if TARGET_OS_IPHONE
                                 if (error.code == AVErrorMediaServicesWereReset) {
                                   [self handleNonFatalError];
                                 } else {
                                   [self handleFatalError];
                                 }
#else
                                 [self handleFatalError];
#endif
                               }];
}
279
// Marks the capturer running and re-arms the single fatal-error retry.
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
  RTCLog(@"Capture session started.");

  self.isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 // If we successfully restarted after an unknown error,
                                 // allow future retries on fatal errors.
                                 _hasRetriedOnFatalError = NO;
                               }];
}
291
// Mirrors the session's running state into the isRunning property.
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
  RTCLog(@"Capture session stopped.");
  self.isRunning = NO;
}
296
// Attempts a single session restart after a fatal capture error; further fatal
// errors are only logged until a restart succeeds (which clears the flag in
// handleCaptureSessionDidStartRunning:).
- (void)handleFatalError {
  [RTCDispatcher
      dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                    block:^{
                      if (_hasRetriedOnFatalError) {
                        RTCLogError(@"Previous fatal error recovery failed.");
                        return;
                      }
                      RTCLogWarning(@"Attempting to recover from fatal capture error.");
                      [self handleNonFatalError];
                      _hasRetriedOnFatalError = YES;
                    }];
}
310
// Restarts the capture session after an error, but only if capture is still
// wanted (hasStarted).
- (void)handleNonFatalError {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 if (!self.hasStarted) {
                                   return;
                                 }
                                 RTCLog(@"Restarting capture session after error.");
                                 [self.captureSession startRunning];
                               }];
}
320
#if TARGET_OS_IPHONE

#pragma mark - UIApplication notifications

// When the app returns to the foreground, restart the session if capture was
// requested but the session is no longer running.
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 BOOL shouldRestart =
                                     self.hasStarted && !self.captureSession.isRunning;
                                 if (shouldRestart) {
                                   RTCLog(@"Restarting capture session on active.");
                                   [self.captureSession startRunning];
                                 }
                               }];
}

#endif  // TARGET_OS_IPHONE
336
#pragma mark - Private

// Creates the capture session, its video data output, and the camera inputs.
// Called once from init. Returns NO (leaving _captureSession unset) if the
// output cannot be added or no front camera is available.
- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
  // Keep the capture session off the app's shared audio session.
  captureSession.usesApplicationAudioSession = NO;
#endif
  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera
  // give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  // Only one camera input is attached at a time; switching happens later in
  // updateSessionInputForUseBackCamera:.
  AVCaptureDeviceInput *input = self.useBackCamera ? backCameraInput : frontCameraInput;
  [captureSession addInput:input];

  _captureSession = captureSession;
  return YES;
}
373
// Lazily creates the video data output that delivers frames on frameQueue.
- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (_videoDataOutput != nil) {
    return _videoDataOutput;
  }
  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  // TODO(denicija): Remove this color conversion and use the original capture format directly.
  AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
  output.videoSettings = @{
    (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
  output.alwaysDiscardsLateVideoFrames = NO;
  [output setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = output;
  return _videoDataOutput;
}
390
// Returns the first video device at the requested position, or nil.
- (AVCaptureDevice *)videoCaptureDeviceForPosition:(AVCaptureDevicePosition)position {
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *device in devices) {
    if (device.position == position) {
      return device;
    }
  }
  return nil;
}
399
// Lazily creates and caches the front-camera input. On Mac the default video
// device is used instead of a position lookup. Returns nil on failure.
- (AVCaptureDeviceInput *)frontCameraInput {
  if (_frontCameraInput != nil) {
    return _frontCameraInput;
  }
#if TARGET_OS_IPHONE
  AVCaptureDevice *device = [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
  AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
  if (device == nil) {
    RTCLogWarning(@"Failed to find front capture device.");
    return nil;
  }
  NSError *error = nil;
  AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
  if (input == nil) {
    RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
    return nil;
  }
  _frontCameraInput = input;
  return _frontCameraInput;
}
424
// Lazily creates and caches the back-camera input. Returns nil if no
// rear-facing device exists or the input cannot be created.
- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      // Bug fix: this log previously said "front capture device" (copy-paste
      // from -frontCameraInput).
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice error:&error];
    if (!backCameraInput) {
      // Bug fix: this log previously said "front camera input".
      RTCLogError(@"Failed to create back camera input: %@", error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}
444
// Called from capture session queue.
// Maps the current UIDevice orientation to the rotation forwarded with each
// frame. Landscape rotation differs depending on which camera is active.
- (void)updateOrientation {
#if TARGET_OS_IPHONE
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      _rotation = webrtc::kVideoRotation_90;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      _rotation = webrtc::kVideoRotation_270;
      break;
    case UIDeviceOrientationLandscapeLeft:
      _rotation =
          _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_0 : webrtc::kVideoRotation_180;
      break;
    case UIDeviceOrientationLandscapeRight:
      _rotation =
          _capturer->GetUseBackCamera() ? webrtc::kVideoRotation_180 : webrtc::kVideoRotation_0;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      // Ignore; keep the last known rotation.
      break;
  }
#endif
}
471
// Update the current session input to match what's stored in _useBackCamera.
// Runs on the capture-session queue inside a begin/commitConfiguration pair so
// the input swap and format change are applied atomically.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [_captureSession beginConfiguration];
                                 AVCaptureDeviceInput *oldInput = _backCameraInput;
                                 AVCaptureDeviceInput *newInput = _frontCameraInput;
                                 if (useBackCamera) {
                                   oldInput = _frontCameraInput;
                                   newInput = _backCameraInput;
                                 }
                                 if (oldInput) {
                                   // Ok to remove this even if it's not attached. Will be no-op.
                                   [_captureSession removeInput:oldInput];
                                 }
                                 if (newInput) {
                                   [_captureSession addInput:newInput];
                                 }
                                 [self updateOrientation];
                                 AVCaptureDevice *newDevice = newInput.device;
                                 // Bug fix: GetCaptureFormat() may return null (e.g. capture not
                                 // started); the old code dereferenced it unconditionally.
                                 const cricket::VideoFormat *format = _capturer->GetCaptureFormat();
                                 if (format) {
                                   webrtc::SetFormatForCaptureDevice(newDevice, _captureSession, *format);
                                 }
                                 [_captureSession commitConfiguration];
                               }];
}
499
@end