/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/api/objc/avfoundationvideocapturer.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/thread.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "webrtc/base/objc/RTCDispatcher.h"
#import "webrtc/base/objc/RTCLogging.h"

// TODO(tkchin): support other formats.
static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);
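// Note: cricket::VideoFormat takes a frame interval rather than a frame rate;
// FpsToInterval(30) converts 30 fps into the equivalent per-frame interval.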

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other webrtc objects own cricket::VideoCapturer, which is
// not ref counted. To prevent bad behavior we do not expose this class
// directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) BOOL isRunning;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;
- (void)startCaptureAsync;
- (void)stopCaptureAsync;

@end
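
// A rough usage sketch, illustrative only: the C++ AVFoundationVideoCapturer
// defined below owns and drives this object along these lines ("capturer" is
// assumed to be a webrtc::AVFoundationVideoCapturer *):
//
//   RTCAVFoundationVideoCapturerInternal *internal =
//       [[RTCAVFoundationVideoCapturerInternal alloc]
//           initWithCapturer:capturer];
//   [internal startCaptureAsync];  // Frames arrive via the sample buffer
//                                  // delegate callback on the main queue.
//   internal.useBackCamera = YES;  // Optional, only if canUseBackCamera.
//   [internal stopCaptureAsync];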

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontDeviceInput;
  AVCaptureDeviceInput *_backDeviceInput;
  AVCaptureVideoDataOutput *_videoOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
}

@synthesize captureSession = _captureSession;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;

- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  NSParameterAssert(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                        object:nil
                         queue:nil
                    usingBlock:^(NSNotification *notification) {
      NSLog(@"Capture session error: %@", notification.userInfo);
    }];
  }
  return self;
}

- (void)dealloc {
  [self stopCaptureAsync];
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (BOOL)canUseBackCamera {
  return _backDeviceInput != nil;
}

- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (_useBackCamera == useBackCamera) {
    return;
  }
  if (!self.canUseBackCamera) {
    RTCLog(@"No rear-facing camera exists or it cannot be used;"
            " not switching.");
    return;
  }
  _useBackCamera = useBackCamera;
  [self updateSessionInput];
}

- (void)startCaptureAsync {
  if (_isRunning) {
    return;
  }
  _orientationHasChanged = NO;
  [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
  AVCaptureSession* session = _captureSession;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [session startRunning];
  }];
  _isRunning = YES;
}

- (void)stopCaptureAsync {
  if (!_isRunning) {
    return;
  }
  [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
  AVCaptureSession* session = _captureSession;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [session stopRunning];
  }];
  [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  _isRunning = NO;
}

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoOutput);
  if (!_isRunning) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  NSLog(@"Dropped sample buffer.");
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  _captureSession = [[AVCaptureSession alloc] init];
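  // usesApplicationAudioSession is only available on iOS 7 and later, so it
  // is guarded both at compile time (SDK check) and at run time (OS version
  // check) below.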
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    _captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![_captureSession canSetSessionPreset:kDefaultPreset]) {
    NSLog(@"Default video capture preset unsupported.");
    return NO;
  }
  _captureSession.sessionPreset = kDefaultPreset;

  // Make the capturer output NV12. Ideally we want I420 but that's not
  // currently supported on iPhone / iPad.
  _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
  _videoOutput.videoSettings = @{
    (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
  };
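  // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is the full-range,
  // biplanar 4:2:0 layout that FOURCC_NV12 refers to: a full-resolution Y
  // plane followed by an interleaved, subsampled CbCr plane.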
  _videoOutput.alwaysDiscardsLateVideoFrames = NO;
  [_videoOutput setSampleBufferDelegate:self
                                  queue:dispatch_get_main_queue()];
  if (![_captureSession canAddOutput:_videoOutput]) {
    NSLog(@"Default video capture output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoOutput];

  // Find the capture devices.
  AVCaptureDevice *frontCaptureDevice = nil;
  AVCaptureDevice *backCaptureDevice = nil;
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == AVCaptureDevicePositionBack) {
      backCaptureDevice = captureDevice;
    }
    if (captureDevice.position == AVCaptureDevicePositionFront) {
      frontCaptureDevice = captureDevice;
    }
  }
  if (!frontCaptureDevice) {
    RTCLog(@"Failed to get front capture device.");
    return NO;
  }
  if (!backCaptureDevice) {
    RTCLog(@"Failed to get back capture device.");
    // Don't return NO here because devices exist (16GB 5th generation iPod
    // Touch) that don't have a rear-facing camera.
  }

  // Set up the session inputs.
  NSError *error = nil;
  _frontDeviceInput =
      [AVCaptureDeviceInput deviceInputWithDevice:frontCaptureDevice
                                            error:&error];
  if (!_frontDeviceInput) {
    NSLog(@"Failed to get capture device input: %@",
          error.localizedDescription);
    return NO;
  }
  if (backCaptureDevice) {
    error = nil;
    _backDeviceInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCaptureDevice
                                              error:&error];
    if (error) {
      RTCLog(@"Failed to get capture device input: %@",
             error.localizedDescription);
      _backDeviceInput = nil;
    }
  }

  // Add the inputs.
  if (![_captureSession canAddInput:_frontDeviceInput] ||
      (_backDeviceInput && ![_captureSession canAddInput:_backDeviceInput])) {
    NSLog(@"Session does not support capture inputs.");
    return NO;
  }
  [self updateSessionInput];

  return YES;
}

- (void)deviceOrientationDidChange:(NSNotification *)notification {
  _orientationHasChanged = YES;
  [self updateOrientation];
}

- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
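  // UIDeviceOrientation and AVCaptureVideoOrientation have opposite senses of
  // landscape (device orientation is reported relative to the home button),
  // so landscape-left device orientation maps to landscape-right video
  // orientation and vice versa.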
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
}

- (void)updateSessionInput {
  // Update the current session input to match what's stored in _useBackCamera.
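  // beginConfiguration/commitConfiguration batch the input swap and the
  // orientation update so that the session applies them as one atomic change.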
  [_captureSession beginConfiguration];
  AVCaptureDeviceInput *oldInput = _backDeviceInput;
  AVCaptureDeviceInput *newInput = _frontDeviceInput;
  if (_useBackCamera) {
    oldInput = _frontDeviceInput;
    newInput = _backDeviceInput;
  }
  // It's OK to remove an input even if it's not attached; removal is then a
  // no-op.
  [_captureSession removeInput:oldInput];
  [_captureSession addInput:newInput];
  [self updateOrientation];
  [_captureSession commitConfiguration];
}

@end

namespace webrtc {

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}
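
// A rough usage sketch, illustrative only (the surrounding plumbing is an
// assumption, not part of this file): callers start the capturer with the one
// supported format and stop it when finished.
//
//   webrtc::AVFoundationVideoCapturer capturer;
//   cricket::CaptureState state = capturer.Start(kDefaultFormat);
//   // ... frames are delivered through SignalFrameCaptured ...
//   capturer.Stop();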

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't entirely accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns asynchronously.
  // TODO(tkchin): make this better.
  [_capturer startCaptureAsync];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stopCaptureAsync];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (imageBuffer == NULL) {
    return;
  }

  // Base address must be locked to access frame data.
  CVOptionFlags lockFlags = kCVPixelBufferLock_ReadOnly;
  CVReturn ret = CVPixelBufferLockBaseAddress(imageBuffer, lockFlags);
  if (ret != kCVReturnSuccess) {
    return;
  }

  static size_t const kYPlaneIndex = 0;
  static size_t const kUVPlaneIndex = 1;
  uint8_t *yPlaneAddress =
      (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kYPlaneIndex);
  size_t yPlaneHeight =
      CVPixelBufferGetHeightOfPlane(imageBuffer, kYPlaneIndex);
  size_t yPlaneWidth =
      CVPixelBufferGetWidthOfPlane(imageBuffer, kYPlaneIndex);
  size_t yPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kYPlaneIndex);
  size_t uvPlaneHeight =
      CVPixelBufferGetHeightOfPlane(imageBuffer, kUVPlaneIndex);
  size_t uvPlaneBytesPerRow =
      CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, kUVPlaneIndex);
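  // The Y plane covers the full resolution while the interleaved CbCr plane
  // has half the rows, so the total frame size is the sum of stride * rows
  // for each plane.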
  size_t frameSize =
      yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;

  // Sanity check assumption that planar bytes are contiguous.
  uint8_t *uvPlaneAddress =
      (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, kUVPlaneIndex);
  RTC_DCHECK(
      uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);

  // Stuff data into a cricket::CapturedFrame.
  int64_t currentTime = rtc::TimeNanos();
  cricket::CapturedFrame frame;
  frame.width = yPlaneWidth;
  frame.height = yPlaneHeight;
  frame.pixel_width = 1;
  frame.pixel_height = 1;
  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
  frame.time_stamp = currentTime;
  frame.data = yPlaneAddress;
  frame.data_size = frameSize;
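
  // SignalFrameCaptured must fire on the thread that Start() was called on.
  // Invoke() runs synchronously, so the locked pixel buffer remains valid
  // until the frame has been consumed; only then is it unlocked below.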
  if (_startThread->IsCurrent()) {
    SignalFrameCaptured(this, &frame);
  } else {
    _startThread->Invoke<void>(
        rtc::Bind(&AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread,
                  this, &frame));
  }
  CVPixelBufferUnlockBaseAddress(imageBuffer, lockFlags);
}

void AVFoundationVideoCapturer::SignalFrameCapturedOnStartThread(
    const cricket::CapturedFrame *frame) {
  RTC_DCHECK(_startThread->IsCurrent());
  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, frame);
}

}  // namespace webrtc