/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "avfoundationvideocapturer.h"

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"

#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread.h"

// TODO(tkchin): support other formats.
static NSString *const kDefaultPreset = AVCaptureSessionPreset640x480;
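// kDefaultFormat must stay in sync with kDefaultPreset: Start() below only
// accepts this single format (640x480 NV12 at 30 fps).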
static cricket::VideoFormat const kDefaultFormat =
    cricket::VideoFormat(640,
                         480,
                         cricket::VideoFormat::FpsToInterval(30),
                         cricket::FOURCC_NV12);

// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason
// for this is that other WebRTC objects own the cricket::VideoCapturer, which
// is not reference counted. To prevent bad behavior we do not expose this
// class directly.
@interface RTCAVFoundationVideoCapturerInternal : NSObject
    <AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) BOOL isRunning;
@property(nonatomic, readonly) BOOL canUseBackCamera;
@property(nonatomic, assign) BOOL useBackCamera;  // Defaults to NO.

// We keep a pointer back to AVFoundationVideoCapturer to make callbacks on it
// when we receive frames. This is safe because this object should be owned by
// it.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer;

// Starts and stops the capture session asynchronously. We cannot do this
// synchronously without blocking a WebRTC thread.
- (void)start;
- (void)stop;

@end

@implementation RTCAVFoundationVideoCapturerInternal {
  // Keep pointers to inputs for convenience.
  AVCaptureDeviceInput *_frontCameraInput;
  AVCaptureDeviceInput *_backCameraInput;
  AVCaptureVideoDataOutput *_videoDataOutput;
  // The cricket::VideoCapturer that owns this class. Should never be NULL.
  webrtc::AVFoundationVideoCapturer *_capturer;
  BOOL _orientationHasChanged;
}

@synthesize captureSession = _captureSession;
@synthesize isRunning = _isRunning;
@synthesize useBackCamera = _useBackCamera;

// This is called from the thread that creates the video source, which is
// likely the main thread.
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer *)capturer {
  RTC_DCHECK(capturer);
  if (self = [super init]) {
    _capturer = capturer;
    // Create the capture session and all relevant inputs and outputs. We need
    // to do this in init because the application may want the capture session
    // before we start the capturer, e.g. for an AVCaptureVideoPreviewLayer.
    // All objects created here are retained until dealloc and never recreated.
    if (![self setupCaptureSession]) {
      return nil;
    }
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    [center addObserverForName:AVCaptureSessionRuntimeErrorNotification
                        object:nil
                         queue:nil
                    usingBlock:^(NSNotification *notification) {
      RTCLogError(@"Capture session error: %@", notification.userInfo);
    }];
  }
  return self;
}

- (void)dealloc {
  RTC_DCHECK(!_isRunning);
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  _capturer = nullptr;
}

- (AVCaptureSession *)captureSession {
  return _captureSession;
}

// Called from any thread (likely main thread).
- (BOOL)canUseBackCamera {
  return _backCameraInput != nil;
}

// Called from any thread (likely main thread).
- (BOOL)useBackCamera {
  @synchronized(self) {
    return _useBackCamera;
  }
}

// Called from any thread (likely main thread).
- (void)setUseBackCamera:(BOOL)useBackCamera {
  if (!self.canUseBackCamera) {
    if (useBackCamera) {
      RTCLogWarning(@"No rear-facing camera exists or it cannot be used; "
                     "not switching.");
    }
    return;
  }
  @synchronized(self) {
    if (_useBackCamera == useBackCamera) {
      return;
    }
    _useBackCamera = useBackCamera;
    [self updateSessionInputForUseBackCamera:useBackCamera];
  }
}

// Called from WebRTC thread.
- (void)start {
  if (_isRunning) {
    return;
  }
  _isRunning = YES;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
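    // Prime the video orientation and begin listening for device rotation
    // before the session starts delivering frames.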
    _orientationHasChanged = NO;
    [self updateOrientation];
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    AVCaptureSession *captureSession = self.captureSession;
    [captureSession startRunning];
  }];
}

// Called from same thread as start.
- (void)stop {
  if (!_isRunning) {
    return;
  }
  _isRunning = NO;
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
    [_captureSession stopRunning];
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
  }];
}

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  NSParameterAssert(captureOutput == _videoDataOutput);
  if (!_isRunning) {
    return;
  }
  _capturer->CaptureSampleBuffer(sampleBuffer);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  RTCLogError(@"Dropped sample buffer.");
}

#pragma mark - Private

- (BOOL)setupCaptureSession {
  AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
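  // On iOS 7+, opt the capture session out of the application's shared
  // AVAudioSession, presumably so that starting video capture does not
  // disturb the app's audio configuration.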
#if defined(__IPHONE_7_0) && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_7_0
  NSString *version = [[UIDevice currentDevice] systemVersion];
  if ([version integerValue] >= 7) {
    captureSession.usesApplicationAudioSession = NO;
  }
#endif
  if (![captureSession canSetSessionPreset:kDefaultPreset]) {
    RTCLogError(@"Session preset unsupported.");
    return NO;
  }
  captureSession.sessionPreset = kDefaultPreset;

  // Add the output.
  AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
  if (![captureSession canAddOutput:videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [captureSession addOutput:videoDataOutput];

  // Get the front and back cameras. If there isn't a front camera, give up.
  AVCaptureDeviceInput *frontCameraInput = [self frontCameraInput];
  AVCaptureDeviceInput *backCameraInput = [self backCameraInput];
  if (!frontCameraInput) {
    RTCLogError(@"No front camera for capture session.");
    return NO;
  }

  // Add the inputs.
  if (![captureSession canAddInput:frontCameraInput] ||
      (backCameraInput && ![captureSession canAddInput:backCameraInput])) {
    RTCLogError(@"Session does not support capture inputs.");
    return NO;
  }
  AVCaptureDeviceInput *input = self.useBackCamera ?
      backCameraInput : frontCameraInput;
  [captureSession addInput:input];
  _captureSession = captureSession;
  return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
  if (!_videoDataOutput) {
    // Make the capturer output NV12. Ideally we want I420 but that's not
    // currently supported on iPhone / iPad.
    AVCaptureVideoDataOutput *videoDataOutput =
        [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    dispatch_queue_t queue =
        [RTCDispatcher dispatchQueueForType:RTCDispatcherTypeCaptureSession];
    [videoDataOutput setSampleBufferDelegate:self queue:queue];
    _videoDataOutput = videoDataOutput;
  }
  return _videoDataOutput;
}

- (AVCaptureDevice *)videoCaptureDeviceForPosition:
    (AVCaptureDevicePosition)position {
  for (AVCaptureDevice *captureDevice in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (captureDevice.position == position) {
      return captureDevice;
    }
  }
  return nil;
}

- (AVCaptureDeviceInput *)frontCameraInput {
  if (!_frontCameraInput) {
    AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
    if (!frontCameraDevice) {
      RTCLogWarning(@"Failed to find front capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *frontCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:frontCameraDevice
                                              error:&error];
    if (!frontCameraInput) {
      RTCLogError(@"Failed to create front camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _frontCameraInput = frontCameraInput;
  }
  return _frontCameraInput;
}

- (AVCaptureDeviceInput *)backCameraInput {
  if (!_backCameraInput) {
    AVCaptureDevice *backCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionBack];
    if (!backCameraDevice) {
      RTCLogWarning(@"Failed to find back capture device.");
      return nil;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *backCameraInput =
        [AVCaptureDeviceInput deviceInputWithDevice:backCameraDevice
                                              error:&error];
    if (!backCameraInput) {
      RTCLogError(@"Failed to create back camera input: %@",
                  error.localizedDescription);
      return nil;
    }
    _backCameraInput = backCameraInput;
  }
  return _backCameraInput;
}

- (void)deviceOrientationDidChange:(NSNotification *)notification {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
    _orientationHasChanged = YES;
    [self updateOrientation];
  }];
}

// Called from capture session queue.
- (void)updateOrientation {
  AVCaptureConnection *connection =
      [_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!connection.supportsVideoOrientation) {
    // TODO(tkchin): set rotation bit on frames.
    return;
  }
  AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
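  // UIDeviceOrientation and AVCaptureVideoOrientation are mirrored for the
  // landscape cases: rotating the device to UIDeviceOrientationLandscapeLeft
  // produces upright video with AVCaptureVideoOrientationLandscapeRight, and
  // vice versa, hence the swapped mapping below.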
  switch ([UIDevice currentDevice].orientation) {
    case UIDeviceOrientationPortrait:
      orientation = AVCaptureVideoOrientationPortrait;
      break;
    case UIDeviceOrientationPortraitUpsideDown:
      orientation = AVCaptureVideoOrientationPortraitUpsideDown;
      break;
    case UIDeviceOrientationLandscapeLeft:
      orientation = AVCaptureVideoOrientationLandscapeRight;
      break;
    case UIDeviceOrientationLandscapeRight:
      orientation = AVCaptureVideoOrientationLandscapeLeft;
      break;
    case UIDeviceOrientationFaceUp:
    case UIDeviceOrientationFaceDown:
    case UIDeviceOrientationUnknown:
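      // The device is flat or the orientation is unknown: keep the previous
      // video orientation, unless it has never been set, in which case fall
      // back to the default portrait orientation.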
      if (!_orientationHasChanged) {
        connection.videoOrientation = orientation;
      }
      return;
  }
  connection.videoOrientation = orientation;
}

// Update the current session input to match what's stored in _useBackCamera.
- (void)updateSessionInputForUseBackCamera:(BOOL)useBackCamera {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
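    // Batch the input swap and the orientation update into a single session
    // configuration change.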
    [_captureSession beginConfiguration];
    AVCaptureDeviceInput *oldInput = _backCameraInput;
    AVCaptureDeviceInput *newInput = _frontCameraInput;
    if (useBackCamera) {
      oldInput = _frontCameraInput;
      newInput = _backCameraInput;
    }
    if (oldInput) {
      // OK to remove this even if it's not attached; it will be a no-op.
      [_captureSession removeInput:oldInput];
    }
    if (newInput) {
      [_captureSession addInput:newInput];
    }
    [self updateOrientation];
    [_captureSession commitConfiguration];
  }];
}

@end

namespace webrtc {

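// Frames are handed from the AVFoundation capture queue to the rtc::Thread
// that called Start(): CaptureSampleBuffer() retains the pixel buffer and
// posts it as a message, and OnFrameMessage() consumes and releases it on
// that thread.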
enum AVFoundationVideoCapturerMessageType : uint32_t {
  kMessageTypeFrame,
};

struct AVFoundationFrame {
  AVFoundationFrame(CVImageBufferRef buffer, int64_t time)
      : image_buffer(buffer), capture_time(time) {}
  CVImageBufferRef image_buffer;
  int64_t capture_time;
};

AVFoundationVideoCapturer::AVFoundationVideoCapturer()
    : _capturer(nil), _startThread(nullptr) {
  // Set our supported formats. This matches kDefaultPreset.
  std::vector<cricket::VideoFormat> supportedFormats;
  supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
  SetSupportedFormats(supportedFormats);
  _capturer =
      [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
}

AVFoundationVideoCapturer::~AVFoundationVideoCapturer() {
  _capturer = nil;
}

cricket::CaptureState AVFoundationVideoCapturer::Start(
    const cricket::VideoFormat& format) {
  if (!_capturer) {
    LOG(LS_ERROR) << "Failed to create AVFoundation capturer.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (_capturer.isRunning) {
    LOG(LS_ERROR) << "The capturer is already running.";
    return cricket::CaptureState::CS_FAILED;
  }
  if (format != kDefaultFormat) {
    LOG(LS_ERROR) << "Unsupported format provided.";
    return cricket::CaptureState::CS_FAILED;
  }

  // Keep track of which thread capture started on. This is the thread that
  // frames need to be sent to.
  RTC_DCHECK(!_startThread);
  _startThread = rtc::Thread::Current();

  SetCaptureFormat(&format);
  // This isn't super accurate because it takes a while for the
  // AVCaptureSession to spin up, and this call returns asynchronously.
  // TODO(tkchin): make this better.
  [_capturer start];
  SetCaptureState(cricket::CaptureState::CS_RUNNING);

  return cricket::CaptureState::CS_STARTING;
}

void AVFoundationVideoCapturer::Stop() {
  [_capturer stop];
  SetCaptureFormat(NULL);
  _startThread = nullptr;
}

bool AVFoundationVideoCapturer::IsRunning() {
  return _capturer.isRunning;
}

AVCaptureSession* AVFoundationVideoCapturer::GetCaptureSession() {
  return _capturer.captureSession;
}

bool AVFoundationVideoCapturer::CanUseBackCamera() const {
  return _capturer.canUseBackCamera;
}

void AVFoundationVideoCapturer::SetUseBackCamera(bool useBackCamera) {
  _capturer.useBackCamera = useBackCamera;
}

bool AVFoundationVideoCapturer::GetUseBackCamera() const {
  return _capturer.useBackCamera;
}

void AVFoundationVideoCapturer::CaptureSampleBuffer(
    CMSampleBufferRef sampleBuffer) {
  if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
      !CMSampleBufferIsValid(sampleBuffer) ||
      !CMSampleBufferDataIsReady(sampleBuffer)) {
    return;
  }

  CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  if (image_buffer == NULL) {
    return;
  }

  // Retain the buffer and post it to the webrtc thread. It will be released
  // after it has successfully been signaled.
  CVBufferRetain(image_buffer);
  AVFoundationFrame frame(image_buffer, rtc::TimeNanos());
  _startThread->Post(this, kMessageTypeFrame,
                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
}

void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
  switch (msg->message_id) {
    case kMessageTypeFrame: {
      rtc::TypedMessageData<AVFoundationFrame>* data =
          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
      const AVFoundationFrame& frame = data->data();
      OnFrameMessage(frame.image_buffer, frame.capture_time);
      delete data;
      break;
    }
  }
}

void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
                                               int64_t capture_time) {
  RTC_DCHECK(_startThread->IsCurrent());

  // Base address must be locked to access frame data.
  CVOptionFlags lock_flags = kCVPixelBufferLock_ReadOnly;
  CVReturn ret = CVPixelBufferLockBaseAddress(image_buffer, lock_flags);
  if (ret != kCVReturnSuccess) {
    return;
  }

  static size_t const kYPlaneIndex = 0;
  static size_t const kUVPlaneIndex = 1;
  uint8_t* y_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kYPlaneIndex));
  size_t y_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_width =
      CVPixelBufferGetWidthOfPlane(image_buffer, kYPlaneIndex);
  size_t y_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kYPlaneIndex);
  size_t uv_plane_height =
      CVPixelBufferGetHeightOfPlane(image_buffer, kUVPlaneIndex);
  size_t uv_plane_bytes_per_row =
      CVPixelBufferGetBytesPerRowOfPlane(image_buffer, kUVPlaneIndex);
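  // NV12 layout: a full-height Y plane followed by a half-height interleaved
  // CbCr plane, so the total frame size is the sum of each plane's
  // bytes-per-row times its height.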
  size_t frame_size = y_plane_bytes_per_row * y_plane_height +
      uv_plane_bytes_per_row * uv_plane_height;

  // Sanity check assumption that planar bytes are contiguous.
  uint8_t* uv_plane_address =
      static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(image_buffer,
                                                               kUVPlaneIndex));
  RTC_DCHECK(uv_plane_address ==
             y_plane_address + y_plane_height * y_plane_bytes_per_row);

  // Stuff data into a cricket::CapturedFrame.
  cricket::CapturedFrame frame;
  frame.width = y_plane_width;
  frame.height = y_plane_height;
  frame.pixel_width = 1;
  frame.pixel_height = 1;
  frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
  frame.time_stamp = capture_time;
  frame.data = y_plane_address;
  frame.data_size = frame_size;

  // This will call a superclass method that will perform the frame conversion
  // to I420.
  SignalFrameCaptured(this, &frame);

  CVPixelBufferUnlockBaseAddress(image_buffer, lock_flags);
  CVBufferRelease(image_buffer);
}

}  // namespace webrtc