Add AVFoundation video capture support to Mac objc SDK (based on iOS)

The AppRTCDemo app on Mac OS X does not show or send local video streams,
because the AVFoundation capture session code is not compiled in or wired
up in the appropriate places.  This is the first part of a two-part patch
that implements local capture on the Mac for AppRTCDemo.

P.S. This is my first patch to WebRTC. I didn't see any relevant tests, but I could write some if you can point me at a location. Also, I don't believe I have access to the automated tests.

BUG=webrtc:3417

Review-Url: https://codereview.webrtc.org/2046863004
Cr-Commit-Position: refs/heads/master@{#13080}
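
For reference, a minimal sketch (not part of this patch) of how an OS X client such as AppRTCDemo could pick up the newly enabled capture path. The factory and video-source methods are the SDK's existing Objective-C API; the stream and track ids below are illustrative placeholders, not values required by this change.

    #import <WebRTC/RTCPeerConnectionFactory.h>
    #import <WebRTC/RTCAVFoundationVideoSource.h>
    #import <WebRTC/RTCVideoTrack.h>
    #import <WebRTC/RTCMediaStream.h>

    RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
    // Previously returned nil when not built for iOS; with this patch the
    // source is backed by the default AVFoundation video device on the Mac.
    RTCAVFoundationVideoSource *source =
        [factory avFoundationVideoSourceWithConstraints:nil];
    RTCVideoTrack *localVideoTrack =
        [factory videoTrackWithSource:source trackId:@"video0"];
    RTCMediaStream *localStream = [factory mediaStreamWithStreamId:@"stream0"];
    [localStream addVideoTrack:localVideoTrack];
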
diff --git a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm
index 2398ce5..2f5a87b 100644
--- a/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm
+++ b/webrtc/sdk/objc/Framework/Classes/RTCPeerConnectionFactory.mm
@@ -11,9 +11,7 @@
 #import "RTCPeerConnectionFactory+Private.h"
 
 #import "NSString+StdString.h"
-#if defined(WEBRTC_IOS)
 #import "RTCAVFoundationVideoSource+Private.h"
-#endif
 #import "RTCAudioTrack+Private.h"
 #import "RTCMediaStream+Private.h"
 #import "RTCPeerConnection+Private.h"
@@ -54,12 +52,8 @@
 
 - (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
     (nullable RTCMediaConstraints *)constraints {
-#if defined(WEBRTC_IOS)
   return [[RTCAVFoundationVideoSource alloc] initWithFactory:self
                                                  constraints:constraints];
-#else
-  return nil;
-#endif
 }
 
 - (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
index 0d510fb..4a0ecea 100644
--- a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
@@ -12,7 +12,9 @@
 
 #import <AVFoundation/AVFoundation.h>
 #import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
 #import <UIKit/UIKit.h>
+#endif
 
 #import "RTCDispatcher+Private.h"
 #import "WebRTC/RTCLogging.h"
@@ -88,6 +90,7 @@
       return nil;
     }
     NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+#if TARGET_OS_IPHONE
     [center addObserver:self
                selector:@selector(deviceOrientationDidChange:)
                    name:UIDeviceOrientationDidChangeNotification
@@ -100,6 +103,7 @@
                selector:@selector(handleCaptureSessionInterruptionEnded:)
                    name:AVCaptureSessionInterruptionEndedNotification
                  object:_captureSession];
+#endif
     [center addObserver:self
                selector:@selector(handleCaptureSessionRuntimeError:)
                    name:AVCaptureSessionRuntimeErrorNotification
@@ -188,7 +192,9 @@
                                block:^{
     _orientationHasChanged = NO;
     [self updateOrientation];
+#if TARGET_OS_IPHONE
     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+#endif
     AVCaptureSession *captureSession = self.captureSession;
     [captureSession startRunning];
   }];
@@ -207,12 +213,15 @@
                                block:^{
     [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
     [_captureSession stopRunning];
+#if TARGET_OS_IPHONE
     [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+#endif
   }];
 }
 
 #pragma mark iOS notifications
 
+#if TARGET_OS_IPHONE
 - (void)deviceOrientationDidChange:(NSNotification *)notification {
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
@@ -220,6 +229,7 @@
     [self updateOrientation];
   }];
 }
+#endif
 
 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
 
@@ -273,16 +283,21 @@
 }
 
 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
-  NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
+  NSError *error =
+      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
   RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);
 
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
+#if TARGET_OS_IPHONE
     if (error.code == AVErrorMediaServicesWereReset) {
       [self handleNonFatalError];
     } else {
       [self handleFatalError];
     }
+#else
+    [self handleFatalError];
+#endif
   }];
 }
 
@@ -402,8 +417,13 @@
 
 - (AVCaptureDeviceInput *)frontCameraInput {
   if (!_frontCameraInput) {
+#if TARGET_OS_IPHONE
     AVCaptureDevice *frontCameraDevice =
         [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
+#else
+    AVCaptureDevice *frontCameraDevice =
+        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+#endif
     if (!frontCameraDevice) {
       RTCLogWarning(@"Failed to find front capture device.");
       return nil;
@@ -452,6 +472,7 @@
     // TODO(tkchin): set rotation bit on frames.
     return;
   }
+#if TARGET_OS_IPHONE
   AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
   switch ([UIDevice currentDevice].orientation) {
     case UIDeviceOrientationPortrait:
@@ -475,6 +496,7 @@
       return;
   }
   connection.videoOrientation = orientation;
+#endif
 }
 
 // Update the current session input to match what's stored in _useBackCamera.