AVFoundation Video Capturer: Remove thread jump when delivering frames

WebRTC no longer has any restriction on what thread frames should be
delivered on. One possible problem with this CL is that NV21->I420
conversion and scaling are done on the thread that delivers frames, which
might cause fps regressions.

R=nisse@webrtc.org, perkj@webrtc.org, tkchin@webrtc.org

Review URL: https://codereview.webrtc.org/2137503003 .

Cr-Commit-Position: refs/heads/master@{#14021}
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
index 5ce0462..ebabb8c 100644
--- a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.h
@@ -25,8 +25,7 @@
 
 namespace webrtc {
 
-class AVFoundationVideoCapturer : public cricket::VideoCapturer,
-                                  public rtc::MessageHandler {
+class AVFoundationVideoCapturer : public cricket::VideoCapturer {
  public:
   AVFoundationVideoCapturer();
   ~AVFoundationVideoCapturer();
@@ -59,16 +58,8 @@
   void CaptureSampleBuffer(CMSampleBufferRef sample_buffer,
                            webrtc::VideoRotation rotation);
 
-  // Handles messages from posts.
-  void OnMessage(rtc::Message *msg) override;
-
  private:
-  void OnFrameMessage(CVImageBufferRef image_buffer,
-                      webrtc::VideoRotation rotation,
-                      int64_t capture_time_ns);
-
   RTCAVFoundationVideoCapturerInternal *_capturer;
-  rtc::Thread *_startThread;  // Set in Start(), unset in Stop().
   webrtc::I420BufferPool _buffer_pool;
 };  // AVFoundationVideoCapturer
 
diff --git a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
index 625f83d..ef29049 100644
--- a/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
+++ b/webrtc/sdk/objc/Framework/Classes/avfoundationvideocapturer.mm
@@ -596,18 +596,7 @@
   kMessageTypeFrame,
 };
 
-struct AVFoundationFrame {
-  AVFoundationFrame(CVImageBufferRef buffer,
-                    webrtc::VideoRotation rotation,
-                    int64_t time)
-      : image_buffer(buffer), rotation(rotation), capture_time(time) {}
-  CVImageBufferRef image_buffer;
-  webrtc::VideoRotation rotation;
-  int64_t capture_time;
-};
-
-AVFoundationVideoCapturer::AVFoundationVideoCapturer()
-    : _capturer(nil), _startThread(nullptr) {
+AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
   // Set our supported formats. This matches kAvailablePresets.
   _capturer =
       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
@@ -663,11 +652,6 @@
   _capturer.captureSession.sessionPreset = desiredPreset;
   [_capturer.captureSession commitConfiguration];
 
-  // Keep track of which thread capture started on. This is the thread that
-  // frames need to be sent to.
-  RTC_DCHECK(!_startThread);
-  _startThread = rtc::Thread::Current();
-
   SetCaptureFormat(&format);
   // This isn't super accurate because it takes a while for the AVCaptureSession
   // to spin up, and this call returns async.
@@ -686,7 +670,6 @@
 void AVFoundationVideoCapturer::Stop() {
   [_capturer stop];
   SetCaptureFormat(NULL);
-  _startThread = nullptr;
 }
 
 bool AVFoundationVideoCapturer::IsRunning() {
@@ -722,32 +705,6 @@
     return;
   }
 
-  // Retain the buffer and post it to the webrtc thread. It will be released
-  // after it has successfully been signaled.
-  CVBufferRetain(image_buffer);
-  AVFoundationFrame frame(image_buffer, rotation, rtc::TimeNanos());
-  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
-                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
-}
-
-void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
-  switch (msg->message_id) {
-    case kMessageTypeFrame: {
-      rtc::TypedMessageData<AVFoundationFrame>* data =
-        static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
-      const AVFoundationFrame& frame = data->data();
-      OnFrameMessage(frame.image_buffer, frame.rotation, frame.capture_time);
-      delete data;
-      break;
-    }
-  }
-}
-
-void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
-                                               webrtc::VideoRotation rotation,
-                                               int64_t capture_time_ns) {
-  RTC_DCHECK(_startThread->IsCurrent());
-
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);
 
@@ -763,11 +720,10 @@
   int64_t translated_camera_time_us;
 
   if (!AdaptFrame(captured_width, captured_height,
-                  capture_time_ns / rtc::kNumNanosecsPerMicrosec,
+                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
                   rtc::TimeMicros(), &adapted_width, &adapted_height,
                   &crop_width, &crop_height, &crop_x, &crop_y,
                   &translated_camera_time_us)) {
-    CVBufferRelease(image_buffer);
     return;
   }
 
@@ -801,8 +757,6 @@
   OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                     translated_camera_time_us, 0),
           captured_width, captured_height);
-
-  CVBufferRelease(image_buffer);
 }
 
 }  // namespace webrtc