Report timing frames info in GetStats.

Some frames are already marked as 'timing frames' via the video-timing RTP header extension. Timestamps along the full WebRTC pipeline are gathered for these frames. This CL implements reporting of these timestamps for a single
timing frame since the last GetStats(). The frame with the longest end-to-end delay between two consecutive GetStats calls is reported.

The purpose of this timing information is not to provide realtime statistics but to provide debugging information, as it will help identify problematic places in the video pipeline for outliers (frames which took the longest to process).

BUG=webrtc:7594

Review-Url: https://codereview.webrtc.org/2946413002
Cr-Commit-Position: refs/heads/master@{#18909}
diff --git a/webrtc/modules/video_coding/frame_buffer2.cc b/webrtc/modules/video_coding/frame_buffer2.cc
index 1114e7c..2beffc7 100644
--- a/webrtc/modules/video_coding/frame_buffer2.cc
+++ b/webrtc/modules/video_coding/frame_buffer2.cc
@@ -149,6 +149,7 @@
       }
 
       UpdateJitterDelay();
+      UpdateTimingFrameInfo();
       PropagateDecodability(next_frame_it_->second);
 
       // Sanity check for RTP timestamp monotonicity.
@@ -534,8 +535,15 @@
   }
 }
 
+void FrameBuffer::UpdateTimingFrameInfo() {
+  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
+  rtc::Optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
+  if (info)
+    stats_callback_->OnTimingFrameInfoUpdated(*info);
+}
+
 void FrameBuffer::ClearFramesAndHistory() {
-  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
+  TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
   frames_.clear();
   last_decoded_frame_it_ = frames_.end();
   last_continuous_frame_it_ = frames_.end();
diff --git a/webrtc/modules/video_coding/frame_buffer2.h b/webrtc/modules/video_coding/frame_buffer2.h
index ffeb2aa..25ba0a9 100644
--- a/webrtc/modules/video_coding/frame_buffer2.h
+++ b/webrtc/modules/video_coding/frame_buffer2.h
@@ -153,6 +153,8 @@
 
   void UpdateJitterDelay() EXCLUSIVE_LOCKS_REQUIRED(crit_);
 
+  void UpdateTimingFrameInfo() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
   void ClearFramesAndHistory() EXCLUSIVE_LOCKS_REQUIRED(crit_);
 
   bool HasBadRenderTiming(const FrameObject& frame, int64_t now_ms)
diff --git a/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
index 58b3f7a..9d15302 100644
--- a/webrtc/modules/video_coding/frame_buffer2_unittest.cc
+++ b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -116,6 +116,7 @@
                     int jitter_buffer_ms,
                     int min_playout_delay_ms,
                     int render_delay_ms));
+  MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info));
 };
 
 class TestFrameBuffer2 : public ::testing::Test {
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
index 42ee8b6..80b08b5 100644
--- a/webrtc/modules/video_coding/generic_decoder.cc
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -10,6 +10,8 @@
 
 #include "webrtc/modules/video_coding/generic_decoder.h"
 
+#include <algorithm>
+
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/timeutils.h"
@@ -91,6 +93,7 @@
 
   // Report timing information.
   if (frameInfo->timing.is_timing_frame) {
+    int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_;
     // Convert remote timestamps to local time from ntp timestamps.
     frameInfo->timing.encode_start_ms -= ntp_offset_;
     frameInfo->timing.encode_finish_ms -= ntp_offset_;
@@ -98,19 +101,44 @@
     frameInfo->timing.pacer_exit_ms -= ntp_offset_;
     frameInfo->timing.network_timestamp_ms -= ntp_offset_;
     frameInfo->timing.network2_timestamp_ms -= ntp_offset_;
-    // TODO(ilnik): Report timing information here.
-    // Capture time: decodedImage.ntp_time_ms() - ntp_offset
-    // Encode start: frameInfo->timing.encode_start_ms
-    // Encode finish: frameInfo->timing.encode_finish_ms
-    // Packetization done: frameInfo->timing.packetization_finish_ms
-    // Pacer exit: frameInfo->timing.pacer_exit_ms
-    // Network timestamp: frameInfo->timing.network_timestamp_ms
-    // Network2 timestamp: frameInfo->timing.network2_timestamp_ms
-    // Receive start: frameInfo->timing.receive_start_ms
-    // Receive finish: frameInfo->timing.receive_finish_ms
-    // Decode start: frameInfo->decodeStartTimeMs
-    // Decode finish: now_ms
-    // Render time: frameInfo->renderTimeMs
+
+    int64_t sender_delta_ms = 0;
+    if (decodedImage.ntp_time_ms() < 0) {
+      // Sender clock is not estimated yet. Make sure that sender times are all
+      // negative to indicate that. Yet they still should be relatively correct.
+      sender_delta_ms =
+          std::max({capture_time_ms, frameInfo->timing.encode_start_ms,
+                    frameInfo->timing.encode_finish_ms,
+                    frameInfo->timing.packetization_finish_ms,
+                    frameInfo->timing.pacer_exit_ms,
+                    frameInfo->timing.network_timestamp_ms,
+                    frameInfo->timing.network2_timestamp_ms}) +
+          1;
+    }
+
+    TimingFrameInfo timing_frame_info;
+
+    timing_frame_info.capture_time_ms = capture_time_ms - sender_delta_ms;
+    timing_frame_info.encode_start_ms =
+        frameInfo->timing.encode_start_ms - sender_delta_ms;
+    timing_frame_info.encode_finish_ms =
+        frameInfo->timing.encode_finish_ms - sender_delta_ms;
+    timing_frame_info.packetization_finish_ms =
+        frameInfo->timing.packetization_finish_ms - sender_delta_ms;
+    timing_frame_info.pacer_exit_ms =
+        frameInfo->timing.pacer_exit_ms - sender_delta_ms;
+    timing_frame_info.network_timestamp_ms =
+        frameInfo->timing.network_timestamp_ms - sender_delta_ms;
+    timing_frame_info.network2_timestamp_ms =
+        frameInfo->timing.network2_timestamp_ms - sender_delta_ms;
+    timing_frame_info.receive_start_ms = frameInfo->timing.receive_start_ms;
+    timing_frame_info.receive_finish_ms = frameInfo->timing.receive_finish_ms;
+    timing_frame_info.decode_start_ms = frameInfo->decodeStartTimeMs;
+    timing_frame_info.decode_finish_ms = now_ms;
+    timing_frame_info.render_time_ms = frameInfo->renderTimeMs;
+    timing_frame_info.rtp_timestamp = decodedImage.timestamp();
+
+    _timing->SetTimingFrameInfo(timing_frame_info);
   }
 
   decodedImage.set_timestamp_us(
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
index 6ed472d..19d3813 100644
--- a/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -109,6 +109,8 @@
                                            int min_playout_delay_ms,
                                            int render_delay_ms) = 0;
 
+  virtual void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) = 0;
+
  protected:
   virtual ~VCMReceiveStatisticsCallback() {}
 };
diff --git a/webrtc/modules/video_coding/timing.cc b/webrtc/modules/video_coding/timing.cc
index 21f4a3d..340f2c1 100644
--- a/webrtc/modules/video_coding/timing.cc
+++ b/webrtc/modules/video_coding/timing.cc
@@ -21,21 +21,22 @@
 namespace webrtc {
 
 VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing)
-  : clock_(clock),
-    master_(false),
-    ts_extrapolator_(),
-    codec_timer_(new VCMCodecTimer()),
-    render_delay_ms_(kDefaultRenderDelayMs),
-    min_playout_delay_ms_(0),
-    max_playout_delay_ms_(10000),
-    jitter_delay_ms_(0),
-    current_delay_ms_(0),
-    last_decode_ms_(0),
-    prev_frame_timestamp_(0),
-    num_decoded_frames_(0),
-    num_delayed_decoded_frames_(0),
-    first_decoded_frame_ms_(-1),
-    sum_missed_render_deadline_ms_(0) {
+    : clock_(clock),
+      master_(false),
+      ts_extrapolator_(),
+      codec_timer_(new VCMCodecTimer()),
+      render_delay_ms_(kDefaultRenderDelayMs),
+      min_playout_delay_ms_(0),
+      max_playout_delay_ms_(10000),
+      jitter_delay_ms_(0),
+      current_delay_ms_(0),
+      last_decode_ms_(0),
+      prev_frame_timestamp_(0),
+      timing_frame_info_(),
+      num_decoded_frames_(0),
+      num_delayed_decoded_frames_(0),
+      first_decoded_frame_ms_(-1),
+      sum_missed_render_deadline_ms_(0) {
   if (master_timing == NULL) {
     master_ = true;
     ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
@@ -304,4 +305,14 @@
   return (num_decoded_frames_ > 0);
 }
 
+void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) {
+  rtc::CritScope cs(&crit_sect_);
+  timing_frame_info_.emplace(info);
+}
+
+rtc::Optional<TimingFrameInfo> VCMTiming::GetTimingFrameInfo() {
+  rtc::CritScope cs(&crit_sect_);
+  return timing_frame_info_;
+}
+
 }  // namespace webrtc
diff --git a/webrtc/modules/video_coding/timing.h b/webrtc/modules/video_coding/timing.h
index bfd6778..82252bf 100644
--- a/webrtc/modules/video_coding/timing.h
+++ b/webrtc/modules/video_coding/timing.h
@@ -102,6 +102,9 @@
                           int* min_playout_delay_ms,
                           int* render_delay_ms) const;
 
+  void SetTimingFrameInfo(const TimingFrameInfo& info);
+  rtc::Optional<TimingFrameInfo> GetTimingFrameInfo();
+
   enum { kDefaultRenderDelayMs = 10 };
   enum { kDelayMaxChangeMsPerS = 100 };
 
@@ -131,6 +134,7 @@
   int current_delay_ms_ GUARDED_BY(crit_sect_);
   int last_decode_ms_ GUARDED_BY(crit_sect_);
   uint32_t prev_frame_timestamp_ GUARDED_BY(crit_sect_);
+  rtc::Optional<TimingFrameInfo> timing_frame_info_ GUARDED_BY(crit_sect_);
 
   // Statistics.
   size_t num_decoded_frames_ GUARDED_BY(crit_sect_);