Implement timing frames.

Timing information is gathered in EncodedImage, starting at the
encoders, then sent over the wire in an RTP header extension, and
finally reassembled at the GenericDecoder. Actual reporting and tests
will come in follow-up CLs.
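
For context, a rough sketch of the delta scheme (hypothetical type and
helper names, not the actual webrtc API): the sender stores each
pipeline stage as a small millisecond offset from the frame's capture
time so it fits in the RTP header extension, and the receiver adds the
offsets back onto the frame's NTP capture time, as the frame_buffer.cc
change below does.

  #include <cstdint>

  struct TimingDeltas {  // assumed layout, loosely mirroring the send-side struct
    uint16_t encode_start_delta_ms;
    uint16_t encode_finish_delta_ms;
    uint16_t packetization_finish_delta_ms;
    uint16_t pacer_exit_delta_ms;
  };

  // Sender side: absolute local time of a stage -> delta from capture time.
  inline uint16_t ToDelta(int64_t stage_time_ms, int64_t capture_time_ms) {
    return static_cast<uint16_t>(stage_time_ms - capture_time_ms);
  }

  // Receiver side: delta -> absolute time on the receiver's NTP time base.
  inline int64_t FromDelta(uint16_t delta_ms, int64_t ntp_capture_time_ms) {
    return ntp_capture_time_ms + delta_ms;
  }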

BUG=webrtc:7594

Review-Url: https://codereview.webrtc.org/2911193002
Cr-Commit-Position: refs/heads/master@{#18659}
diff --git a/webrtc/modules/video_coding/frame_buffer.cc b/webrtc/modules/video_coding/frame_buffer.cc
index 5ea12dc..11bf88a 100644
--- a/webrtc/modules/video_coding/frame_buffer.cc
+++ b/webrtc/modules/video_coding/frame_buffer.cc
@@ -164,6 +164,27 @@
     rotation_ = packet.video_header.rotation;
     _rotation_set = true;
     content_type_ = packet.video_header.content_type;
+    if (packet.video_header.video_timing.is_timing_frame) {
+      timing_.is_timing_frame = true;
+      timing_.encode_start_ms =
+          ntp_time_ms_ + packet.video_header.video_timing.encode_start_delta_ms;
+      timing_.encode_finish_ms =
+          ntp_time_ms_ +
+          packet.video_header.video_timing.encode_finish_delta_ms;
+      timing_.packetization_finish_ms =
+          ntp_time_ms_ +
+          packet.video_header.video_timing.packetization_finish_delta_ms;
+      timing_.pacer_exit_ms =
+          ntp_time_ms_ + packet.video_header.video_timing.pacer_exit_delta_ms;
+      timing_.network_timestamp_ms =
+          ntp_time_ms_ +
+          packet.video_header.video_timing.network_timstamp_delta_ms;
+      timing_.network2_timestamp_ms =
+          ntp_time_ms_ +
+          packet.video_header.video_timing.network2_timstamp_delta_ms;
+    } else {
+      timing_.is_timing_frame = false;
+    }
   }
 
   if (packet.is_first_packet_in_frame) {
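
As a hedged illustration of what the reconstructed timestamps are for
(hypothetical types; the real reporting is not part of this CL): once
every stage sits on the same NTP time base, per-stage durations are
plain differences.

  #include <cstdint>

  struct FrameTimingMs {  // hypothetical mirror of the reconstructed timing_
    int64_t encode_start_ms;
    int64_t encode_finish_ms;
    int64_t packetization_finish_ms;
    int64_t pacer_exit_ms;
    int64_t network_timestamp_ms;
    int64_t network2_timestamp_ms;
  };

  struct StageDurationsMs {
    int64_t encode_ms;
    int64_t packetization_ms;
    int64_t pacer_ms;
  };

  // Durations fall out as differences of consecutive stage timestamps.
  inline StageDurationsMs ComputeDurations(const FrameTimingMs& t) {
    return {t.encode_finish_ms - t.encode_start_ms,
            t.packetization_finish_ms - t.encode_finish_ms,
            t.pacer_exit_ms - t.packetization_finish_ms};
  }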