Report timing frame info in GetStats.
Some frames are already marked as 'timing frames' via the video-timing RTP header
extension, and timestamps along the full WebRTC pipeline are gathered for these
frames. This CL implements reporting of these timestamps for a single timing frame
since the last GetStats(): the frame with the longest end-to-end delay between two
consecutive GetStats calls is reported.

The purpose of this timing information is not to provide real-time statistics but
to provide debugging information: it helps identify problematic places in the video
pipeline for outliers (the frames that took the longest to process).
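
As an illustration of the selection rule above (the GetStats plumbing itself is not
part of the hunks below, which only cover the receive-side collection), here is a
minimal sketch of "keep the frame with the longest end-to-end delay and hand it over
on GetStats()". TimingFrameInfoLite and TimingFrameSelector are hypothetical names;
capture_time_ms and decode_finish_ms mirror the TimingFrameInfo fields this CL fills in:

  // Hypothetical helper, not part of this CL; shown only to illustrate the
  // "longest end-to-end delay since the last GetStats()" selection.
  #include <cstdint>

  struct TimingFrameInfoLite {
    uint32_t rtp_timestamp = 0;
    int64_t capture_time_ms = -1;   // Mirrors TimingFrameInfo::capture_time_ms.
    int64_t decode_finish_ms = -1;  // Mirrors TimingFrameInfo::decode_finish_ms.
    int64_t EndToEndDelayMs() const { return decode_finish_ms - capture_time_ms; }
  };

  class TimingFrameSelector {
   public:
    // Called for every completed timing frame.
    void OnTimingFrameInfoUpdated(const TimingFrameInfoLite& info) {
      if (!has_candidate_ ||
          info.EndToEndDelayMs() > candidate_.EndToEndDelayMs()) {
        candidate_ = info;
        has_candidate_ = true;
      }
    }
    // Called from GetStats(): report the current candidate and start over.
    bool TakeTimingFrameInfo(TimingFrameInfoLite* out) {
      if (!has_candidate_)
        return false;
      *out = candidate_;
      has_candidate_ = false;
      return true;
    }

   private:
    TimingFrameInfoLite candidate_;
    bool has_candidate_ = false;
  };
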
BUG=webrtc:7594
Review-Url: https://codereview.webrtc.org/2946413002
Cr-Commit-Position: refs/heads/master@{#18909}
diff --git a/webrtc/modules/include/module_common_types.h b/webrtc/modules/include/module_common_types.h
index b719226..abf8b94 100644
--- a/webrtc/modules/include/module_common_types.h
+++ b/webrtc/modules/include/module_common_types.h
@@ -61,7 +61,7 @@
VideoContentType content_type;
- VideoTiming video_timing;
+ VideoSendTiming video_timing;
bool is_first_packet_in_frame;
uint8_t simulcastIdx; // Index if the simulcast encoder creating
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
index f630baf..c3fc9dc 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.cc
@@ -266,40 +266,41 @@
constexpr const char* VideoTimingExtension::kUri;
bool VideoTimingExtension::Parse(rtc::ArrayView<const uint8_t> data,
- VideoTiming* timing) {
+ VideoSendTiming* timing) {
RTC_DCHECK(timing);
if (data.size() != kValueSizeBytes)
return false;
timing->encode_start_delta_ms =
ByteReader<uint16_t>::ReadBigEndian(data.data());
timing->encode_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
- data.data() + 2 * VideoTiming::kEncodeFinishDeltaIdx);
+ data.data() + 2 * VideoSendTiming::kEncodeFinishDeltaIdx);
timing->packetization_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
- data.data() + 2 * VideoTiming::kPacketizationFinishDeltaIdx);
+ data.data() + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx);
timing->pacer_exit_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
- data.data() + 2 * VideoTiming::kPacerExitDeltaIdx);
+ data.data() + 2 * VideoSendTiming::kPacerExitDeltaIdx);
timing->network_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
- data.data() + 2 * VideoTiming::kNetworkTimestampDeltaIdx);
+ data.data() + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx);
timing->network2_timstamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
- data.data() + 2 * VideoTiming::kNetwork2TimestampDeltaIdx);
+ data.data() + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx);
timing->is_timing_frame = true;
return true;
}
-bool VideoTimingExtension::Write(uint8_t* data, const VideoTiming& timing) {
+bool VideoTimingExtension::Write(uint8_t* data, const VideoSendTiming& timing) {
ByteWriter<uint16_t>::WriteBigEndian(data, timing.encode_start_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
- data + 2 * VideoTiming::kEncodeFinishDeltaIdx,
+ data + 2 * VideoSendTiming::kEncodeFinishDeltaIdx,
timing.encode_finish_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
- data + 2 * VideoTiming::kPacketizationFinishDeltaIdx,
+ data + 2 * VideoSendTiming::kPacketizationFinishDeltaIdx,
timing.packetization_finish_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
- data + 2 * VideoTiming::kPacerExitDeltaIdx, timing.pacer_exit_delta_ms);
+ data + 2 * VideoSendTiming::kPacerExitDeltaIdx,
+ timing.pacer_exit_delta_ms);
ByteWriter<uint16_t>::WriteBigEndian(
- data + 2 * VideoTiming::kNetworkTimestampDeltaIdx, 0); // reserved
+ data + 2 * VideoSendTiming::kNetworkTimestampDeltaIdx, 0); // reserved
ByteWriter<uint16_t>::WriteBigEndian(
- data + 2 * VideoTiming::kNetwork2TimestampDeltaIdx, 0); // reserved
+ data + 2 * VideoSendTiming::kNetwork2TimestampDeltaIdx, 0); // reserved
return true;
}
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
index c0a4bca..c637a84 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_header_extensions.h
@@ -134,9 +134,10 @@
static constexpr const char* kUri =
"http://www.webrtc.org/experiments/rtp-hdrext/video-timing";
- static bool Parse(rtc::ArrayView<const uint8_t> data, VideoTiming* timing);
- static size_t ValueSize(const VideoTiming&) { return kValueSizeBytes; }
- static bool Write(uint8_t* data, const VideoTiming& timing);
+ static bool Parse(rtc::ArrayView<const uint8_t> data,
+ VideoSendTiming* timing);
+ static size_t ValueSize(const VideoSendTiming&) { return kValueSizeBytes; }
+ static bool Write(uint8_t* data, const VideoSendTiming& timing);
static size_t ValueSize(uint16_t time_delta_ms, uint8_t idx) {
return kValueSizeBytes;
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
index 55bad2d..5d5b31b 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
+++ b/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h
@@ -32,26 +32,26 @@
void set_packetization_finish_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
- VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
- VideoTiming::kPacketizationFinishDeltaIdx);
+ VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+ VideoSendTiming::kPacketizationFinishDeltaIdx);
}
void set_pacer_exit_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
- VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
- VideoTiming::kPacerExitDeltaIdx);
+ VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+ VideoSendTiming::kPacerExitDeltaIdx);
}
void set_network_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
- VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
- VideoTiming::kNetworkTimestampDeltaIdx);
+ VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+ VideoSendTiming::kNetworkTimestampDeltaIdx);
}
void set_network2_time_ms(int64_t time) {
SetExtension<VideoTimingExtension>(
- VideoTiming::GetDeltaCappedMs(capture_time_ms_, time),
- VideoTiming::kNetwork2TimestampDeltaIdx);
+ VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time),
+ VideoSendTiming::kNetwork2TimestampDeltaIdx);
}
private:
diff --git a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index bcfa650..89c1d1d 100644
--- a/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -473,7 +473,7 @@
packet->SetMarker(true);
packet->SetTimestamp(kTimestamp);
packet->set_capture_time_ms(capture_time_ms);
- const VideoTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true};
+ const VideoSendTiming kVideoTiming = {0u, 0u, 0u, 0u, 0u, 0u, true};
packet->SetExtension<VideoTimingExtension>(kVideoTiming);
EXPECT_TRUE(rtp_sender_->AssignSequenceNumber(packet.get()));
size_t packet_size = packet->size();
@@ -1578,7 +1578,7 @@
rtp_sender_video_->SendVideo(kRtpVideoGeneric, kVideoFrameKey, kPayload,
kTimestamp, kCaptureTimestamp, kFrame,
sizeof(kFrame), nullptr, &hdr);
- VideoTiming timing;
+ VideoSendTiming timing;
EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
&timing));
EXPECT_EQ(kPacketizationTimeMs, timing.packetization_finish_delta_ms);
diff --git a/webrtc/modules/video_coding/frame_buffer2.cc b/webrtc/modules/video_coding/frame_buffer2.cc
index 1114e7c..2beffc7 100644
--- a/webrtc/modules/video_coding/frame_buffer2.cc
+++ b/webrtc/modules/video_coding/frame_buffer2.cc
@@ -149,6 +149,7 @@
}
UpdateJitterDelay();
+ UpdateTimingFrameInfo();
PropagateDecodability(next_frame_it_->second);
// Sanity check for RTP timestamp monotonicity.
@@ -534,8 +535,15 @@
}
}
+void FrameBuffer::UpdateTimingFrameInfo() {
+ TRACE_EVENT0("webrtc", "FrameBuffer::UpdateTimingFrameInfo");
+ rtc::Optional<TimingFrameInfo> info = timing_->GetTimingFrameInfo();
+ if (info)
+ stats_callback_->OnTimingFrameInfoUpdated(*info);
+}
+
void FrameBuffer::ClearFramesAndHistory() {
- TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
+ TRACE_EVENT0("webrtc", "FrameBuffer::ClearFramesAndHistory");
frames_.clear();
last_decoded_frame_it_ = frames_.end();
last_continuous_frame_it_ = frames_.end();
diff --git a/webrtc/modules/video_coding/frame_buffer2.h b/webrtc/modules/video_coding/frame_buffer2.h
index ffeb2aa..25ba0a9 100644
--- a/webrtc/modules/video_coding/frame_buffer2.h
+++ b/webrtc/modules/video_coding/frame_buffer2.h
@@ -153,6 +153,8 @@
void UpdateJitterDelay() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ void UpdateTimingFrameInfo() EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
void ClearFramesAndHistory() EXCLUSIVE_LOCKS_REQUIRED(crit_);
bool HasBadRenderTiming(const FrameObject& frame, int64_t now_ms)
diff --git a/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
index 58b3f7a..9d15302 100644
--- a/webrtc/modules/video_coding/frame_buffer2_unittest.cc
+++ b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -116,6 +116,7 @@
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms));
+ MOCK_METHOD1(OnTimingFrameInfoUpdated, void(const TimingFrameInfo& info));
};
class TestFrameBuffer2 : public ::testing::Test {
diff --git a/webrtc/modules/video_coding/generic_decoder.cc b/webrtc/modules/video_coding/generic_decoder.cc
index 42ee8b6..80b08b5 100644
--- a/webrtc/modules/video_coding/generic_decoder.cc
+++ b/webrtc/modules/video_coding/generic_decoder.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/video_coding/generic_decoder.h"
+#include <algorithm>
+
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/timeutils.h"
@@ -91,6 +93,7 @@
// Report timing information.
if (frameInfo->timing.is_timing_frame) {
+ int64_t capture_time_ms = decodedImage.ntp_time_ms() - ntp_offset_;
// Convert remote timestamps to local time from ntp timestamps.
frameInfo->timing.encode_start_ms -= ntp_offset_;
frameInfo->timing.encode_finish_ms -= ntp_offset_;
@@ -98,19 +101,44 @@
frameInfo->timing.pacer_exit_ms -= ntp_offset_;
frameInfo->timing.network_timestamp_ms -= ntp_offset_;
frameInfo->timing.network2_timestamp_ms -= ntp_offset_;
- // TODO(ilnik): Report timing information here.
- // Capture time: decodedImage.ntp_time_ms() - ntp_offset
- // Encode start: frameInfo->timing.encode_start_ms
- // Encode finish: frameInfo->timing.encode_finish_ms
- // Packetization done: frameInfo->timing.packetization_finish_ms
- // Pacer exit: frameInfo->timing.pacer_exit_ms
- // Network timestamp: frameInfo->timing.network_timestamp_ms
- // Network2 timestamp: frameInfo->timing.network2_timestamp_ms
- // Receive start: frameInfo->timing.receive_start_ms
- // Receive finish: frameInfo->timing.receive_finish_ms
- // Decode start: frameInfo->decodeStartTimeMs
- // Decode finish: now_ms
- // Render time: frameInfo->renderTimeMs
+
+ int64_t sender_delta_ms = 0;
+ if (decodedImage.ntp_time_ms() < 0) {
+ // Sender clock is not estimated yet. Make sure that sender times are all
+ // negative to indicate that, while still correct relative to each other.
+ sender_delta_ms =
+ std::max({capture_time_ms, frameInfo->timing.encode_start_ms,
+ frameInfo->timing.encode_finish_ms,
+ frameInfo->timing.packetization_finish_ms,
+ frameInfo->timing.pacer_exit_ms,
+ frameInfo->timing.network_timestamp_ms,
+ frameInfo->timing.network2_timestamp_ms}) +
+ 1;
+ }
+
+ TimingFrameInfo timing_frame_info;
+
+ timing_frame_info.capture_time_ms = capture_time_ms - sender_delta_ms;
+ timing_frame_info.encode_start_ms =
+ frameInfo->timing.encode_start_ms - sender_delta_ms;
+ timing_frame_info.encode_finish_ms =
+ frameInfo->timing.encode_finish_ms - sender_delta_ms;
+ timing_frame_info.packetization_finish_ms =
+ frameInfo->timing.packetization_finish_ms - sender_delta_ms;
+ timing_frame_info.pacer_exit_ms =
+ frameInfo->timing.pacer_exit_ms - sender_delta_ms;
+ timing_frame_info.network_timestamp_ms =
+ frameInfo->timing.network_timestamp_ms - sender_delta_ms;
+ timing_frame_info.network2_timestamp_ms =
+ frameInfo->timing.network2_timestamp_ms - sender_delta_ms;
+ timing_frame_info.receive_start_ms = frameInfo->timing.receive_start_ms;
+ timing_frame_info.receive_finish_ms = frameInfo->timing.receive_finish_ms;
+ timing_frame_info.decode_start_ms = frameInfo->decodeStartTimeMs;
+ timing_frame_info.decode_finish_ms = now_ms;
+ timing_frame_info.render_time_ms = frameInfo->renderTimeMs;
+ timing_frame_info.rtp_timestamp = decodedImage.timestamp();
+
+ _timing->SetTimingFrameInfo(timing_frame_info);
}
decodedImage.set_timestamp_us(
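
As a side note (not part of this CL), the comment above about keeping sender-side
times correct relative to each other while forcing them negative can be checked with
concrete numbers; all values here are made up for illustration:

  #include <algorithm>
  #include <cstdint>
  #include <cstdio>

  int main() {
    // Hypothetical sender-side timestamps (ms) before the NTP offset is known.
    int64_t capture_ms = 1000;
    int64_t encode_start_ms = 1005;
    int64_t pacer_exit_ms = 1020;
    // Shift by max(...) + 1 so the latest event maps to -1 and all are negative.
    int64_t delta = std::max({capture_ms, encode_start_ms, pacer_exit_ms}) + 1;
    std::printf("%lld %lld %lld\n",
                static_cast<long long>(capture_ms - delta),       // -21
                static_cast<long long>(encode_start_ms - delta),  // -16
                static_cast<long long>(pacer_exit_ms - delta));   // -1
    // Relative deltas survive: encode_start - capture is still 5 ms.
    return 0;
  }
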
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
index 6ed472d..19d3813 100644
--- a/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -109,6 +109,8 @@
int min_playout_delay_ms,
int render_delay_ms) = 0;
+ virtual void OnTimingFrameInfoUpdated(const TimingFrameInfo& info) = 0;
+
protected:
virtual ~VCMReceiveStatisticsCallback() {}
};
diff --git a/webrtc/modules/video_coding/timing.cc b/webrtc/modules/video_coding/timing.cc
index 21f4a3d..340f2c1 100644
--- a/webrtc/modules/video_coding/timing.cc
+++ b/webrtc/modules/video_coding/timing.cc
@@ -21,21 +21,22 @@
namespace webrtc {
VCMTiming::VCMTiming(Clock* clock, VCMTiming* master_timing)
- : clock_(clock),
- master_(false),
- ts_extrapolator_(),
- codec_timer_(new VCMCodecTimer()),
- render_delay_ms_(kDefaultRenderDelayMs),
- min_playout_delay_ms_(0),
- max_playout_delay_ms_(10000),
- jitter_delay_ms_(0),
- current_delay_ms_(0),
- last_decode_ms_(0),
- prev_frame_timestamp_(0),
- num_decoded_frames_(0),
- num_delayed_decoded_frames_(0),
- first_decoded_frame_ms_(-1),
- sum_missed_render_deadline_ms_(0) {
+ : clock_(clock),
+ master_(false),
+ ts_extrapolator_(),
+ codec_timer_(new VCMCodecTimer()),
+ render_delay_ms_(kDefaultRenderDelayMs),
+ min_playout_delay_ms_(0),
+ max_playout_delay_ms_(10000),
+ jitter_delay_ms_(0),
+ current_delay_ms_(0),
+ last_decode_ms_(0),
+ prev_frame_timestamp_(0),
+ timing_frame_info_(),
+ num_decoded_frames_(0),
+ num_delayed_decoded_frames_(0),
+ first_decoded_frame_ms_(-1),
+ sum_missed_render_deadline_ms_(0) {
if (master_timing == NULL) {
master_ = true;
ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
@@ -304,4 +305,14 @@
return (num_decoded_frames_ > 0);
}
+void VCMTiming::SetTimingFrameInfo(const TimingFrameInfo& info) {
+ rtc::CritScope cs(&crit_sect_);
+ timing_frame_info_.emplace(info);
+}
+
+rtc::Optional<TimingFrameInfo> VCMTiming::GetTimingFrameInfo() {
+ rtc::CritScope cs(&crit_sect_);
+ return timing_frame_info_;
+}
+
} // namespace webrtc
diff --git a/webrtc/modules/video_coding/timing.h b/webrtc/modules/video_coding/timing.h
index bfd6778..82252bf 100644
--- a/webrtc/modules/video_coding/timing.h
+++ b/webrtc/modules/video_coding/timing.h
@@ -102,6 +102,9 @@
int* min_playout_delay_ms,
int* render_delay_ms) const;
+ void SetTimingFrameInfo(const TimingFrameInfo& info);
+ rtc::Optional<TimingFrameInfo> GetTimingFrameInfo();
+
enum { kDefaultRenderDelayMs = 10 };
enum { kDelayMaxChangeMsPerS = 100 };
@@ -131,6 +134,7 @@
int current_delay_ms_ GUARDED_BY(crit_sect_);
int last_decode_ms_ GUARDED_BY(crit_sect_);
uint32_t prev_frame_timestamp_ GUARDED_BY(crit_sect_);
+ rtc::Optional<TimingFrameInfo> timing_frame_info_ GUARDED_BY(crit_sect_);
// Statistics.
size_t num_decoded_frames_ GUARDED_BY(crit_sect_);