Add estimatedPlayoutTimestamp to RTCInboundRTPStreamStats.
https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp
Partial implementation: the stat is currently only populated when A/V sync is enabled.
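
The reported value is anchored at the last audio/video sync update and then
extrapolated by elapsed wall-clock time whenever stats are queried. A minimal
sketch of the query path, condensed from ChannelReceive and
ReceiveStatisticsProxy in this change (member names shortened for
illustration):

  // Returns the estimated playout NTP timestamp, or nullopt if no sync
  // update has been received yet.
  absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
      int64_t now_ms) const {
    if (!playout_ntp_ms_ || !playout_ntp_update_time_ms_)
      return absl::nullopt;
    // NTP timestamp captured at the last sync update, advanced by the time
    // elapsed since that update.
    return *playout_ntp_ms_ + (now_ms - *playout_ntp_update_time_ms_);
  }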
Bug: webrtc:7065
Change-Id: I8595cc848d080d7c3bef152462a9becf0e5a2196
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/155621
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Reviewed-by: Oskar Sundbom <ossu@webrtc.org>
Reviewed-by: Henrik Boström <hbos@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#29581}
diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h
index f26c574..dd2eacd 100644
--- a/api/stats/rtcstats_objects.h
+++ b/api/stats/rtcstats_objects.h
@@ -446,6 +446,8 @@
RTCStatsMember<double> total_decode_time;
// https://henbos.github.io/webrtc-provisional-stats/#dom-rtcinboundrtpstreamstats-contenttype
RTCStatsMember<std::string> content_type;
+ // TODO(asapersson): Currently only populated if audio/video sync is enabled.
+ RTCStatsMember<double> estimated_playout_timestamp;
// TODO(hbos): This is only implemented for video; implement it for audio as
// well.
RTCStatsMember<std::string> decoder_implementation;
diff --git a/audio/audio_receive_stream.cc b/audio/audio_receive_stream.cc
index 190693c..c6291c7 100644
--- a/audio/audio_receive_stream.cc
+++ b/audio/audio_receive_stream.cc
@@ -206,6 +206,9 @@
stats.audio_level = channel_receive_->GetSpeechOutputLevelFullRange();
stats.total_output_energy = channel_receive_->GetTotalOutputEnergy();
stats.total_output_duration = channel_receive_->GetTotalOutputDuration();
+ stats.estimated_playout_ntp_timestamp_ms =
+ channel_receive_->GetCurrentEstimatedPlayoutNtpTimestampMs(
+ rtc::TimeMillis());
// Get jitter buffer and total delay (alg + jitter + playout) stats.
auto ns = channel_receive_->GetNetworkStatistics();
@@ -310,9 +313,18 @@
return info;
}
-uint32_t AudioReceiveStream::GetPlayoutTimestamp() const {
+bool AudioReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const {
// Called on video capture thread.
- return channel_receive_->GetPlayoutTimestamp();
+ return channel_receive_->GetPlayoutRtpTimestamp(rtp_timestamp, time_ms);
+}
+
+void AudioReceiveStream::SetEstimatedPlayoutNtpTimestampMs(
+ int64_t ntp_timestamp_ms,
+ int64_t time_ms) {
+ // Called on video capture thread.
+ channel_receive_->SetEstimatedPlayoutNtpTimestampMs(ntp_timestamp_ms,
+ time_ms);
}
void AudioReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
diff --git a/audio/audio_receive_stream.h b/audio/audio_receive_stream.h
index 86301a3..26bcf63 100644
--- a/audio/audio_receive_stream.h
+++ b/audio/audio_receive_stream.h
@@ -87,7 +87,10 @@
// Syncable
int id() const override;
absl::optional<Syncable::Info> GetInfo() const override;
- uint32_t GetPlayoutTimestamp() const override;
+ bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const override;
+ void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+ int64_t time_ms) override;
void SetMinimumPlayoutDelay(int delay_ms) override;
void AssociateSendStream(AudioSendStream* send_stream);
diff --git a/audio/audio_receive_stream_unittest.cc b/audio/audio_receive_stream_unittest.cc
index ae6605c..473b387 100644
--- a/audio/audio_receive_stream_unittest.cc
+++ b/audio/audio_receive_stream_unittest.cc
@@ -62,6 +62,7 @@
const unsigned int kSpeechOutputLevel = 99;
const double kTotalOutputEnergy = 0.25;
const double kTotalOutputDuration = 0.5;
+const int64_t kPlayoutNtpTimestampMs = 5678;
const CallReceiveStatistics kCallStats = {678, 234, -12, 567, 78, 890, 123};
const std::pair<int, SdpAudioFormat> kReceiveCodec = {
@@ -145,6 +146,8 @@
.WillOnce(Return(kAudioDecodeStats));
EXPECT_CALL(*channel_receive_, GetReceiveCodec())
.WillOnce(Return(kReceiveCodec));
+ EXPECT_CALL(*channel_receive_, GetCurrentEstimatedPlayoutNtpTimestampMs(_))
+ .WillOnce(Return(kPlayoutNtpTimestampMs));
}
private:
@@ -315,6 +318,7 @@
stats.decoding_muted_output);
EXPECT_EQ(kCallStats.capture_start_ntp_time_ms_,
stats.capture_start_ntp_time_ms);
+ EXPECT_EQ(kPlayoutNtpTimestampMs, stats.estimated_playout_ntp_timestamp_ms);
}
TEST(AudioReceiveStreamTest, SetGain) {
diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc
index fa1463a..7fe41a1 100644
--- a/audio/channel_receive.cc
+++ b/audio/channel_receive.cc
@@ -141,7 +141,12 @@
// Audio+Video Sync.
uint32_t GetDelayEstimate() const override;
void SetMinimumPlayoutDelay(int delayMs) override;
- uint32_t GetPlayoutTimestamp() const override;
+ bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const override;
+ void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+ int64_t time_ms) override;
+ absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
+ int64_t now_ms) const override;
// Audio quality.
bool SetBaseMinimumPlayoutDelayMs(int delay_ms) override;
@@ -178,7 +183,7 @@
size_t packet_length,
const RTPHeader& header);
int ResendPackets(const uint16_t* sequence_numbers, int length);
- void UpdatePlayoutTimestamp(bool rtcp);
+ void UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms);
int GetRtpTimestampRateHz() const;
int64_t GetRTT() const;
@@ -242,7 +247,13 @@
rtc::CriticalSection video_sync_lock_;
uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(video_sync_lock_);
+ absl::optional<int64_t> playout_timestamp_rtp_time_ms_
+ RTC_GUARDED_BY(video_sync_lock_);
uint32_t playout_delay_ms_ RTC_GUARDED_BY(video_sync_lock_);
+ absl::optional<int64_t> playout_timestamp_ntp_
+ RTC_GUARDED_BY(video_sync_lock_);
+ absl::optional<int64_t> playout_timestamp_ntp_time_ms_
+ RTC_GUARDED_BY(video_sync_lock_);
rtc::CriticalSection ts_stats_lock_;
@@ -573,7 +584,7 @@
}
// Store playout timestamp for the received RTP packet
- UpdatePlayoutTimestamp(false);
+ UpdatePlayoutTimestamp(false, now_ms);
const auto& it = payload_type_frequencies_.find(packet.PayloadType());
if (it == payload_type_frequencies_.end())
@@ -638,7 +649,7 @@
// May be called on either worker thread or network thread.
void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
// Store playout timestamp for the received RTCP packet
- UpdatePlayoutTimestamp(true);
+ UpdatePlayoutTimestamp(true, rtc::TimeMillis());
// Deliver RTCP packet to RTP/RTCP module for parsing
_rtpRtcpModule->IncomingRtcpPacket(data, length);
@@ -806,14 +817,38 @@
}
}
-uint32_t ChannelReceive::GetPlayoutTimestamp() const {
+bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const {
RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
{
rtc::CritScope lock(&video_sync_lock_);
- return playout_timestamp_rtp_;
+ if (!playout_timestamp_rtp_time_ms_)
+ return false;
+ *rtp_timestamp = playout_timestamp_rtp_;
+ *time_ms = playout_timestamp_rtp_time_ms_.value();
+ return true;
}
}
+void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+ int64_t time_ms) {
+ RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_);
+ rtc::CritScope lock(&video_sync_lock_);
+ playout_timestamp_ntp_ = ntp_timestamp_ms;
+ playout_timestamp_ntp_time_ms_ = time_ms;
+}
+
+absl::optional<int64_t>
+ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const {
+ RTC_DCHECK(worker_thread_checker_.IsCurrent());
+ rtc::CritScope lock(&video_sync_lock_);
+ if (!playout_timestamp_ntp_ || !playout_timestamp_ntp_time_ms_)
+ return absl::nullopt;
+
+ int64_t elapsed_ms = now_ms - *playout_timestamp_ntp_time_ms_;
+ return *playout_timestamp_ntp_ + elapsed_ms;
+}
+
bool ChannelReceive::SetBaseMinimumPlayoutDelayMs(int delay_ms) {
return acm_receiver_.SetBaseMinimumDelayMs(delay_ms);
}
@@ -841,7 +876,7 @@
return info;
}
-void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp) {
+void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) {
jitter_buffer_playout_timestamp_ = acm_receiver_.GetPlayoutTimestamp();
if (!jitter_buffer_playout_timestamp_) {
@@ -868,6 +903,7 @@
rtc::CritScope lock(&video_sync_lock_);
if (!rtcp) {
playout_timestamp_rtp_ = playout_timestamp;
+ playout_timestamp_rtp_time_ms_ = now_ms;
}
playout_delay_ms_ = delay_ms;
}
diff --git a/audio/channel_receive.h b/audio/channel_receive.h
index 5f71ea3..fb79dc2 100644
--- a/audio/channel_receive.h
+++ b/audio/channel_receive.h
@@ -105,7 +105,12 @@
// Audio+Video Sync.
virtual uint32_t GetDelayEstimate() const = 0;
virtual void SetMinimumPlayoutDelay(int delay_ms) = 0;
- virtual uint32_t GetPlayoutTimestamp() const = 0;
+ virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const = 0;
+ virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+ int64_t time_ms) = 0;
+ virtual absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
+ int64_t now_ms) const = 0;
// Audio quality.
// Base minimum delay sets lower bound on minimum delay value which
diff --git a/audio/mock_voe_channel_proxy.h b/audio/mock_voe_channel_proxy.h
index e666bf2..d61bc89 100644
--- a/audio/mock_voe_channel_proxy.h
+++ b/audio/mock_voe_channel_proxy.h
@@ -49,7 +49,12 @@
MOCK_CONST_METHOD0(PreferredSampleRate, int());
MOCK_METHOD1(SetAssociatedSendChannel,
void(const voe::ChannelSendInterface* send_channel));
- MOCK_CONST_METHOD0(GetPlayoutTimestamp, uint32_t());
+ MOCK_CONST_METHOD2(GetPlayoutRtpTimestamp,
+ bool(uint32_t* rtp_timestamp, int64_t* time_ms));
+ MOCK_METHOD2(SetEstimatedPlayoutNtpTimestampMs,
+ void(int64_t ntp_timestamp_ms, int64_t time_ms));
+ MOCK_CONST_METHOD1(GetCurrentEstimatedPlayoutNtpTimestampMs,
+ absl::optional<int64_t>(int64_t now_ms));
MOCK_CONST_METHOD0(GetSyncInfo, absl::optional<Syncable::Info>());
MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms));
MOCK_METHOD1(SetBaseMinimumPlayoutDelayMs, bool(int delay_ms));
diff --git a/call/audio_receive_stream.h b/call/audio_receive_stream.h
index 1f8ad10..55c1af7 100644
--- a/call/audio_receive_stream.h
+++ b/call/audio_receive_stream.h
@@ -87,6 +87,8 @@
double relative_packet_arrival_delay_seconds = 0.0;
int32_t interruption_count = 0;
int32_t total_interruption_duration_ms = 0;
+ // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp
+ absl::optional<int64_t> estimated_playout_ntp_timestamp_ms;
};
struct Config {
diff --git a/call/syncable.h b/call/syncable.h
index a914793..067e01c 100644
--- a/call/syncable.h
+++ b/call/syncable.h
@@ -35,8 +35,11 @@
virtual int id() const = 0;
virtual absl::optional<Info> GetInfo() const = 0;
- virtual uint32_t GetPlayoutTimestamp() const = 0;
+ virtual bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const = 0;
virtual void SetMinimumPlayoutDelay(int delay_ms) = 0;
+ virtual void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+ int64_t time_ms) = 0;
};
} // namespace webrtc
diff --git a/call/video_receive_stream.h b/call/video_receive_stream.h
index 6e08738..cff8126 100644
--- a/call/video_receive_stream.h
+++ b/call/video_receive_stream.h
@@ -110,6 +110,8 @@
VideoContentType content_type = VideoContentType::UNSPECIFIED;
+ // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp
+ absl::optional<int64_t> estimated_playout_ntp_timestamp_ms;
int sync_offset_ms = std::numeric_limits<int>::max();
uint32_t ssrc = 0;
diff --git a/media/base/media_channel.h b/media/base/media_channel.h
index 3450c44..582d29c 100644
--- a/media/base/media_channel.h
+++ b/media/base/media_channel.h
@@ -460,6 +460,8 @@
// local clock when it was received - not the RTP timestamp of that packet.
// https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-lastpacketreceivedtimestamp
absl::optional<int64_t> last_packet_received_timestamp_ms;
+ // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-estimatedplayouttimestamp
+ absl::optional<int64_t> estimated_playout_ntp_timestamp_ms;
std::string codec_name;
absl::optional<int> codec_payload_type;
std::vector<SsrcReceiverInfo> local_stats;
diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc
index 7bd7b49..9ea80cc 100644
--- a/media/engine/webrtc_video_engine.cc
+++ b/media/engine/webrtc_video_engine.cc
@@ -2829,6 +2829,8 @@
info.total_decode_time_ms = stats.total_decode_time_ms;
info.last_packet_received_timestamp_ms =
stats.rtp_stats.last_packet_received_timestamp_ms;
+ info.estimated_playout_ntp_timestamp_ms =
+ stats.estimated_playout_ntp_timestamp_ms;
info.first_frame_received_to_decoded_ms =
stats.first_frame_received_to_decoded_ms;
info.interframe_delay_max_ms = stats.interframe_delay_max_ms;
diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc
index ee8e5f0..201503a 100644
--- a/media/engine/webrtc_voice_engine.cc
+++ b/media/engine/webrtc_voice_engine.cc
@@ -2271,6 +2271,8 @@
rinfo.capture_start_ntp_time_ms = stats.capture_start_ntp_time_ms;
rinfo.last_packet_received_timestamp_ms =
stats.last_packet_received_timestamp_ms;
+ rinfo.estimated_playout_ntp_timestamp_ms =
+ stats.estimated_playout_ntp_timestamp_ms;
rinfo.jitter_buffer_flushes = stats.jitter_buffer_flushes;
rinfo.relative_packet_arrival_delay_seconds =
stats.relative_packet_arrival_delay_seconds;
diff --git a/pc/rtc_stats_collector.cc b/pc/rtc_stats_collector.cc
index 9d6cf77..ab12c65 100644
--- a/pc/rtc_stats_collector.cc
+++ b/pc/rtc_stats_collector.cc
@@ -285,6 +285,10 @@
*voice_receiver_info.last_packet_received_timestamp_ms) /
rtc::kNumMillisecsPerSec;
}
+ if (voice_receiver_info.estimated_playout_ntp_timestamp_ms) {
+ inbound_audio->estimated_playout_timestamp = static_cast<double>(
+ *voice_receiver_info.estimated_playout_ntp_timestamp_ms);
+ }
inbound_audio->fec_packets_received =
voice_receiver_info.fec_packets_received;
inbound_audio->fec_packets_discarded =
@@ -322,6 +326,10 @@
*video_receiver_info.last_packet_received_timestamp_ms) /
rtc::kNumMillisecsPerSec;
}
+ if (video_receiver_info.estimated_playout_ntp_timestamp_ms) {
+ inbound_video->estimated_playout_timestamp = static_cast<double>(
+ *video_receiver_info.estimated_playout_ntp_timestamp_ms);
+ }
// TODO(https://crbug.com/webrtc/10529): When info's |content_info| is
// optional, support the "unspecified" value.
if (video_receiver_info.content_type == VideoContentType::SCREENSHARE)
diff --git a/pc/rtc_stats_collector_unittest.cc b/pc/rtc_stats_collector_unittest.cc
index 86f8ba9..ce2d54e 100644
--- a/pc/rtc_stats_collector_unittest.cc
+++ b/pc/rtc_stats_collector_unittest.cc
@@ -1789,6 +1789,8 @@
// Set previously undefined values and "GetStats" again.
voice_media_info.receivers[0].last_packet_received_timestamp_ms = 3000;
expected_audio.last_packet_received_timestamp = 3.0;
+ voice_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = 4567;
+ expected_audio.estimated_playout_timestamp = 4567;
voice_media_channel->SetStats(voice_media_info);
report = stats_->GetFreshStatsReport();
@@ -1824,6 +1826,8 @@
video_media_info.receivers[0].last_packet_received_timestamp_ms =
absl::nullopt;
video_media_info.receivers[0].content_type = VideoContentType::UNSPECIFIED;
+ video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms =
+ absl::nullopt;
video_media_info.receivers[0].decoder_implementation_name = "";
RtpCodecParameters codec_parameters;
@@ -1872,11 +1876,13 @@
// Set previously undefined values and "GetStats" again.
video_media_info.receivers[0].qp_sum = 9;
- video_media_info.receivers[0].last_packet_received_timestamp_ms = 1000;
expected_video.qp_sum = 9;
+ video_media_info.receivers[0].last_packet_received_timestamp_ms = 1000;
expected_video.last_packet_received_timestamp = 1.0;
video_media_info.receivers[0].content_type = VideoContentType::SCREENSHARE;
expected_video.content_type = "screenshare";
+ video_media_info.receivers[0].estimated_playout_ntp_timestamp_ms = 1234;
+ expected_video.estimated_playout_timestamp = 1234;
video_media_info.receivers[0].decoder_implementation_name = "libfoodecoder";
expected_video.decoder_implementation = "libfoodecoder";
video_media_channel->SetStats(video_media_info);
diff --git a/pc/rtc_stats_integrationtest.cc b/pc/rtc_stats_integrationtest.cc
index 0d51af0..9000ff9 100644
--- a/pc/rtc_stats_integrationtest.cc
+++ b/pc/rtc_stats_integrationtest.cc
@@ -821,6 +821,9 @@
verifier.TestMemberIsUndefined(inbound_stream.burst_discard_rate);
verifier.TestMemberIsUndefined(inbound_stream.gap_loss_rate);
verifier.TestMemberIsUndefined(inbound_stream.gap_discard_rate);
+ // Test runtime too short to get an estimate (at least two RTCP sender
+ // reports need to be received).
+ verifier.MarkMemberTested(inbound_stream.estimated_playout_timestamp, true);
if (inbound_stream.media_type.is_defined() &&
*inbound_stream.media_type == "video") {
verifier.TestMemberIsDefined(inbound_stream.frames_decoded);
diff --git a/stats/rtcstats_objects.cc b/stats/rtcstats_objects.cc
index 99594a8..b1a1a23 100644
--- a/stats/rtcstats_objects.cc
+++ b/stats/rtcstats_objects.cc
@@ -617,6 +617,7 @@
&key_frames_decoded,
&total_decode_time,
&content_type,
+ &estimated_playout_timestamp,
&decoder_implementation)
// clang-format on
@@ -650,6 +651,7 @@
key_frames_decoded("keyFramesDecoded"),
total_decode_time("totalDecodeTime"),
content_type("contentType"),
+ estimated_playout_timestamp("estimatedPlayoutTimestamp"),
decoder_implementation("decoderImplementation") {}
RTCInboundRTPStreamStats::RTCInboundRTPStreamStats(
@@ -678,6 +680,7 @@
key_frames_decoded(other.key_frames_decoded),
total_decode_time(other.total_decode_time),
content_type(other.content_type),
+ estimated_playout_timestamp(other.estimated_playout_timestamp),
decoder_implementation(other.decoder_implementation) {}
RTCInboundRTPStreamStats::~RTCInboundRTPStreamStats() {}
diff --git a/video/receive_statistics_proxy.cc b/video/receive_statistics_proxy.cc
index d8bde94..657e98d 100644
--- a/video/receive_statistics_proxy.cc
+++ b/video/receive_statistics_proxy.cc
@@ -611,6 +611,17 @@
}
}
+absl::optional<int64_t>
+ReceiveStatisticsProxy::GetCurrentEstimatedPlayoutNtpTimestampMs(
+ int64_t now_ms) const {
+ if (!last_estimated_playout_ntp_timestamp_ms_ ||
+ !last_estimated_playout_time_ms_) {
+ return absl::nullopt;
+ }
+ int64_t elapsed_ms = now_ms - *last_estimated_playout_time_ms_;
+ return *last_estimated_playout_ntp_timestamp_ms_ + elapsed_ms;
+}
+
VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
rtc::CritScope lock(&crit_);
// Get current frame rates here, as only updating them on new frames prevents
@@ -637,6 +648,8 @@
static_cast<double>(current_delay_counter_.Sum(1).value_or(0)) /
rtc::kNumMillisecsPerSec;
stats_.jitter_buffer_emitted_count = current_delay_counter_.NumSamples();
+ stats_.estimated_playout_ntp_timestamp_ms =
+ GetCurrentEstimatedPlayoutNtpTimestampMs(now_ms);
return stats_;
}
@@ -813,11 +826,14 @@
QualitySample();
}
-void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t sync_offset_ms,
+void ReceiveStatisticsProxy::OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
+ int64_t sync_offset_ms,
double estimated_freq_khz) {
rtc::CritScope lock(&crit_);
sync_offset_counter_.Add(std::abs(sync_offset_ms));
stats_.sync_offset_ms = sync_offset_ms;
+ last_estimated_playout_ntp_timestamp_ms_ = video_playout_ntp_ms;
+ last_estimated_playout_time_ms_ = clock_->TimeInMilliseconds();
const double kMaxFreqKhz = 10000.0;
int offset_khz = kMaxFreqKhz;
diff --git a/video/receive_statistics_proxy.h b/video/receive_statistics_proxy.h
index 40608a8..02043d6 100644
--- a/video/receive_statistics_proxy.h
+++ b/video/receive_statistics_proxy.h
@@ -52,7 +52,9 @@
absl::optional<uint8_t> qp,
int32_t decode_time_ms,
VideoContentType content_type);
- void OnSyncOffsetUpdated(int64_t sync_offset_ms, double estimated_freq_khz);
+ void OnSyncOffsetUpdated(int64_t video_playout_ntp_ms,
+ int64_t sync_offset_ms,
+ double estimated_freq_khz);
void OnRenderedFrame(const VideoFrame& frame);
void OnIncomingPayloadType(int payload_type);
void OnDecoderImplementationName(const char* implementation_name);
@@ -133,6 +135,9 @@
int decode_time_ms) const
RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ absl::optional<int64_t> GetCurrentEstimatedPlayoutNtpTimestampMs(
+ int64_t now_ms) const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
Clock* const clock_;
// Ownership of this object lies with the owner of the ReceiveStatisticsProxy
// instance. Lifetime is guaranteed to outlive |this|.
@@ -187,6 +192,10 @@
mutable rtc::MovingMaxCounter<TimingFrameInfo> timing_frame_info_counter_
RTC_GUARDED_BY(&crit_);
absl::optional<int> num_unique_frames_ RTC_GUARDED_BY(crit_);
+ absl::optional<int64_t> last_estimated_playout_ntp_timestamp_ms_
+ RTC_GUARDED_BY(&crit_);
+ absl::optional<int64_t> last_estimated_playout_time_ms_
+ RTC_GUARDED_BY(&crit_);
rtc::ThreadChecker decode_thread_;
rtc::ThreadChecker network_thread_;
rtc::ThreadChecker main_thread_;
diff --git a/video/receive_statistics_proxy_unittest.cc b/video/receive_statistics_proxy_unittest.cc
index 66adb83..eb7c865 100644
--- a/video/receive_statistics_proxy_unittest.cc
+++ b/video/receive_statistics_proxy_unittest.cc
@@ -598,20 +598,40 @@
1, metrics::NumEvents("WebRTC.Video.ReceivedPacketsLostInPercent", 10));
}
+TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsPlayoutTimestamp) {
+ const int64_t kVideoNtpMs = 21;
+ const int64_t kSyncOffsetMs = 22;
+ const double kFreqKhz = 90.0;
+ EXPECT_EQ(absl::nullopt,
+ statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
+ EXPECT_EQ(kVideoNtpMs,
+ statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+ fake_clock_.AdvanceTimeMilliseconds(13);
+ EXPECT_EQ(kVideoNtpMs + 13,
+ statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+ fake_clock_.AdvanceTimeMilliseconds(5);
+ EXPECT_EQ(kVideoNtpMs + 13 + 5,
+ statistics_proxy_->GetStats().estimated_playout_ntp_timestamp_ms);
+}
+
TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsAvSyncOffset) {
+ const int64_t kVideoNtpMs = 21;
const int64_t kSyncOffsetMs = 22;
const double kFreqKhz = 90.0;
EXPECT_EQ(std::numeric_limits<int>::max(),
statistics_proxy_->GetStats().sync_offset_ms);
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
EXPECT_EQ(kSyncOffsetMs, statistics_proxy_->GetStats().sync_offset_ms);
}
TEST_F(ReceiveStatisticsProxyTest, AvSyncOffsetHistogramIsUpdated) {
+ const int64_t kVideoNtpMs = 21;
const int64_t kSyncOffsetMs = 22;
const double kFreqKhz = 90.0;
for (int i = 0; i < kMinRequiredSamples; ++i)
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+ kFreqKhz);
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.AVSyncOffsetInMs"));
@@ -620,18 +640,23 @@
}
TEST_F(ReceiveStatisticsProxyTest, RtpToNtpFrequencyOffsetHistogramIsUpdated) {
+ const int64_t kVideoNtpMs = 21;
const int64_t kSyncOffsetMs = 22;
const double kFreqKhz = 90.0;
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz + 2.2);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+ kFreqKhz + 2.2);
fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs);
// Process interval passed, max diff: 2.
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz + 1.1);
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz - 4.2);
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz - 0.9);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+ kFreqKhz + 1.1);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+ kFreqKhz - 4.2);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs,
+ kFreqKhz - 0.9);
fake_clock_.AdvanceTimeMilliseconds(kFreqOffsetProcessIntervalInMs);
// Process interval passed, max diff: 4.
- statistics_proxy_->OnSyncOffsetUpdated(kSyncOffsetMs, kFreqKhz);
+ statistics_proxy_->OnSyncOffsetUpdated(kVideoNtpMs, kSyncOffsetMs, kFreqKhz);
statistics_proxy_->UpdateHistograms(absl::nullopt, StreamDataCounters(),
nullptr);
// Average reported: (2 + 4) / 2 = 3.
diff --git a/video/rtp_streams_synchronizer.cc b/video/rtp_streams_synchronizer.cc
index 8d0d4ac..156ebbb 100644
--- a/video/rtp_streams_synchronizer.cc
+++ b/video/rtp_streams_synchronizer.cc
@@ -118,9 +118,13 @@
syncable_video_->SetMinimumPlayoutDelay(target_video_delay_ms);
}
+// TODO(https://bugs.webrtc.org/7065): Move RtpToNtpEstimator out of
+// RtpStreamsSynchronizer and into respective receive stream to always populate
+// the estimated playout timestamp.
bool RtpStreamsSynchronizer::GetStreamSyncOffsetInMs(
- uint32_t timestamp,
+ uint32_t rtp_timestamp,
int64_t render_time_ms,
+ int64_t* video_playout_ntp_ms,
int64_t* stream_offset_ms,
double* estimated_freq_khz) const {
rtc::CritScope lock(&crit_);
@@ -128,23 +132,37 @@
return false;
}
- uint32_t playout_timestamp = syncable_audio_->GetPlayoutTimestamp();
+ uint32_t audio_rtp_timestamp;
+ int64_t time_ms;
+ if (!syncable_audio_->GetPlayoutRtpTimestamp(&audio_rtp_timestamp,
+ &time_ms)) {
+ return false;
+ }
int64_t latest_audio_ntp;
- if (!audio_measurement_.rtp_to_ntp.Estimate(playout_timestamp,
+ if (!audio_measurement_.rtp_to_ntp.Estimate(audio_rtp_timestamp,
&latest_audio_ntp)) {
return false;
}
+ syncable_audio_->SetEstimatedPlayoutNtpTimestampMs(latest_audio_ntp, time_ms);
+
int64_t latest_video_ntp;
- if (!video_measurement_.rtp_to_ntp.Estimate(timestamp, &latest_video_ntp)) {
+ if (!video_measurement_.rtp_to_ntp.Estimate(rtp_timestamp,
+ &latest_video_ntp)) {
return false;
}
- int64_t time_to_render_ms = render_time_ms - rtc::TimeMillis();
- if (time_to_render_ms > 0)
- latest_video_ntp += time_to_render_ms;
+ // Extrapolate the audio NTP timestamp to the current time.
+ int64_t now_ms = rtc::TimeMillis();
+ latest_audio_ntp += (now_ms - time_ms);
+ // Remove video playout delay.
+ int64_t time_to_render_ms = render_time_ms - now_ms;
+ if (time_to_render_ms > 0)
+ latest_video_ntp -= time_to_render_ms;
+
+ *video_playout_ntp_ms = latest_video_ntp;
*stream_offset_ms = latest_audio_ntp - latest_video_ntp;
*estimated_freq_khz = video_measurement_.rtp_to_ntp.params()->frequency_khz;
return true;
diff --git a/video/rtp_streams_synchronizer.h b/video/rtp_streams_synchronizer.h
index 0778fc5..b6e5e61 100644
--- a/video/rtp_streams_synchronizer.h
+++ b/video/rtp_streams_synchronizer.h
@@ -36,12 +36,14 @@
int64_t TimeUntilNextProcess() override;
void Process() override;
- // Gets the sync offset between the current played out audio frame and the
- // video |frame|. Returns true on success, false otherwise.
- // The estimated frequency is the frequency used in the RTP to NTP timestamp
+ // Gets the estimated playout NTP timestamp for the video frame with
+ // |rtp_timestamp| and the sync offset between the currently played out
+ // audio frame and the video frame. Returns true on success, false otherwise.
+ // The |estimated_freq_khz| is the frequency used in the RTP to NTP timestamp
// conversion.
- bool GetStreamSyncOffsetInMs(uint32_t timestamp,
+ bool GetStreamSyncOffsetInMs(uint32_t rtp_timestamp,
int64_t render_time_ms,
+ int64_t* video_playout_ntp_ms,
int64_t* stream_offset_ms,
double* estimated_freq_khz) const;
diff --git a/video/video_receive_stream.cc b/video/video_receive_stream.cc
index 09a2796..a60bb07 100644
--- a/video/video_receive_stream.cc
+++ b/video/video_receive_stream.cc
@@ -502,6 +502,7 @@
// TODO(tommi): This method grabs a lock 6 times.
void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) {
+ int64_t video_playout_ntp_ms;
int64_t sync_offset_ms;
double estimated_freq_khz;
// TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the
@@ -510,9 +511,10 @@
// succeeds most of the time, which leads to grabbing a fourth lock.
if (rtp_stream_sync_.GetStreamSyncOffsetInMs(
video_frame.timestamp(), video_frame.render_time_ms(),
- &sync_offset_ms, &estimated_freq_khz)) {
+ &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) {
// TODO(tommi): OnSyncOffsetUpdated grabs a lock.
- stats_proxy_.OnSyncOffsetUpdated(sync_offset_ms, estimated_freq_khz);
+ stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms,
+ estimated_freq_khz);
}
source_tracker_.OnFrameDelivered(video_frame.packet_infos());
@@ -603,11 +605,18 @@
return info;
}
-uint32_t VideoReceiveStream::GetPlayoutTimestamp() const {
+bool VideoReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const {
RTC_NOTREACHED();
return 0;
}
+void VideoReceiveStream::SetEstimatedPlayoutNtpTimestampMs(
+ int64_t ntp_timestamp_ms,
+ int64_t time_ms) {
+ RTC_NOTREACHED();
+}
+
void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
RTC_DCHECK_RUN_ON(&module_process_sequence_checker_);
rtc::CritScope cs(&playout_delay_lock_);
diff --git a/video/video_receive_stream.h b/video/video_receive_stream.h
index 0d0c66a..e72c3b1 100644
--- a/video/video_receive_stream.h
+++ b/video/video_receive_stream.h
@@ -124,7 +124,10 @@
// Implements Syncable.
int id() const override;
absl::optional<Syncable::Info> GetInfo() const override;
- uint32_t GetPlayoutTimestamp() const override;
+ bool GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp,
+ int64_t* time_ms) const override;
+ void SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms,
+ int64_t time_ms) override;
// SetMinimumPlayoutDelay is only called by A/V sync.
void SetMinimumPlayoutDelay(int delay_ms) override;