Make the new jitter buffer the default jitter buffer.
This CL contains only the changes necessary to switch to the new jitter
buffer; cleanup will be done in follow-up CLs.
In this CL:
- Removed the WebRTC-NewVideoJitterBuffer experiment and made the
new video jitter buffer the default one.
- Moved WebRTC.Video.KeyFramesReceivedInPermille and
WebRTC.Video.JitterBufferDelayInMs to the ReceiveStatisticsProxy.
BUG=webrtc:5514
Review-Url: https://codereview.webrtc.org/2627463004
Cr-Commit-Position: refs/heads/master@{#16114}
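
For context, a rough sketch of how a VCMReceiveStatisticsCallback implementation
(e.g. ReceiveStatisticsProxy) could record the two moved histograms from the new
callbacks. The callback signatures and histogram names come from this CL; the
class name, member names, and the metrics.h include path are illustrative
assumptions, not the actual ReceiveStatisticsProxy code:

// Hypothetical sketch; only the callback signatures and histogram names are
// taken from this CL.
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/system_wrappers/include/metrics.h"

namespace webrtc {

class StatsHistogramRecorder : public VCMReceiveStatisticsCallback {
 public:
  ~StatsHistogramRecorder() {
    // Report once at teardown, mirroring what FrameBuffer::UpdateHistograms()
    // did before this CL.
    if (num_total_frames_ > 0) {
      int key_frames_permille = static_cast<int>(
          static_cast<float>(num_key_frames_) * 1000.0f / num_total_frames_ +
          0.5f);
      RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
                                key_frames_permille);
    }
    if (num_delay_samples_ > 0) {
      RTC_HISTOGRAM_COUNTS_10000(
          "WebRTC.Video.JitterBufferDelayInMs",
          static_cast<int>(accumulated_jitter_delay_ms_ / num_delay_samples_));
    }
  }

  // Called by FrameBuffer::InsertFrame() for every complete frame.
  void OnCompleteFrame(bool is_keyframe, size_t size_bytes) override {
    ++num_total_frames_;
    if (is_keyframe)
      ++num_key_frames_;
  }

  // Called by FrameBuffer::UpdateJitterDelay() with the current timings.
  void OnFrameBufferTimingsUpdated(int decode_ms,
                                   int max_decode_ms,
                                   int current_delay_ms,
                                   int target_delay_ms,
                                   int jitter_buffer_ms,
                                   int min_playout_delay_ms,
                                   int render_delay_ms) override {
    accumulated_jitter_delay_ms_ += jitter_buffer_ms;
    ++num_delay_samples_;
  }

  void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) override {}
  void OnDiscardedPacketsUpdated(int discarded_packets) override {}
  void OnFrameCountsUpdated(const FrameCounts& frame_counts) override {}

 private:
  int64_t num_total_frames_ = 0;
  int64_t num_key_frames_ = 0;
  int64_t accumulated_jitter_delay_ms_ = 0;
  int64_t num_delay_samples_ = 0;
};

}  // namespace webrtc
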
diff --git a/webrtc/modules/video_coding/frame_buffer2.cc b/webrtc/modules/video_coding/frame_buffer2.cc
index 279c613..db4928c 100644
--- a/webrtc/modules/video_coding/frame_buffer2.cc
+++ b/webrtc/modules/video_coding/frame_buffer2.cc
@@ -16,6 +16,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
+#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/modules/video_coding/jitter_estimator.h"
#include "webrtc/modules/video_coding/timing.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -34,7 +35,8 @@
FrameBuffer::FrameBuffer(Clock* clock,
VCMJitterEstimator* jitter_estimator,
- VCMTiming* timing)
+ VCMTiming* timing,
+ VCMReceiveStatisticsCallback* stats_callback)
: clock_(clock),
new_countinuous_frame_event_(false, false),
jitter_estimator_(jitter_estimator),
@@ -45,11 +47,10 @@
num_frames_history_(0),
num_frames_buffered_(0),
stopped_(false),
- protection_mode_(kProtectionNack) {}
+ protection_mode_(kProtectionNack),
+ stats_callback_(stats_callback) {}
-FrameBuffer::~FrameBuffer() {
- UpdateHistograms();
-}
+FrameBuffer::~FrameBuffer() {}
FrameBuffer::ReturnReason FrameBuffer::NextFrame(
int64_t max_wait_time_ms,
@@ -162,9 +163,8 @@
rtc::CritScope lock(&crit_);
RTC_DCHECK(frame);
- ++num_total_frames_;
- if (frame->num_references == 0)
- ++num_key_frames_;
+ if (stats_callback_)
+ stats_callback_->OnCompleteFrame(frame->num_references == 0, frame->size());
FrameKey key(frame->picture_id, frame->spatial_layer);
int last_continuous_picture_id =
@@ -365,28 +365,22 @@
}
void FrameBuffer::UpdateJitterDelay() {
- int unused;
- int delay;
- timing_->GetTimings(&unused, &unused, &unused, &unused, &delay, &unused,
- &unused);
+ if (!stats_callback_)
+ return;
- accumulated_delay_ += delay;
- ++accumulated_delay_samples_;
-}
-
-void FrameBuffer::UpdateHistograms() const {
- rtc::CritScope lock(&crit_);
- if (num_total_frames_ > 0) {
- int key_frames_permille = (static_cast<float>(num_key_frames_) * 1000.0f /
- static_cast<float>(num_total_frames_) +
- 0.5f);
- RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
- key_frames_permille);
- }
-
- if (accumulated_delay_samples_ > 0) {
- RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
- accumulated_delay_ / accumulated_delay_samples_);
+ int decode_ms;
+ int max_decode_ms;
+ int current_delay_ms;
+ int target_delay_ms;
+ int jitter_buffer_ms;
+ int min_playout_delay_ms;
+ int render_delay_ms;
+ if (timing_->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
+ &target_delay_ms, &jitter_buffer_ms,
+ &min_playout_delay_ms, &render_delay_ms)) {
+ stats_callback_->OnFrameBufferTimingsUpdated(
+ decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
+ jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
}
}
diff --git a/webrtc/modules/video_coding/frame_buffer2.h b/webrtc/modules/video_coding/frame_buffer2.h
index b41ef2f..d954bf2 100644
--- a/webrtc/modules/video_coding/frame_buffer2.h
+++ b/webrtc/modules/video_coding/frame_buffer2.h
@@ -28,6 +28,7 @@
namespace webrtc {
class Clock;
+class VCMReceiveStatisticsCallback;
class VCMJitterEstimator;
class VCMTiming;
@@ -39,7 +40,8 @@
FrameBuffer(Clock* clock,
VCMJitterEstimator* jitter_estimator,
- VCMTiming* timing);
+ VCMTiming* timing,
+ VCMReceiveStatisticsCallback* stats_proxy);
virtual ~FrameBuffer();
@@ -141,8 +143,6 @@
void UpdateJitterDelay() EXCLUSIVE_LOCKS_REQUIRED(crit_);
- void UpdateHistograms() const;
-
FrameMap frames_ GUARDED_BY(crit_);
rtc::CriticalSection crit_;
@@ -157,16 +157,9 @@
int num_frames_buffered_ GUARDED_BY(crit_);
bool stopped_ GUARDED_BY(crit_);
VCMVideoProtection protection_mode_ GUARDED_BY(crit_);
+ VCMReceiveStatisticsCallback* const stats_callback_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameBuffer);
-
- // For WebRTC.Video.JitterBufferDelayInMs metric.
- int64_t accumulated_delay_ = 0;
- int64_t accumulated_delay_samples_ = 0;
-
- // For WebRTC.Video.KeyFramesReceivedInPermille metric.
- int64_t num_total_frames_ = 0;
- int64_t num_key_frames_ = 0;
};
} // namespace video_coding
diff --git a/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
index 6079bb9..13f86c5 100644
--- a/webrtc/modules/video_coding/frame_buffer2_unittest.cc
+++ b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -25,6 +25,9 @@
#include "webrtc/test/gmock.h"
#include "webrtc/test/gtest.h"
+using testing::_;
+using testing::Return;
+
namespace webrtc {
namespace video_coding {
@@ -54,6 +57,16 @@
return std::max<int>(0, render_time_ms - now_ms - kDecodeTime);
}
+ bool GetTimings(int* decode_ms,
+ int* max_decode_ms,
+ int* current_delay_ms,
+ int* target_delay_ms,
+ int* jitter_buffer_ms,
+ int* min_playout_delay_ms,
+ int* render_delay_ms) const override {
+ return true;
+ }
+
private:
static constexpr int kDelayMs = 50;
static constexpr int kDecodeTime = kDelayMs / 2;
@@ -82,6 +95,27 @@
int64_t ReceivedTime() const override { return 0; }
int64_t RenderTime() const override { return _renderTimeMs; }
+
+ // In EncodedImage |_length| is used to describe its size and |_size| to
+ // describe its capacity.
+ void SetSize(int size) { _length = size; }
+};
+
+class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback {
+ public:
+ MOCK_METHOD2(OnReceiveRatesUpdated,
+ void(uint32_t bitRate, uint32_t frameRate));
+ MOCK_METHOD2(OnCompleteFrame, void(bool is_keyframe, size_t size_bytes));
+ MOCK_METHOD1(OnDiscardedPacketsUpdated, void(int discarded_packets));
+ MOCK_METHOD1(OnFrameCountsUpdated, void(const FrameCounts& frame_counts));
+ MOCK_METHOD7(OnFrameBufferTimingsUpdated,
+ void(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms));
};
class TestFrameBuffer2 : public ::testing::Test {
@@ -95,7 +129,7 @@
: clock_(0),
timing_(&clock_),
jitter_estimator_(&clock_),
- buffer_(&clock_, &jitter_estimator_, &timing_),
+ buffer_(&clock_, &jitter_estimator_, &timing_, &stats_callback_),
rand_(0x34678213),
tear_down_(false),
extract_thread_(&ExtractLoop, this, "Extract Thread"),
@@ -190,6 +224,7 @@
FrameBuffer buffer_;
std::vector<std::unique_ptr<FrameObject>> frames_;
Random rand_;
+ ::testing::NiceMock<VCMReceiveStatisticsCallbackMock> stats_callback_;
int64_t max_wait_time_;
bool tear_down_;
@@ -419,5 +454,30 @@
EXPECT_EQ(pid + 3, InsertFrame(pid + 3, 1, ts, true, pid + 2));
}
+TEST_F(TestFrameBuffer2, StatsCallback) {
+ uint16_t pid = Rand();
+ uint32_t ts = Rand();
+ const int kFrameSize = 5000;
+
+ EXPECT_CALL(stats_callback_, OnCompleteFrame(true, kFrameSize));
+ EXPECT_CALL(stats_callback_,
+ OnFrameBufferTimingsUpdated(_, _, _, _, _, _, _));
+
+ {
+ std::unique_ptr<FrameObjectFake> frame(new FrameObjectFake());
+ frame->SetSize(kFrameSize);
+ frame->picture_id = pid;
+ frame->spatial_layer = 0;
+ frame->timestamp = ts;
+ frame->num_references = 0;
+ frame->inter_layer_predicted = false;
+
+ EXPECT_EQ(buffer_.InsertFrame(std::move(frame)), pid);
+ }
+
+ ExtractFrame();
+ CheckFrame(0, pid, 0);
+}
+
} // namespace video_coding
} // namespace webrtc
diff --git a/webrtc/modules/video_coding/include/video_coding_defines.h b/webrtc/modules/video_coding/include/video_coding_defines.h
index 122ddc6..dede5b6 100644
--- a/webrtc/modules/video_coding/include/video_coding_defines.h
+++ b/webrtc/modules/video_coding/include/video_coding_defines.h
@@ -90,8 +90,16 @@
class VCMReceiveStatisticsCallback {
public:
virtual void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) = 0;
+ virtual void OnCompleteFrame(bool is_keyframe, size_t size_bytes) = 0;
virtual void OnDiscardedPacketsUpdated(int discarded_packets) = 0;
virtual void OnFrameCountsUpdated(const FrameCounts& frame_counts) = 0;
+ virtual void OnFrameBufferTimingsUpdated(int decode_ms,
+ int max_decode_ms,
+ int current_delay_ms,
+ int target_delay_ms,
+ int jitter_buffer_ms,
+ int min_playout_delay_ms,
+ int render_delay_ms) = 0;
protected:
virtual ~VCMReceiveStatisticsCallback() {}
diff --git a/webrtc/modules/video_coding/timing.h b/webrtc/modules/video_coding/timing.h
index e7d2b1f..429c282 100644
--- a/webrtc/modules/video_coding/timing.h
+++ b/webrtc/modules/video_coding/timing.h
@@ -94,13 +94,13 @@
// Return current timing information. Returns true if the first frame has been
// decoded, false otherwise.
- bool GetTimings(int* decode_ms,
- int* max_decode_ms,
- int* current_delay_ms,
- int* target_delay_ms,
- int* jitter_buffer_ms,
- int* min_playout_delay_ms,
- int* render_delay_ms) const;
+ virtual bool GetTimings(int* decode_ms,
+ int* max_decode_ms,
+ int* current_delay_ms,
+ int* target_delay_ms,
+ int* jitter_buffer_ms,
+ int* min_playout_delay_ms,
+ int* render_delay_ms) const;
enum { kDefaultRenderDelayMs = 10 };
enum { kDelayMaxChangeMsPerS = 100 };
diff --git a/webrtc/modules/video_coding/video_receiver.cc b/webrtc/modules/video_coding/video_receiver.cc
index 129a1b5..14f1265 100644
--- a/webrtc/modules/video_coding/video_receiver.cc
+++ b/webrtc/modules/video_coding/video_receiver.cc
@@ -56,31 +56,14 @@
void VideoReceiver::Process() {
// Receive-side statistics
+
+ // TODO(philipel): Remove this if block when we know what to do with
+ // ReceiveStatisticsProxy::QualitySample.
if (_receiveStatsTimer.TimeUntilProcess() == 0) {
_receiveStatsTimer.Processed();
rtc::CritScope cs(&process_crit_);
if (_receiveStatsCallback != nullptr) {
- uint32_t bitRate;
- uint32_t frameRate;
- _receiver.ReceiveStatistics(&bitRate, &frameRate);
- _receiveStatsCallback->OnReceiveRatesUpdated(bitRate, frameRate);
- }
-
- if (_decoderTimingCallback != nullptr) {
- int decode_ms;
- int max_decode_ms;
- int current_delay_ms;
- int target_delay_ms;
- int jitter_buffer_ms;
- int min_playout_delay_ms;
- int render_delay_ms;
- if (_timing->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
- &target_delay_ms, &jitter_buffer_ms,
- &min_playout_delay_ms, &render_delay_ms)) {
- _decoderTimingCallback->OnDecoderTiming(
- decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
- jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
- }
+ _receiveStatsCallback->OnReceiveRatesUpdated(0, 0);
}
}
@@ -292,7 +275,7 @@
return ret;
}
-// Used for the WebRTC-NewVideoJitterBuffer experiment.
+// Used for the new jitter buffer.
// TODO(philipel): Clean up among the Decode functions as we replace
// VCMEncodedFrame with FrameObject.
int32_t VideoReceiver::Decode(const webrtc::VCMEncodedFrame* frame) {