New jitter buffer experiment.
BUG=webrtc:5514
Review-Url: https://codereview.webrtc.org/2480293002
Cr-Commit-Position: refs/heads/master@{#15077}
diff --git a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
index 8c798db..a4f74b7 100644
--- a/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
+++ b/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc
@@ -256,6 +256,10 @@
}
if (encoder_ == NULL) {
encoder_ = new vpx_codec_ctx_t;
+ // Only randomize pid/tl0 the first time the encoder is initialized, so as
+ // not to introduce random jumps mid-stream.
+ picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT
+ tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT
}
if (config_ == NULL) {
config_ = new vpx_codec_enc_cfg_t;
@@ -270,8 +274,6 @@
if (num_temporal_layers_ == 0)
num_temporal_layers_ = 1;
- // Random start 16 bits is enough.
- picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; // NOLINT
// Allocate memory for encoded image
if (encoded_image_._buffer != NULL) {
delete[] encoded_image_._buffer;
@@ -366,8 +368,6 @@
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
- tl0_pic_idx_ = static_cast<uint8_t>(rand()); // NOLINT
-
return InitAndSetControlSettings(inst);
}
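
The hunk above ties the random starting points for picture_id_ and tl0_pic_idx_ to the first allocation of the encoder context, so a later re-initialization of the encoder no longer re-rolls them. A minimal sketch of the resulting behaviour, with the encoder reduced to a toy struct; the one-per-frame increment and the re-configuration scenario are illustrative assumptions, not the encoder's actual update rule:

#include <cstdint>
#include <cstdlib>

// Toy version of the encoder state touched above: the random starting points
// are picked exactly once, and a later re-initialization leaves the counters
// alone so the receiver never sees a discontinuity.
struct ToyVp9EncoderState {
  uint16_t picture_id = 0;  // 15 bits on the wire, wraps at 0x8000.
  uint8_t tl0_pic_idx = 0;
  bool initialized = false;

  void InitEncode() {
    if (!initialized) {
      picture_id = static_cast<uint16_t>(rand()) & 0x7FFF;  // NOLINT
      tl0_pic_idx = static_cast<uint8_t>(rand());           // NOLINT
      initialized = true;
    }
    // A re-init falls through here without touching picture_id/tl0_pic_idx.
  }

  void OnFrameEncoded() {
    picture_id = static_cast<uint16_t>((picture_id + 1) & 0x7FFF);
  }
};

int main() {
  ToyVp9EncoderState state;
  state.InitEncode();
  state.OnFrameEncoded();
  state.InitEncode();  // E.g. a mid-stream re-configuration: ids keep counting.
  return 0;
}
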
diff --git a/webrtc/modules/video_coding/frame_buffer2.cc b/webrtc/modules/video_coding/frame_buffer2.cc
index daac7d5..e02e5b6 100644
--- a/webrtc/modules/video_coding/frame_buffer2.cc
+++ b/webrtc/modules/video_coding/frame_buffer2.cc
@@ -28,7 +28,7 @@
constexpr int kMaxFramesBuffered = 600;
// Max number of decoded frame info that will be saved.
-constexpr int kMaxFramesHistory = 20;
+constexpr int kMaxFramesHistory = 50;
} // namespace
FrameBuffer::FrameBuffer(Clock* clock,
@@ -114,7 +114,7 @@
if (next_frame_it != frames_.end()) {
std::unique_ptr<FrameObject> frame = std::move(next_frame_it->second.frame);
int64_t received_time = frame->ReceivedTime();
- uint32_t timestamp = frame->Timestamp();
+ uint32_t timestamp = frame->timestamp;
int64_t frame_delay;
if (inter_frame_delay_.CalculateDelay(timestamp, &frame_delay,
diff --git a/webrtc/modules/video_coding/frame_buffer2_unittest.cc b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
index ebcbeb0..6079bb9 100644
--- a/webrtc/modules/video_coding/frame_buffer2_unittest.cc
+++ b/webrtc/modules/video_coding/frame_buffer2_unittest.cc
@@ -299,7 +299,7 @@
uint32_t ts = Rand();
InsertFrame(pid, 0, ts, false);
- InsertFrame(pid + 1, 0, ts + kFps20, false);
+ InsertFrame(pid + 1, 0, ts + kFps20, false, pid);
for (int i = 2; i < 10; i += 2) {
uint32_t ts_tl0 = ts + i / 2 * kFps10;
InsertFrame(pid + i, 0, ts_tl0, false, pid + i - 2);
diff --git a/webrtc/modules/video_coding/packet_buffer.cc b/webrtc/modules/video_coding/packet_buffer.cc
index 58ad31e..1b6fecc 100644
--- a/webrtc/modules/video_coding/packet_buffer.cc
+++ b/webrtc/modules/video_coding/packet_buffer.cc
@@ -57,50 +57,57 @@
}
bool PacketBuffer::InsertPacket(const VCMPacket& packet) {
- rtc::CritScope lock(&crit_);
- uint16_t seq_num = packet.seqNum;
- size_t index = seq_num % size_;
+ std::vector<std::unique_ptr<RtpFrameObject>> found_frames;
+ {
+ rtc::CritScope lock(&crit_);
+ uint16_t seq_num = packet.seqNum;
+ size_t index = seq_num % size_;
- if (!first_packet_received_) {
- first_seq_num_ = seq_num;
- last_seq_num_ = seq_num;
- first_packet_received_ = true;
- } else if (AheadOf(first_seq_num_, seq_num)) {
- // If we have explicitly cleared past this packet then it's old,
- // don't insert it.
- if (is_cleared_to_first_seq_num_)
- return false;
+ if (!first_packet_received_) {
+ first_seq_num_ = seq_num;
+ last_seq_num_ = seq_num;
+ first_packet_received_ = true;
+ } else if (AheadOf(first_seq_num_, seq_num)) {
+ // If we have explicitly cleared past this packet then it's old,
+ // don't insert it.
+ if (is_cleared_to_first_seq_num_)
+ return false;
- first_seq_num_ = seq_num;
- }
-
- if (sequence_buffer_[index].used) {
- // Duplicate packet, do nothing.
- if (data_buffer_[index].seqNum == packet.seqNum)
- return true;
-
- // The packet buffer is full, try to expand the buffer.
- while (ExpandBufferSize() && sequence_buffer_[seq_num % size_].used) {
+ first_seq_num_ = seq_num;
}
- index = seq_num % size_;
- // Packet buffer is still full.
- if (sequence_buffer_[index].used)
- return false;
+ if (sequence_buffer_[index].used) {
+ // Duplicate packet, do nothing.
+ if (data_buffer_[index].seqNum == packet.seqNum)
+ return true;
+
+ // The packet buffer is full, try to expand the buffer.
+ while (ExpandBufferSize() && sequence_buffer_[seq_num % size_].used) {
+ }
+ index = seq_num % size_;
+
+ // Packet buffer is still full.
+ if (sequence_buffer_[index].used)
+ return false;
+ }
+
+ if (AheadOf(seq_num, last_seq_num_))
+ last_seq_num_ = seq_num;
+
+ sequence_buffer_[index].frame_begin = packet.isFirstPacket;
+ sequence_buffer_[index].frame_end = packet.markerBit;
+ sequence_buffer_[index].seq_num = packet.seqNum;
+ sequence_buffer_[index].continuous = false;
+ sequence_buffer_[index].frame_created = false;
+ sequence_buffer_[index].used = true;
+ data_buffer_[index] = packet;
+
+ found_frames = FindFrames(seq_num);
}
- if (AheadOf(seq_num, last_seq_num_))
- last_seq_num_ = seq_num;
+ for (std::unique_ptr<RtpFrameObject>& frame : found_frames)
+ received_frame_callback_->OnReceivedFrame(std::move(frame));
- sequence_buffer_[index].frame_begin = packet.isFirstPacket;
- sequence_buffer_[index].frame_end = packet.markerBit;
- sequence_buffer_[index].seq_num = packet.seqNum;
- sequence_buffer_[index].continuous = false;
- sequence_buffer_[index].frame_created = false;
- sequence_buffer_[index].used = true;
- data_buffer_[index] = packet;
-
- FindFrames(seq_num);
return true;
}
@@ -187,7 +194,9 @@
return false;
}
-void PacketBuffer::FindFrames(uint16_t seq_num) {
+std::vector<std::unique_ptr<RtpFrameObject>> PacketBuffer::FindFrames(
+ uint16_t seq_num) {
+ std::vector<std::unique_ptr<RtpFrameObject>> found_frames;
while (PotentialNewFrame(seq_num)) {
size_t index = seq_num % size_;
sequence_buffer_[index].continuous = true;
@@ -204,8 +213,8 @@
int start_index = index;
while (true) {
frame_size += data_buffer_[start_index].sizeBytes;
- max_nack_count = std::max(
- max_nack_count, data_buffer_[start_index].timesNacked);
+ max_nack_count =
+ std::max(max_nack_count, data_buffer_[start_index].timesNacked);
sequence_buffer_[start_index].frame_created = true;
if (sequence_buffer_[start_index].frame_begin)
@@ -215,15 +224,13 @@
start_seq_num--;
}
- std::unique_ptr<RtpFrameObject> frame(
+ found_frames.emplace_back(
new RtpFrameObject(this, start_seq_num, seq_num, frame_size,
max_nack_count, clock_->TimeInMilliseconds()));
-
- received_frame_callback_->OnReceivedFrame(std::move(frame));
}
-
++seq_num;
}
+ return found_frames;
}
void PacketBuffer::ReturnFrame(RtpFrameObject* frame) {
@@ -267,7 +274,6 @@
}
VCMPacket* PacketBuffer::GetPacket(uint16_t seq_num) {
- rtc::CritScope lock(&crit_);
size_t index = seq_num % size_;
if (!sequence_buffer_[index].used ||
seq_num != sequence_buffer_[index].seq_num) {
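
The InsertPacket() change above narrows the critical section: completed frames are collected into found_frames while crit_ is held, and OnReceivedFrame() is invoked only after the lock has been released, so the callback can re-enter the packet buffer or take its own locks without deadlocking. A minimal sketch of the same pattern, with std::mutex and placeholder types standing in for rtc::CritScope and the real frame classes:

#include <memory>
#include <mutex>
#include <utility>
#include <vector>

struct Frame {};
struct FrameSink {
  void OnFrame(std::unique_ptr<Frame>) {}
};

class ToyPacketBuffer {
 public:
  explicit ToyPacketBuffer(FrameSink* sink) : sink_(sink) {}

  bool Insert(int packet) {
    std::vector<std::unique_ptr<Frame>> completed;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      // ... store the packet, then gather any frames it completed ...
      completed = AssembleFrames(packet);
    }  // The lock is released before any user code runs.
    for (std::unique_ptr<Frame>& frame : completed)
      sink_->OnFrame(std::move(frame));
    return true;
  }

 private:
  std::vector<std::unique_ptr<Frame>> AssembleFrames(int /*packet*/) {
    return {};
  }

  std::mutex mutex_;
  FrameSink* const sink_;
};

int main() {
  FrameSink sink;
  ToyPacketBuffer buffer(&sink);
  return buffer.Insert(1) ? 0 : 1;
}
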
diff --git a/webrtc/modules/video_coding/packet_buffer.h b/webrtc/modules/video_coding/packet_buffer.h
index 15645f6..36b3027 100644
--- a/webrtc/modules/video_coding/packet_buffer.h
+++ b/webrtc/modules/video_coding/packet_buffer.h
@@ -99,8 +99,9 @@
EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Test if all packets of a frame has arrived, and if so, creates a frame.
- // May create multiple frames per invocation.
- void FindFrames(uint16_t seq_num) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ // May create multiple frames per call; returns the created frames as a vector.
+ std::vector<std::unique_ptr<RtpFrameObject>> FindFrames(uint16_t seq_num)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Copy the bitstream for |frame| to |destination|.
// Virtual for testing.
@@ -108,7 +109,8 @@
// Get the packet with sequence number |seq_num|.
// Virtual for testing.
- virtual VCMPacket* GetPacket(uint16_t seq_num);
+ virtual VCMPacket* GetPacket(uint16_t seq_num)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
// Mark all slots used by |frame| as not used.
// Virtual for testing.
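
GetPacket() drops its own CritScope and is annotated EXCLUSIVE_LOCKS_REQUIRED(crit_) instead, since it is now only reached from InsertPacket()/FindFrames() while the lock is already held. The sketch below shows how that kind of annotation is enforced at compile time; it uses Clang's generic capability attribute spellings (clang++ -Wthread-safety) rather than WebRTC's thread_annotations.h macros, and the class names are placeholders:

// Compile with: clang++ -std=c++14 -Wthread-safety example.cc
#include <mutex>

#define CAPABILITY(x) __attribute__((capability(x)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__)))
#define ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__)))
#define RELEASE(...) __attribute__((release_capability(__VA_ARGS__)))

class CAPABILITY("mutex") Mutex {
 public:
  void Lock() ACQUIRE() { mu_.lock(); }
  void Unlock() RELEASE() { mu_.unlock(); }

 private:
  std::mutex mu_;
};

class ToyPacketStore {
 public:
  int Insert(int seq_num) {
    mu_.Lock();
    int value = Get(seq_num);  // OK: mu_ is held at this point.
    mu_.Unlock();
    return value;
  }

 private:
  // Analogous to GetPacket() above: callers must already hold mu_, and a call
  // made without the lock is reported as a -Wthread-safety warning.
  int Get(int seq_num) REQUIRES(mu_) { return data_ + seq_num; }

  Mutex mu_;
  int data_ GUARDED_BY(mu_) = 0;
};

int main() {
  ToyPacketStore store;
  return store.Insert(0);
}
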
diff --git a/webrtc/modules/video_coding/rtp_frame_reference_finder.cc b/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
index 861a117..5a9420a 100644
--- a/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
+++ b/webrtc/modules/video_coding/rtp_frame_reference_finder.cc
@@ -371,6 +371,11 @@
const RTPVideoHeaderVP9& codec_header = rtp_codec_header->VP9;
+ bool old_frame = Vp9PidTl0Fix(*frame, &rtp_codec_header->VP9.picture_id,
+ &rtp_codec_header->VP9.tl0_pic_idx);
+ if (old_frame)
+ return;
+
if (codec_header.picture_id == kNoPictureId ||
codec_header.temporal_idx == kNoTemporalIdx) {
ManageFrameGeneric(std::move(frame), codec_header.picture_id);
@@ -585,5 +590,145 @@
return last_unwrap_;
}
+bool RtpFrameReferenceFinder::Vp9PidTl0Fix(const RtpFrameObject& frame,
+ int16_t* picture_id,
+ int16_t* tl0_pic_idx) {
+ const int kTl0PicIdLength = 256;
+ const uint8_t kMaxPidDiff = 128;
+
+ // We are currently receiving VP9 without PID, nothing to fix.
+ if (*picture_id == kNoPictureId)
+ return false;
+
+ // If |vp9_fix_jump_timestamp_| != -1 then a jump has occurred recently.
+ if (vp9_fix_jump_timestamp_ != -1) {
+ // If this frame has a timestamp older than |vp9_fix_jump_timestamp_| then
+ // it predates the frame where the jump was detected and should be
+ // dropped.
+ if (AheadOf<uint32_t>(vp9_fix_jump_timestamp_, frame.timestamp))
+ return true;
+
+ // After 60 seconds, reset |vp9_fix_jump_timestamp_| so as not to
+ // discard old frames when the timestamp wraps.
+ int diff_ms =
+ ForwardDiff<uint32_t>(vp9_fix_jump_timestamp_, frame.timestamp) / 90;
+ if (diff_ms > 60 * 1000)
+ vp9_fix_jump_timestamp_ = -1;
+ }
+
+ // Update |vp9_fix_last_timestamp_| with the most recent timestamp.
+ if (vp9_fix_last_timestamp_ == -1)
+ vp9_fix_last_timestamp_ = frame.timestamp;
+ if (AheadOf<uint32_t>(frame.timestamp, vp9_fix_last_timestamp_))
+ vp9_fix_last_timestamp_ = frame.timestamp;
+
+ uint16_t fixed_pid = Add<kPicIdLength>(*picture_id, vp9_fix_pid_offset_);
+ if (vp9_fix_last_picture_id_ == -1)
+ vp9_fix_last_picture_id_ = *picture_id;
+
+ int16_t fixed_tl0 = kNoTl0PicIdx;
+ if (*tl0_pic_idx != kNoTl0PicIdx) {
+ fixed_tl0 = Add<kTl0PicIdLength>(*tl0_pic_idx, vp9_fix_tl0_pic_idx_offset_);
+ // Update |vp9_fix_last_tl0_pic_idx_| with the most recent tl0 pic index.
+ if (vp9_fix_last_tl0_pic_idx_ == -1)
+ vp9_fix_last_tl0_pic_idx_ = *tl0_pic_idx;
+ if (AheadOf<uint8_t>(fixed_tl0, vp9_fix_last_tl0_pic_idx_))
+ vp9_fix_last_tl0_pic_idx_ = fixed_tl0;
+ }
+
+ bool has_jumped = DetectVp9PicIdJump(fixed_pid, fixed_tl0, frame.timestamp);
+ if (!has_jumped)
+ has_jumped = DetectVp9Tl0PicIdxJump(fixed_tl0, frame.timestamp);
+
+ if (has_jumped) {
+ // First calculate the offset needed to get back to the previous picture
+ // id, then add kMaxPidDiff to avoid accidentally referencing any previous
+ // frames that were inserted into the FrameBuffer.
+ vp9_fix_pid_offset_ = ForwardDiff<uint16_t, kPicIdLength>(
+ *picture_id, vp9_fix_last_picture_id_);
+ vp9_fix_pid_offset_ += kMaxPidDiff;
+
+ fixed_pid = Add<kPicIdLength>(*picture_id, vp9_fix_pid_offset_);
+ vp9_fix_last_picture_id_ = fixed_pid;
+ vp9_fix_jump_timestamp_ = frame.timestamp;
+ gof_info_.clear();
+
+ vp9_fix_tl0_pic_idx_offset_ =
+ ForwardDiff<uint8_t>(*tl0_pic_idx, vp9_fix_last_tl0_pic_idx_);
+ vp9_fix_tl0_pic_idx_offset_ += kMaxGofSaved;
+ fixed_tl0 = Add<kTl0PicIdLength>(*tl0_pic_idx, vp9_fix_tl0_pic_idx_offset_);
+ vp9_fix_last_tl0_pic_idx_ = fixed_tl0;
+ }
+
+ // Update |vp9_fix_last_picture_id_| with the most recent picture id.
+ if (AheadOf<uint16_t, kPicIdLength>(fixed_pid, vp9_fix_last_picture_id_))
+ vp9_fix_last_picture_id_ = fixed_pid;
+
+ *picture_id = fixed_pid;
+ *tl0_pic_idx = fixed_tl0;
+
+ return false;
+}
+
+bool RtpFrameReferenceFinder::DetectVp9PicIdJump(int fixed_pid,
+ int fixed_tl0,
+ uint32_t timestamp) const {
+ // Test if there has been a jump backwards in the picture id.
+ if (AheadOrAt<uint32_t>(timestamp, vp9_fix_last_timestamp_) &&
+ AheadOf<uint16_t, kPicIdLength>(vp9_fix_last_picture_id_, fixed_pid)) {
+ return true;
+ }
+
+ // Test if we have jumped forward too much. This is needed because the
+ // FrameBuffer keeps a history of old frames, and inserting frames with a
+ // far advanced picture id can leave the buffer holding frames that span
+ // more than half of the picture id interval.
+ if (AheadOrAt<uint32_t>(timestamp, vp9_fix_last_timestamp_) &&
+ ForwardDiff<uint16_t, kPicIdLength>(vp9_fix_last_picture_id_, fixed_pid) >
+ 128) {
+ return true;
+ }
+
+ // Special case where the picture id jumps forward, but not by much, and
+ // the tl0 jumps to the id of an already saved gof. To detect this we
+ // check whether the picture id spans beyond the length of that GOF.
+ if (fixed_tl0 != kNoTl0PicIdx) {
+ auto info_it = gof_info_.find(fixed_tl0);
+ if (info_it != gof_info_.end()) {
+ int last_pid_gof_idx_0 =
+ Subtract<kPicIdLength>(info_it->second.last_picture_id,
+ info_it->second.last_picture_id %
+ info_it->second.gof->num_frames_in_gof);
+ int pif_gof_end = Add<kPicIdLength>(
+ last_pid_gof_idx_0, info_it->second.gof->num_frames_in_gof);
+ if (AheadOf<uint16_t, kPicIdLength>(fixed_pid, pif_gof_end))
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool RtpFrameReferenceFinder::DetectVp9Tl0PicIdxJump(int fixed_tl0,
+ uint32_t timestamp) const {
+ if (fixed_tl0 != kNoTl0PicIdx) {
+ // Test if there has been a jump backwards in tl0 pic index.
+ if (AheadOrAt<uint32_t>(timestamp, vp9_fix_last_timestamp_) &&
+ AheadOf<uint8_t>(vp9_fix_last_tl0_pic_idx_, fixed_tl0)) {
+ return true;
+ }
+
+ // Test if there has been a jump forward. If the jump results in the tl0
+ // pic index of this frame being considered smaller than the smallest
+ // entry in |gof_info_|, then we have jumped forward far enough to
+ // wrap.
+ if (!gof_info_.empty() &&
+ AheadOf<uint8_t>(gof_info_.begin()->first, fixed_tl0)) {
+ return true;
+ }
+ }
+ return false;
+}
+
} // namespace video_coding
} // namespace webrtc
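
The remapping in Vp9PidTl0Fix() is modular arithmetic over the 15-bit picture id space: on a detected jump, the offset becomes the forward distance from the incoming id back to the last good id plus kMaxPidDiff (128), so remapped ids stay strictly ahead of anything already delivered to the FrameBuffer. A worked example of one such step, using simplified local helpers rather than the ForwardDiff/Add templates used above:

#include <cassert>
#include <cstdint>

constexpr uint32_t kPicIdLength = 1 << 15;  // Picture ids wrap at 2^15.
constexpr uint32_t kMaxPidDiff = 128;

// Simplified stand-ins for the modular helpers used by Vp9PidTl0Fix().
uint32_t ForwardDiff(uint32_t from, uint32_t to) {
  return (to + kPicIdLength - from) % kPicIdLength;
}
uint32_t Add(uint32_t pid, uint32_t delta) {
  return (pid + delta) % kPicIdLength;
}

int main() {
  // The stream is at picture id 1 and then restarts at 0, i.e. a backwards
  // jump, as in the Vp9PidFix_PidJumpsBackwardThenForward test below.
  uint32_t last_picture_id = 1;
  uint32_t incoming_pid = 0;

  // Offset = distance back to the last good id, plus kMaxPidDiff of headroom.
  uint32_t offset = ForwardDiff(incoming_pid, last_picture_id) + kMaxPidDiff;
  uint32_t fixed_pid = Add(incoming_pid, offset);
  assert(fixed_pid == 129);  // Matches CheckReferencesVp9(129, 0) in the test.
  return 0;
}

Applying the same update again when the picture id later jumps forward to 5000 yields a remapped id of 257, which is what that test expects for its third frame.
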
diff --git a/webrtc/modules/video_coding/rtp_frame_reference_finder.h b/webrtc/modules/video_coding/rtp_frame_reference_finder.h
index e9e7d60..db1ddf8 100644
--- a/webrtc/modules/video_coding/rtp_frame_reference_finder.h
+++ b/webrtc/modules/video_coding/rtp_frame_reference_finder.h
@@ -57,12 +57,12 @@
void ClearTo(uint16_t seq_num);
private:
- static const uint16_t kPicIdLength = 1 << 7;
+ static const uint16_t kPicIdLength = 1 << 15;
static const uint8_t kMaxTemporalLayers = 5;
- static const int kMaxLayerInfo = 10;
- static const int kMaxStashedFrames = 10;
- static const int kMaxNotYetReceivedFrames = 20;
- static const int kMaxGofSaved = 15;
+ static const int kMaxLayerInfo = 50;
+ static const int kMaxStashedFrames = 50;
+ static const int kMaxNotYetReceivedFrames = 100;
+ static const int kMaxGofSaved = 50;
static const int kMaxPaddingAge = 100;
@@ -129,6 +129,24 @@
// All picture ids are unwrapped to 16 bits.
uint16_t UnwrapPictureId(uint16_t picture_id) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+ // Returns true if the frame is old and should be dropped.
+ // TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream (should be
+ // around M59).
+ bool Vp9PidTl0Fix(const RtpFrameObject& frame,
+ int16_t* picture_id,
+ int16_t* tl0_pic_idx) EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream (should be
+ // around M59).
+ bool DetectVp9PicIdJump(int fixed_pid,
+ int fixed_tl0,
+ uint32_t timestamp) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
+
+ // TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream (should be
+ // around M59).
+ bool DetectVp9Tl0PicIdxJump(int fixed_tl0, uint32_t timestamp) const
+ EXCLUSIVE_LOCKS_REQUIRED(crit_);
// For every group of pictures, hold two sequence numbers. The first being
// the sequence number of the last packet of the last completed frame, and
@@ -196,6 +214,15 @@
int cleared_to_seq_num_ GUARDED_BY(crit_);
OnCompleteFrameCallback* frame_callback_;
+
+ // State for the Vp9PidTl0Fix() workaround.
+ // TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream.
+ int vp9_fix_last_timestamp_ = -1;
+ int vp9_fix_jump_timestamp_ = -1;
+ int vp9_fix_last_picture_id_ = -1;
+ int vp9_fix_pid_offset_ = 0;
+ int vp9_fix_last_tl0_pic_idx_ = -1;
+ int vp9_fix_tl0_pic_idx_offset_ = 0;
};
} // namespace video_coding
diff --git a/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc b/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc
index 5fa5ac8..928785c 100644
--- a/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc
+++ b/webrtc/modules/video_coding/rtp_frame_reference_finder_unittest.cc
@@ -122,6 +122,7 @@
bool up_switch = false,
GofInfoVP9* ss = nullptr) {
VCMPacket packet;
+ packet.timestamp = pid;
packet.codec = kVideoCodecVP9;
packet.seqNum = seq_num_start;
packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
@@ -152,6 +153,7 @@
bool inter = false,
std::vector<uint8_t> refs = std::vector<uint8_t>()) {
VCMPacket packet;
+ packet.timestamp = pid;
packet.codec = kVideoCodecVP9;
packet.seqNum = seq_num_start;
packet.frameType = keyframe ? kVideoFrameKey : kVideoFrameDelta;
@@ -1221,5 +1223,235 @@
CheckReferencesVp9(pid + 8, 1, pid + 7);
}
+// TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream (should be
+// around M59).
+TEST_F(TestRtpFrameReferenceFinder, Vp9PidFix_PidJumpsBackwardThenForward) {
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1);
+
+ VCMPacket packet;
+ packet.timestamp = 0;
+ packet.codec = kVideoCodecVP9;
+ packet.frameType = kVideoFrameKey;
+ packet.video_header.codecHeader.VP9.flexible_mode = false;
+ packet.video_header.codecHeader.VP9.picture_id = 1;
+ packet.video_header.codecHeader.VP9.temporal_idx = 0;
+ packet.video_header.codecHeader.VP9.spatial_idx = 0;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 0;
+ packet.video_header.codecHeader.VP9.temporal_up_switch = true;
+ packet.video_header.codecHeader.VP9.ss_data_available = true;
+ packet.video_header.codecHeader.VP9.gof = ss;
+
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ // Timestamp goes forward but pid goes backwards.
+ packet.timestamp = 1;
+ packet.video_header.codecHeader.VP9.picture_id = 0;
+
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 2;
+ packet.video_header.codecHeader.VP9.picture_id = 5000;
+
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ ASSERT_EQ(3UL, frames_from_callback_.size());
+ CheckReferencesVp9(1, 0);
+ CheckReferencesVp9(129, 0);
+ CheckReferencesVp9(257, 0);
+}
+
+// TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream (should be
+// around M59).
+TEST_F(TestRtpFrameReferenceFinder, Vp9PidFix_Tl0JumpsBackwardThenForward) {
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1);
+
+ VCMPacket packet;
+ packet.timestamp = 0;
+ packet.codec = kVideoCodecVP9;
+ packet.frameType = kVideoFrameKey;
+ packet.video_header.codecHeader.VP9.flexible_mode = false;
+ packet.video_header.codecHeader.VP9.picture_id = 0;
+ packet.video_header.codecHeader.VP9.temporal_idx = 0;
+ packet.video_header.codecHeader.VP9.spatial_idx = 0;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 1;
+ packet.video_header.codecHeader.VP9.temporal_up_switch = true;
+ packet.video_header.codecHeader.VP9.ss_data_available = true;
+ packet.video_header.codecHeader.VP9.gof = ss;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 1;
+ packet.video_header.codecHeader.VP9.picture_id = 1;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 0;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 2;
+ packet.frameType = kVideoFrameDelta;
+ packet.video_header.codecHeader.VP9.picture_id = 2;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 2;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 3;
+ packet.frameType = kVideoFrameKey;
+ packet.video_header.codecHeader.VP9.ss_data_available = true;
+ packet.video_header.codecHeader.VP9.picture_id = 3;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 129;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp9(0, 0);
+ CheckReferencesVp9(128, 0);
+ CheckReferencesVp9(129, 0, 128);
+ CheckReferencesVp9(257, 0);
+}
+
+// TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream (should be
+// around M59).
+TEST_F(TestRtpFrameReferenceFinder, Vp9PidFix_PidSmallJumpForward) {
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1);
+
+ VCMPacket packet;
+ packet.timestamp = 0;
+ packet.codec = kVideoCodecVP9;
+ packet.frameType = kVideoFrameKey;
+ packet.video_header.codecHeader.VP9.flexible_mode = false;
+ packet.video_header.codecHeader.VP9.picture_id = 1;
+ packet.video_header.codecHeader.VP9.temporal_idx = 0;
+ packet.video_header.codecHeader.VP9.spatial_idx = 0;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 1;
+ packet.video_header.codecHeader.VP9.temporal_up_switch = true;
+ packet.video_header.codecHeader.VP9.ss_data_available = true;
+ packet.video_header.codecHeader.VP9.gof = ss;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 1;
+ packet.video_header.codecHeader.VP9.picture_id = 2;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 2;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 2;
+ packet.video_header.codecHeader.VP9.picture_id = 3;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 2;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 2;
+ packet.video_header.codecHeader.VP9.picture_id = 4;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 1;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ ASSERT_EQ(4UL, frames_from_callback_.size());
+ CheckReferencesVp9(1, 0);
+ CheckReferencesVp9(2, 0);
+ CheckReferencesVp9(3, 0);
+ CheckReferencesVp9(131, 0);
+}
+
+// TODO(philipel): Remove when VP9 PID/TL0 does not jump mid-stream (should be
+// around M59).
+TEST_F(TestRtpFrameReferenceFinder, Vp9PidFix_DropOldFrame) {
+ GofInfoVP9 ss;
+ ss.SetGofInfoVP9(kTemporalStructureMode1);
+
+ VCMPacket packet;
+ packet.timestamp = 0;
+ packet.codec = kVideoCodecVP9;
+ packet.frameType = kVideoFrameKey;
+ packet.video_header.codecHeader.VP9.flexible_mode = false;
+ packet.video_header.codecHeader.VP9.picture_id = 1;
+ packet.video_header.codecHeader.VP9.temporal_idx = 0;
+ packet.video_header.codecHeader.VP9.spatial_idx = 0;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 1;
+ packet.video_header.codecHeader.VP9.temporal_up_switch = true;
+ packet.video_header.codecHeader.VP9.ss_data_available = true;
+ packet.video_header.codecHeader.VP9.gof = ss;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 1;
+ packet.video_header.codecHeader.VP9.picture_id = 0;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 2;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ packet.timestamp = 0;
+ packet.video_header.codecHeader.VP9.picture_id = 3;
+ packet.video_header.codecHeader.VP9.tl0_pic_idx = 2;
+ {
+ ref_packet_buffer_->InsertPacket(packet);
+ std::unique_ptr<RtpFrameObject> frame(
+ new RtpFrameObject(ref_packet_buffer_, 0, 0, 0, 0, 0));
+ reference_finder_->ManageFrame(std::move(frame));
+ }
+
+ ASSERT_EQ(2UL, frames_from_callback_.size());
+ CheckReferencesVp9(1, 0);
+ CheckReferencesVp9(129, 0);
+}
+
} // namespace video_coding
} // namespace webrtc
diff --git a/webrtc/modules/video_coding/timing.cc b/webrtc/modules/video_coding/timing.cc
index 29b064e..d41e530 100644
--- a/webrtc/modules/video_coding/timing.cc
+++ b/webrtc/modules/video_coding/timing.cc
@@ -154,6 +154,7 @@
max_change_ms = kDelayMaxChangeMsPerS *
(frame_timestamp - prev_frame_timestamp_) / 90000;
}
+
if (max_change_ms <= 0) {
// Any changes less than 1 ms are truncated and
// will be postponed. Negative change will be due
diff --git a/webrtc/modules/video_coding/video_coding_impl.h b/webrtc/modules/video_coding/video_coding_impl.h
index cb24654..fa19ac4 100644
--- a/webrtc/modules/video_coding/video_coding_impl.h
+++ b/webrtc/modules/video_coding/video_coding_impl.h
@@ -143,6 +143,8 @@
int32_t Decode(uint16_t maxWaitTimeMs);
+ int32_t Decode(const webrtc::VCMEncodedFrame* frame);
+
int32_t ReceiveCodec(VideoCodec* currentReceiveCodec) const;
VideoCodecType ReceiveCodec() const;
diff --git a/webrtc/modules/video_coding/video_receiver.cc b/webrtc/modules/video_coding/video_receiver.cc
index 475f686..46373ef 100644
--- a/webrtc/modules/video_coding/video_receiver.cc
+++ b/webrtc/modules/video_coding/video_receiver.cc
@@ -290,6 +290,14 @@
return ret;
}
+// Used for the WebRTC-NewVideoJitterBuffer experiment.
+ // TODO(philipel): Clean up the Decode functions as VCMEncodedFrame is
+ // replaced with FrameObject.
+int32_t VideoReceiver::Decode(const webrtc::VCMEncodedFrame* frame) {
+ rtc::CritScope lock(&receive_crit_);
+ return Decode(*frame);
+}
+
int32_t VideoReceiver::RequestSliceLossIndication(
const uint64_t pictureID) const {
TRACE_EVENT1("webrtc", "RequestSLI", "picture_id", pictureID);
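
The added Decode(const webrtc::VCMEncodedFrame*) overload gives the new jitter buffer's decode path a way to hand an already assembled, decodable frame straight to the receiver under receive_crit_. A rough, hypothetical sketch of such a caller; the Buffer/EncodedFrame/Receiver types below are placeholders, not the actual FrameBuffer/FrameObject/VideoReceiver wiring:

#include <memory>

// Hypothetical wiring only: EncodedFrame, Buffer and Receiver stand in for
// webrtc's FrameObject, FrameBuffer and vcm::VideoReceiver.
struct EncodedFrame {};

struct Buffer {
  std::unique_ptr<EncodedFrame> NextFrame() { return nullptr; }
};

struct Receiver {
  int Decode(const EncodedFrame* frame) { return frame != nullptr ? 0 : -1; }
};

void DecodeLoop(Buffer* buffer, Receiver* receiver) {
  // Frames arrive fully assembled and in decodable order from the new jitter
  // buffer, so each one can be handed straight to the Decode() overload.
  while (std::unique_ptr<EncodedFrame> frame = buffer->NextFrame())
    receiver->Decode(frame.get());
}

int main() {
  Buffer buffer;
  Receiver receiver;
  DecodeLoop(&buffer, &receiver);
  return 0;
}
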