/*
 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_coding/frame_buffer2.h"

#include <algorithm>
#include <cstring>
#include <queue>

#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/video_coding/jitter_estimator.h"
#include "webrtc/modules/video_coding/timing.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/system_wrappers/include/metrics.h"

namespace webrtc {
namespace video_coding {

namespace {
// Max number of frames the buffer will hold.
constexpr int kMaxFramesBuffered = 600;

// Max number of decoded frame info that will be saved.
constexpr int kMaxFramesHistory = 50;
}  // namespace

FrameBuffer::FrameBuffer(Clock* clock,
                         VCMJitterEstimator* jitter_estimator,
                         VCMTiming* timing)
    : clock_(clock),
      new_countinuous_frame_event_(false, false),
      jitter_estimator_(jitter_estimator),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      last_decoded_frame_it_(frames_.end()),
      last_continuous_frame_it_(frames_.end()),
      num_frames_history_(0),
      num_frames_buffered_(0),
      stopped_(false),
      protection_mode_(kProtectionNack) {}

FrameBuffer::~FrameBuffer() {
  UpdateHistograms();
}

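// Blocks for at most |max_wait_time_ms|, waiting for the next continuous and
// decodable frame to become due for decoding. Returns kFrameFound and hands
// the frame over in |frame_out|, kTimeout if no such frame appeared within the
// wait budget, or kStopped if Stop() was called.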
FrameBuffer::ReturnReason FrameBuffer::NextFrame(
    int64_t max_wait_time_ms,
    std::unique_ptr<FrameObject>* frame_out) {
  int64_t latest_return_time = clock_->TimeInMilliseconds() + max_wait_time_ms;
  int64_t wait_ms = max_wait_time_ms;
  FrameMap::iterator next_frame_it;

  do {
    int64_t now_ms = clock_->TimeInMilliseconds();
    {
      rtc::CritScope lock(&crit_);
      new_countinuous_frame_event_.Reset();
      if (stopped_)
        return kStopped;

      wait_ms = max_wait_time_ms;

      // We need to hold |crit_| in order to access |frames_|, therefore
      // |next_frame_it| is reset here inside the loop instead of outside it,
      // so that the lock is not acquired unnecessarily.
      next_frame_it = frames_.end();

      // |frame_it| points to the first frame after the
      // |last_decoded_frame_it_|.
      auto frame_it = frames_.end();
      if (last_decoded_frame_it_ == frames_.end()) {
        frame_it = frames_.begin();
      } else {
        frame_it = last_decoded_frame_it_;
        ++frame_it;
      }

      // |continuous_end_it| points to the first frame after the
      // |last_continuous_frame_it_|.
      auto continuous_end_it = last_continuous_frame_it_;
      if (continuous_end_it != frames_.end())
        ++continuous_end_it;

      for (; frame_it != continuous_end_it; ++frame_it) {
        if (!frame_it->second.continuous ||
            frame_it->second.num_missing_decodable > 0) {
          continue;
        }

        FrameObject* frame = frame_it->second.frame.get();
        next_frame_it = frame_it;
        if (frame->RenderTime() == -1)
          frame->SetRenderTime(timing_->RenderTimeMs(frame->timestamp, now_ms));
        wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);

        // This will cause the frame buffer to prefer high framerate rather
        // than high resolution in the case of the decoder not decoding fast
        // enough and the stream having multiple spatial and temporal layers.
        if (wait_ms == 0)
          continue;

        break;
      }
    }  // rtc::CritScope lock(&crit_);

    wait_ms = std::min<int64_t>(wait_ms, latest_return_time - now_ms);
    wait_ms = std::max<int64_t>(wait_ms, 0);
  } while (new_countinuous_frame_event_.Wait(wait_ms));

  rtc::CritScope lock(&crit_);
  if (next_frame_it != frames_.end()) {
    std::unique_ptr<FrameObject> frame = std::move(next_frame_it->second.frame);

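    // Only frames that were not recovered by retransmission are used to update
    // the jitter estimate; a retransmitted frame's inter-frame delay includes
    // the retransmission round trip and would skew the estimate.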
    if (!frame->delayed_by_retransmission()) {
      int64_t frame_delay;

      if (inter_frame_delay_.CalculateDelay(frame->timestamp, &frame_delay,
                                            frame->ReceivedTime())) {
        jitter_estimator_->UpdateEstimate(frame_delay, frame->size());
      }

      float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
      timing_->SetJitterDelay(jitter_estimator_->GetJitterEstimate(rtt_mult));
      timing_->UpdateCurrentDelay(frame->RenderTime(),
                                  clock_->TimeInMilliseconds());
    }

    UpdateJitterDelay();

    PropagateDecodability(next_frame_it->second);
    AdvanceLastDecodedFrame(next_frame_it);
    last_decoded_frame_timestamp_ = frame->timestamp;
    *frame_out = std::move(frame);
    return kFrameFound;
  } else {
    return kTimeout;
  }
}

void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  rtc::CritScope lock(&crit_);
  protection_mode_ = mode;
}

void FrameBuffer::Start() {
  rtc::CritScope lock(&crit_);
  stopped_ = false;
}

void FrameBuffer::Stop() {
  rtc::CritScope lock(&crit_);
  stopped_ = true;
  new_countinuous_frame_event_.Set();
}

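// Inserts |frame| into the buffer and updates the dependency bookkeeping for
// it and the frames that reference it. Returns the picture id of the last
// continuous frame. Frames that are duplicates, would overflow the buffer, or
// are older than the last frame handed off for decoding are dropped.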
int FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
  rtc::CritScope lock(&crit_);
  RTC_DCHECK(frame);

  ++num_total_frames_;
  if (frame->num_references == 0)
    ++num_key_frames_;

  FrameKey key(frame->picture_id, frame->spatial_layer);
  int last_continuous_picture_id =
      last_continuous_frame_it_ == frames_.end()
          ? -1
          : last_continuous_frame_it_->first.picture_id;

  if (num_frames_buffered_ >= kMaxFramesBuffered) {
    LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
                    << ":" << static_cast<int>(key.spatial_layer)
                    << ") could not be inserted due to the frame "
                    << "buffer being full, dropping frame.";
    return last_continuous_picture_id;
  }

  if (frame->inter_layer_predicted && frame->spatial_layer == 0) {
    LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
                    << ":" << static_cast<int>(key.spatial_layer)
                    << ") is marked as inter layer predicted, dropping frame.";
    return last_continuous_picture_id;
  }

  if (last_decoded_frame_it_ != frames_.end() &&
      key < last_decoded_frame_it_->first) {
    if (AheadOf(frame->timestamp, last_decoded_frame_timestamp_) &&
        frame->num_references == 0) {
      // If this frame has a newer timestamp but an earlier picture id, then we
      // assume there has been a jump in the picture id due to an encoder
      // reconfiguration or some other reason. Even though this is not
      // according to spec, we can still continue to decode from this frame if
      // it is a keyframe.
      LOG(LS_WARNING) << "A jump in picture id was detected, clearing buffer.";
      ClearFramesAndHistory();
      last_continuous_picture_id = -1;
    } else {
      LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                      << key.picture_id << ":"
                      << static_cast<int>(key.spatial_layer)
                      << ") inserted after frame ("
                      << last_decoded_frame_it_->first.picture_id << ":"
                      << static_cast<int>(
                             last_decoded_frame_it_->first.spatial_layer)
                      << ") was handed off for decoding, dropping frame.";
      return last_continuous_picture_id;
    }
  }

  auto info = frames_.insert(std::make_pair(key, FrameInfo())).first;

  if (info->second.frame) {
    LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
                    << ":" << static_cast<int>(key.spatial_layer)
                    << ") already inserted, dropping frame.";
    return last_continuous_picture_id;
  }

  if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
    return last_continuous_picture_id;

  info->second.frame = std::move(frame);
  ++num_frames_buffered_;

  if (info->second.num_missing_continuous == 0) {
    info->second.continuous = true;
    PropagateContinuity(info);
    last_continuous_picture_id = last_continuous_frame_it_->first.picture_id;

    // Since we now have new continuous frames, there might be a better frame
    // to return from NextFrame. Signal that thread so that it can again choose
    // which frame to return.
    new_countinuous_frame_event_.Set();
  }

  return last_continuous_picture_id;
}

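// Propagates continuity from |start|: every dependent frame whose references
// are now all continuous is marked continuous as well (breadth-first), and
// |last_continuous_frame_it_| is advanced to the newest continuous frame.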
void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
  RTC_DCHECK(start->second.continuous);
  if (last_continuous_frame_it_ == frames_.end())
    last_continuous_frame_it_ = start;

  std::queue<FrameMap::iterator> continuous_frames;
  continuous_frames.push(start);

  // A simple BFS to traverse continuous frames.
  while (!continuous_frames.empty()) {
    auto frame = continuous_frames.front();
    continuous_frames.pop();

    if (last_continuous_frame_it_->first < frame->first)
      last_continuous_frame_it_ = frame;

    // Loop through all dependent frames; if a dependent frame no longer has
    // any unfulfilled dependencies, then that frame is continuous as well.
    for (size_t d = 0; d < frame->second.num_dependent_frames; ++d) {
      auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
      --frame_ref->second.num_missing_continuous;

      if (frame_ref->second.num_missing_continuous == 0) {
        frame_ref->second.continuous = true;
        continuous_frames.push(frame_ref);
      }
    }
  }
}

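// Called when the frame described by |info| is handed off for decoding;
// decrements the number of missing decodable references for every frame that
// depends on it.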
void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
  for (size_t d = 0; d < info.num_dependent_frames; ++d) {
    auto ref_info = frames_.find(info.dependent_frames[d]);
    RTC_DCHECK(ref_info != frames_.end());
    RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
    --ref_info->second.num_missing_decodable;
  }
}

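// Advances |last_decoded_frame_it_| to |decoded|, erasing any undecoded frames
// that were skipped over, and keeps at most kMaxFramesHistory decoded entries
// in the map.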
void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
  if (last_decoded_frame_it_ == frames_.end()) {
    last_decoded_frame_it_ = frames_.begin();
  } else {
    RTC_DCHECK(last_decoded_frame_it_->first < decoded->first);
    ++last_decoded_frame_it_;
  }
  --num_frames_buffered_;
  ++num_frames_history_;

  // First, delete non-decoded frames from the history.
  while (last_decoded_frame_it_ != decoded) {
    if (last_decoded_frame_it_->second.frame)
      --num_frames_buffered_;
    last_decoded_frame_it_ = frames_.erase(last_decoded_frame_it_);
  }

  // Then remove old history if we have too much history saved.
  if (num_frames_history_ > kMaxFramesHistory) {
    frames_.erase(frames_.begin());
    --num_frames_history_;
  }
}

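// Fills in the dependency information (missing continuous/decodable counts and
// backwards references) for the newly inserted |frame|. Returns false if
// |frame| depends on a frame earlier than the last decoded frame that was
// never decoded, in which case |frame| cannot be decoded and should be
// dropped.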
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
                                                   FrameMap::iterator info) {
  FrameKey key(frame.picture_id, frame.spatial_layer);
  info->second.num_missing_continuous = frame.num_references;
  info->second.num_missing_decodable = frame.num_references;

  RTC_DCHECK(last_decoded_frame_it_ == frames_.end() ||
             last_decoded_frame_it_->first < info->first);

  // Check how many dependencies have already been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    FrameKey ref_key(frame.references[i], frame.spatial_layer);
    auto ref_info = frames_.find(ref_key);

    // Does |frame| depend on a frame earlier than the last decoded frame?
    if (last_decoded_frame_it_ != frames_.end() &&
        ref_key <= last_decoded_frame_it_->first) {
      if (ref_info == frames_.end()) {
        LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << key.picture_id << ":"
                        << static_cast<int>(key.spatial_layer)
                        << ") depends on a non-decoded frame earlier than "
                        << "the last decoded frame, dropping frame.";
        return false;
      }

      --info->second.num_missing_continuous;
      --info->second.num_missing_decodable;
    } else {
      if (ref_info == frames_.end())
        ref_info = frames_.insert(std::make_pair(ref_key, FrameInfo())).first;

      if (ref_info->second.continuous)
        --info->second.num_missing_continuous;

      // Add a backwards reference so |frame| can be updated when new
      // frames are inserted or decoded.
      ref_info->second.dependent_frames[ref_info->second.num_dependent_frames] =
          key;
      ++ref_info->second.num_dependent_frames;
    }
    RTC_DCHECK_LE(ref_info->second.num_missing_continuous,
                  ref_info->second.num_missing_decodable);
  }

  // Check if we have the lower spatial layer frame.
  if (frame.inter_layer_predicted) {
    ++info->second.num_missing_continuous;
    ++info->second.num_missing_decodable;

    FrameKey ref_key(frame.picture_id, frame.spatial_layer - 1);
    // Get or create the FrameInfo for the referenced frame.
    auto ref_info = frames_.insert(std::make_pair(ref_key, FrameInfo())).first;
    if (ref_info->second.continuous)
      --info->second.num_missing_continuous;

    if (ref_info == last_decoded_frame_it_) {
      --info->second.num_missing_decodable;
    } else {
      ref_info->second.dependent_frames[ref_info->second.num_dependent_frames] =
          key;
      ++ref_info->second.num_dependent_frames;
    }
    RTC_DCHECK_LE(ref_info->second.num_missing_continuous,
                  ref_info->second.num_missing_decodable);
  }

  RTC_DCHECK_LE(info->second.num_missing_continuous,
                info->second.num_missing_decodable);

  return true;
}

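// Samples the current delay reported by |timing_| so that UpdateHistograms()
// can report the average jitter buffer delay.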
void FrameBuffer::UpdateJitterDelay() {
  int unused;
  int delay;
  timing_->GetTimings(&unused, &unused, &unused, &unused, &delay, &unused,
                      &unused);

  accumulated_delay_ += delay;
  ++accumulated_delay_samples_;
}

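// Reports the permille of received key frames and the average jitter buffer
// delay to UMA histograms. Called from the destructor.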
void FrameBuffer::UpdateHistograms() const {
  rtc::CritScope lock(&crit_);
  if (num_total_frames_ > 0) {
    int key_frames_permille = (static_cast<float>(num_key_frames_) * 1000.0f /
                                   static_cast<float>(num_total_frames_) +
                               0.5f);
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
                              key_frames_permille);
  }

  if (accumulated_delay_samples_ > 0) {
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
                               accumulated_delay_ / accumulated_delay_samples_);
  }
}

void FrameBuffer::ClearFramesAndHistory() {
  frames_.clear();
  last_decoded_frame_it_ = frames_.end();
  last_continuous_frame_it_ = frames_.end();
  num_frames_history_ = 0;
  num_frames_buffered_ = 0;
}

}  // namespace video_coding
}  // namespace webrtc