blob: 58c41bf1d087e50ecf36a9a3b819072f82ffa7c3 [file] [log] [blame]
philipelbe7a9e52016-05-19 12:19:35 +02001/*
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11#include "webrtc/modules/video_coding/frame_buffer2.h"
12
13#include <algorithm>
philipele0b2f152016-09-28 10:23:49 +020014#include <cstring>
15#include <queue>
philipelbe7a9e52016-05-19 12:19:35 +020016
17#include "webrtc/base/checks.h"
philipele0b2f152016-09-28 10:23:49 +020018#include "webrtc/base/logging.h"
philipelbe7a9e52016-05-19 12:19:35 +020019#include "webrtc/modules/video_coding/jitter_estimator.h"
philipelbe7a9e52016-05-19 12:19:35 +020020#include "webrtc/modules/video_coding/timing.h"
21#include "webrtc/system_wrappers/include/clock.h"
philipel266f0a42016-11-28 08:49:07 -080022#include "webrtc/system_wrappers/include/metrics.h"
philipelbe7a9e52016-05-19 12:19:35 +020023
24namespace webrtc {
25namespace video_coding {
26
namespace {
// Max number of frames the buffer will hold before InsertFrame starts
// dropping incoming frames.
constexpr int kMaxFramesBuffered = 600;

// Max number of decoded frame info that will be saved (the decode history
// trimmed in AdvanceLastDecodedFrame).
constexpr int kMaxFramesHistory = 50;
}  // namespace
34
// Constructs a frame buffer; |clock|, |jitter_estimator| and |timing| are
// borrowed and must outlive this object.
FrameBuffer::FrameBuffer(Clock* clock,
                         VCMJitterEstimator* jitter_estimator,
                         VCMTiming* timing)
    : clock_(clock),
      // NOTE(review): member name is misspelled ("countinuous") in the
      // header; renaming requires a coordinated header change.
      new_countinuous_frame_event_(false, false),
      jitter_estimator_(jitter_estimator),
      timing_(timing),
      inter_frame_delay_(clock_->TimeInMilliseconds()),
      // Both iterators start at end(): nothing decoded or continuous yet.
      last_decoded_frame_it_(frames_.end()),
      last_continuous_frame_it_(frames_.end()),
      num_frames_history_(0),
      num_frames_buffered_(0),
      stopped_(false),
      protection_mode_(kProtectionNack) {}
philipel266f0a42016-11-28 08:49:07 -080049
FrameBuffer::~FrameBuffer() {
  // Flush lifetime statistics (key-frame ratio, mean jitter delay) to the
  // metrics histograms before the buffer goes away.
  UpdateHistograms();
}
philipelbe7a9e52016-05-19 12:19:35 +020053
philipel75562822016-09-05 10:57:41 +020054FrameBuffer::ReturnReason FrameBuffer::NextFrame(
55 int64_t max_wait_time_ms,
56 std::unique_ptr<FrameObject>* frame_out) {
philipelbe7a9e52016-05-19 12:19:35 +020057 int64_t latest_return_time = clock_->TimeInMilliseconds() + max_wait_time_ms;
philipel504c47d2016-06-30 17:33:02 +020058 int64_t wait_ms = max_wait_time_ms;
philipele0b2f152016-09-28 10:23:49 +020059 FrameMap::iterator next_frame_it;
60
61 do {
62 int64_t now_ms = clock_->TimeInMilliseconds();
philipel504c47d2016-06-30 17:33:02 +020063 {
64 rtc::CritScope lock(&crit_);
philipele0b2f152016-09-28 10:23:49 +020065 new_countinuous_frame_event_.Reset();
philipel504c47d2016-06-30 17:33:02 +020066 if (stopped_)
philipel75562822016-09-05 10:57:41 +020067 return kStopped;
philipelbe7a9e52016-05-19 12:19:35 +020068
philipel504c47d2016-06-30 17:33:02 +020069 wait_ms = max_wait_time_ms;
philipele0b2f152016-09-28 10:23:49 +020070
71 // Need to hold |crit_| in order to use |frames_|, therefore we
72 // set it here in the loop instead of outside the loop in order to not
73 // acquire the lock unnecesserily.
philipel4f6cd6a2016-08-03 10:59:32 +020074 next_frame_it = frames_.end();
philipelbe7a9e52016-05-19 12:19:35 +020075
philipele0b2f152016-09-28 10:23:49 +020076 // |frame_it| points to the first frame after the
77 // |last_decoded_frame_it_|.
78 auto frame_it = frames_.end();
79 if (last_decoded_frame_it_ == frames_.end()) {
80 frame_it = frames_.begin();
philipelbe7a9e52016-05-19 12:19:35 +020081 } else {
philipele0b2f152016-09-28 10:23:49 +020082 frame_it = last_decoded_frame_it_;
83 ++frame_it;
philipelbe7a9e52016-05-19 12:19:35 +020084 }
philipele0b2f152016-09-28 10:23:49 +020085
86 // |continuous_end_it| points to the first frame after the
87 // |last_continuous_frame_it_|.
88 auto continuous_end_it = last_continuous_frame_it_;
89 if (continuous_end_it != frames_.end())
90 ++continuous_end_it;
91
92 for (; frame_it != continuous_end_it; ++frame_it) {
philipel93e451b2016-10-06 12:25:13 +020093 if (!frame_it->second.continuous ||
94 frame_it->second.num_missing_decodable > 0) {
philipele0b2f152016-09-28 10:23:49 +020095 continue;
philipel93e451b2016-10-06 12:25:13 +020096 }
philipele0b2f152016-09-28 10:23:49 +020097
98 FrameObject* frame = frame_it->second.frame.get();
99 next_frame_it = frame_it;
100 if (frame->RenderTime() == -1)
101 frame->SetRenderTime(timing_->RenderTimeMs(frame->timestamp, now_ms));
102 wait_ms = timing_->MaxWaitingTime(frame->RenderTime(), now_ms);
103
104 // This will cause the frame buffer to prefer high framerate rather
105 // than high resolution in the case of the decoder not decoding fast
106 // enough and the stream has multiple spatial and temporal layers.
107 if (wait_ms == 0)
108 continue;
109
110 break;
111 }
112 } // rtc::Critscope lock(&crit_);
113
114 wait_ms = std::min<int64_t>(wait_ms, latest_return_time - now_ms);
115 wait_ms = std::max<int64_t>(wait_ms, 0);
116 } while (new_countinuous_frame_event_.Wait(wait_ms));
117
118 rtc::CritScope lock(&crit_);
119 if (next_frame_it != frames_.end()) {
120 std::unique_ptr<FrameObject> frame = std::move(next_frame_it->second.frame);
121 int64_t received_time = frame->ReceivedTime();
philipelfd5a20f2016-11-15 00:57:57 -0800122 uint32_t timestamp = frame->timestamp;
philipele0b2f152016-09-28 10:23:49 +0200123
124 int64_t frame_delay;
125 if (inter_frame_delay_.CalculateDelay(timestamp, &frame_delay,
126 received_time)) {
nisse37abf532016-10-28 00:37:29 -0700127 jitter_estimator_->UpdateEstimate(frame_delay, frame->size());
philipelbe7a9e52016-05-19 12:19:35 +0200128 }
philipele0b2f152016-09-28 10:23:49 +0200129 float rtt_mult = protection_mode_ == kProtectionNackFEC ? 0.0 : 1.0;
130 timing_->SetJitterDelay(jitter_estimator_->GetJitterEstimate(rtt_mult));
131 timing_->UpdateCurrentDelay(frame->RenderTime(),
132 clock_->TimeInMilliseconds());
133
philipelbe742702016-11-30 01:31:40 -0800134 UpdateJitterDelay();
135
philipele0b2f152016-09-28 10:23:49 +0200136 PropagateDecodability(next_frame_it->second);
137 AdvanceLastDecodedFrame(next_frame_it);
philipelfcc60062017-01-18 05:35:20 -0800138 last_decoded_frame_timestamp_ = frame->timestamp;
philipele0b2f152016-09-28 10:23:49 +0200139 *frame_out = std::move(frame);
140 return kFrameFound;
141 } else {
142 return kTimeout;
philipelbe7a9e52016-05-19 12:19:35 +0200143 }
144}
145
// Sets the protection mode, which controls how much of the RTT is included
// in the jitter estimate used by NextFrame().
void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
  rtc::CritScope lock(&crit_);
  protection_mode_ = mode;
}
150
// Re-enables the buffer after a Stop(); NextFrame() will serve frames again.
void FrameBuffer::Start() {
  rtc::CritScope lock(&crit_);
  stopped_ = false;
}
155
// Stops the buffer: sets the flag under the lock, then signals the event so
// any thread blocked in NextFrame() wakes up and observes |stopped_|.
void FrameBuffer::Stop() {
  rtc::CritScope lock(&crit_);
  stopped_ = true;
  new_countinuous_frame_event_.Set();
}
161
philipele0b2f152016-09-28 10:23:49 +0200162int FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
philipelbe7a9e52016-05-19 12:19:35 +0200163 rtc::CritScope lock(&crit_);
philipel93e451b2016-10-06 12:25:13 +0200164 RTC_DCHECK(frame);
165
philipelc08c1912017-01-17 04:03:53 -0800166 ++num_total_frames_;
167 if (frame->num_references == 0)
168 ++num_key_frames_;
philipel266f0a42016-11-28 08:49:07 -0800169
philipelbe7a9e52016-05-19 12:19:35 +0200170 FrameKey key(frame->picture_id, frame->spatial_layer);
philipele0b2f152016-09-28 10:23:49 +0200171 int last_continuous_picture_id =
172 last_continuous_frame_it_ == frames_.end()
173 ? -1
174 : last_continuous_frame_it_->first.picture_id;
175
176 if (num_frames_buffered_ >= kMaxFramesBuffered) {
177 LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
178 << ":" << static_cast<int>(key.spatial_layer)
179 << ") could not be inserted due to the frame "
180 << "buffer being full, dropping frame.";
181 return last_continuous_picture_id;
182 }
183
184 if (frame->inter_layer_predicted && frame->spatial_layer == 0) {
185 LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
186 << ":" << static_cast<int>(key.spatial_layer)
187 << ") is marked as inter layer predicted, dropping frame.";
188 return last_continuous_picture_id;
189 }
190
191 if (last_decoded_frame_it_ != frames_.end() &&
192 key < last_decoded_frame_it_->first) {
philipelfcc60062017-01-18 05:35:20 -0800193 if (AheadOf(frame->timestamp, last_decoded_frame_timestamp_) &&
194 frame->num_references == 0) {
195 // If this frame has a newer timestamp but an earlier picture id then we
196 // assume there has been a jump in the picture id due to some encoder
197 // reconfiguration or some other reason. Even though this is not according
198 // to spec we can still continue to decode from this frame if it is a
199 // keyframe.
200 LOG(LS_WARNING) << "A jump in picture id was detected, clearing buffer.";
201 ClearFramesAndHistory();
202 last_continuous_picture_id = -1;
203 } else {
204 LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
205 << key.picture_id << ":"
206 << static_cast<int>(key.spatial_layer)
207 << ") inserted after frame ("
208 << last_decoded_frame_it_->first.picture_id << ":"
209 << static_cast<int>(
210 last_decoded_frame_it_->first.spatial_layer)
211 << ") was handed off for decoding, dropping frame.";
212 return last_continuous_picture_id;
213 }
philipele0b2f152016-09-28 10:23:49 +0200214 }
215
216 auto info = frames_.insert(std::make_pair(key, FrameInfo())).first;
217
philipel93e451b2016-10-06 12:25:13 +0200218 if (info->second.frame) {
219 LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) (" << key.picture_id
220 << ":" << static_cast<int>(key.spatial_layer)
221 << ") already inserted, dropping frame.";
philipele0b2f152016-09-28 10:23:49 +0200222 return last_continuous_picture_id;
223 }
224
philipel93e451b2016-10-06 12:25:13 +0200225 if (!UpdateFrameInfoWithIncomingFrame(*frame, info))
226 return last_continuous_picture_id;
227
philipele0b2f152016-09-28 10:23:49 +0200228 info->second.frame = std::move(frame);
229 ++num_frames_buffered_;
230
231 if (info->second.num_missing_continuous == 0) {
232 info->second.continuous = true;
233 PropagateContinuity(info);
234 last_continuous_picture_id = last_continuous_frame_it_->first.picture_id;
235
236 // Since we now have new continuous frames there might be a better frame
237 // to return from NextFrame. Signal that thread so that it again can choose
238 // which frame to return.
239 new_countinuous_frame_event_.Set();
240 }
241
242 return last_continuous_picture_id;
philipelbe7a9e52016-05-19 12:19:35 +0200243}
244
philipele0b2f152016-09-28 10:23:49 +0200245void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
246 RTC_DCHECK(start->second.continuous);
247 if (last_continuous_frame_it_ == frames_.end())
248 last_continuous_frame_it_ = start;
249
250 std::queue<FrameMap::iterator> continuous_frames;
251 continuous_frames.push(start);
252
253 // A simple BFS to traverse continuous frames.
254 while (!continuous_frames.empty()) {
255 auto frame = continuous_frames.front();
256 continuous_frames.pop();
257
258 if (last_continuous_frame_it_->first < frame->first)
259 last_continuous_frame_it_ = frame;
260
261 // Loop through all dependent frames, and if that frame no longer has
262 // any unfulfilled dependencies then that frame is continuous as well.
263 for (size_t d = 0; d < frame->second.num_dependent_frames; ++d) {
264 auto frame_ref = frames_.find(frame->second.dependent_frames[d]);
265 --frame_ref->second.num_missing_continuous;
266
267 if (frame_ref->second.num_missing_continuous == 0) {
268 frame_ref->second.continuous = true;
269 continuous_frames.push(frame_ref);
270 }
271 }
272 }
273}
274
275void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
276 for (size_t d = 0; d < info.num_dependent_frames; ++d) {
277 auto ref_info = frames_.find(info.dependent_frames[d]);
philipel93e451b2016-10-06 12:25:13 +0200278 RTC_DCHECK(ref_info != frames_.end());
philipele0b2f152016-09-28 10:23:49 +0200279 RTC_DCHECK_GT(ref_info->second.num_missing_decodable, 0U);
280 --ref_info->second.num_missing_decodable;
281 }
282}
283
// Moves |last_decoded_frame_it_| forward to |decoded|, erasing any skipped
// (never-decoded) frames on the way, and trims the decode history to
// kMaxFramesHistory entries.
void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
  if (last_decoded_frame_it_ == frames_.end()) {
    // First decoded frame: start the history at the beginning of the map.
    last_decoded_frame_it_ = frames_.begin();
  } else {
    RTC_DCHECK(last_decoded_frame_it_->first < decoded->first);
    ++last_decoded_frame_it_;
  }
  --num_frames_buffered_;
  ++num_frames_history_;

  // First, delete non-decoded frames from the history.
  while (last_decoded_frame_it_ != decoded) {
    // Entries may exist without a frame (dependency placeholders); only
    // entries that actually hold a frame count towards the buffered total.
    if (last_decoded_frame_it_->second.frame)
      --num_frames_buffered_;
    last_decoded_frame_it_ = frames_.erase(last_decoded_frame_it_);
  }

  // Then remove old history if we have too much history saved.
  if (num_frames_history_ > kMaxFramesHistory) {
    frames_.erase(frames_.begin());
    --num_frames_history_;
  }
}
307
// Initializes |info| (the FrameInfo for |frame|) by counting how many of the
// frame's references are still missing for continuity and decodability, and
// registers a backwards reference on each referenced frame so those counters
// can later be decremented by PropagateContinuity()/PropagateDecodability().
// Returns false if |frame| depends on a non-decoded frame older than the
// last decoded frame; the caller should then drop it.
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
                                                   FrameMap::iterator info) {
  FrameKey key(frame.picture_id, frame.spatial_layer);
  info->second.num_missing_continuous = frame.num_references;
  info->second.num_missing_decodable = frame.num_references;

  RTC_DCHECK(last_decoded_frame_it_ == frames_.end() ||
             last_decoded_frame_it_->first < info->first);

  // Check how many dependencies that have already been fulfilled.
  for (size_t i = 0; i < frame.num_references; ++i) {
    FrameKey ref_key(frame.references[i], frame.spatial_layer);
    auto ref_info = frames_.find(ref_key);

    // Does |frame| depend on a frame earlier than the last decoded frame?
    if (last_decoded_frame_it_ != frames_.end() &&
        ref_key <= last_decoded_frame_it_->first) {
      if (ref_info == frames_.end()) {
        // The reference has already been dropped from the history window, so
        // it can never be fulfilled.
        LOG(LS_WARNING) << "Frame with (picture_id:spatial_id) ("
                        << key.picture_id << ":"
                        << static_cast<int>(key.spatial_layer)
                        << " depends on a non-decoded frame more previous than "
                        << "the last decoded frame, dropping frame.";
        return false;
      }

      // The reference is in the decoded history, so it is fulfilled for both
      // continuity and decodability.
      --info->second.num_missing_continuous;
      --info->second.num_missing_decodable;
    } else {
      if (ref_info == frames_.end())
        ref_info = frames_.insert(std::make_pair(ref_key, FrameInfo())).first;

      if (ref_info->second.continuous)
        --info->second.num_missing_continuous;

      // Add backwards reference so |frame| can be updated when new
      // frames are inserted or decoded.
      // NOTE(review): there is no bounds check before writing into
      // |dependent_frames| -- verify against the header that its capacity
      // covers the maximum possible number of dependents.
      ref_info->second.dependent_frames[ref_info->second.num_dependent_frames] =
          key;
      ++ref_info->second.num_dependent_frames;
    }
    RTC_DCHECK_LE(ref_info->second.num_missing_continuous,
                  ref_info->second.num_missing_decodable);
  }

  // Check if we have the lower spatial layer frame.
  if (frame.inter_layer_predicted) {
    ++info->second.num_missing_continuous;
    ++info->second.num_missing_decodable;

    FrameKey ref_key(frame.picture_id, frame.spatial_layer - 1);
    // Gets or create the FrameInfo for the referenced frame.
    auto ref_info = frames_.insert(std::make_pair(ref_key, FrameInfo())).first;
    if (ref_info->second.continuous)
      --info->second.num_missing_continuous;

    if (ref_info == last_decoded_frame_it_) {
      // The lower layer frame was already decoded.
      --info->second.num_missing_decodable;
    } else {
      ref_info->second.dependent_frames[ref_info->second.num_dependent_frames] =
          key;
      ++ref_info->second.num_dependent_frames;
    }
    RTC_DCHECK_LE(ref_info->second.num_missing_continuous,
                  ref_info->second.num_missing_decodable);
  }

  RTC_DCHECK_LE(info->second.num_missing_continuous,
                info->second.num_missing_decodable);

  return true;
}
380
philipelbe742702016-11-30 01:31:40 -0800381void FrameBuffer::UpdateJitterDelay() {
philipelc08c1912017-01-17 04:03:53 -0800382 int unused;
383 int delay;
384 timing_->GetTimings(&unused, &unused, &unused, &unused, &delay, &unused,
385 &unused);
philipelbe742702016-11-30 01:31:40 -0800386
philipelc08c1912017-01-17 04:03:53 -0800387 accumulated_delay_ += delay;
388 ++accumulated_delay_samples_;
389}
390
// Reports lifetime statistics to the metrics histograms; called from the
// destructor.
void FrameBuffer::UpdateHistograms() const {
  rtc::CritScope lock(&crit_);
  if (num_total_frames_ > 0) {
    // Share of key frames in permille, rounded to nearest via the +0.5f cast.
    int key_frames_permille = (static_cast<float>(num_key_frames_) * 1000.0f /
                                   static_cast<float>(num_total_frames_) +
                               0.5f);
    RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
                              key_frames_permille);
  }

  if (accumulated_delay_samples_ > 0) {
    // Mean jitter buffer delay over all samples taken in UpdateJitterDelay().
    RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
                               accumulated_delay_ / accumulated_delay_samples_);
  }
}
406
// Drops all buffered frames and decode history, resetting the buffer to its
// initial (empty) state. Caller must hold |crit_|.
void FrameBuffer::ClearFramesAndHistory() {
  frames_.clear();
  // Iterators into |frames_| are invalidated by clear(); reset them to end().
  last_decoded_frame_it_ = frames_.end();
  last_continuous_frame_it_ = frames_.end();
  num_frames_history_ = 0;
  num_frames_buffered_ = 0;
}
414
philipelbe7a9e52016-05-19 12:19:35 +0200415} // namespace video_coding
416} // namespace webrtc